entities listlengths 1 8.61k | max_stars_repo_path stringlengths 7 172 | max_stars_repo_name stringlengths 5 89 | max_stars_count int64 0 82k | content stringlengths 14 1.05M | id stringlengths 2 6 | new_content stringlengths 15 1.05M | modified bool 1 class | references stringlengths 29 1.05M |
|---|---|---|---|---|---|---|---|---|
[
{
"context": "fileoverview Tests for vars-on-top rule.\n# @author Danny Fritz\n# @author Gyandeep Singh\n###\n'use strict'\n\n#-----",
"end": 70,
"score": 0.9998223185539246,
"start": 59,
"tag": "NAME",
"value": "Danny Fritz"
},
{
"context": " vars-on-top rule.\n# @author Danny Fritz... | src/tests/rules/vars-on-top.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Tests for vars-on-top rule.
# @author Danny Fritz
# @author Gyandeep Singh
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require '../../rules/vars-on-top'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'vars-on-top', rule,
valid: [
'''
first = 0
foo = ->
first = 2
'''
'foo = ->'
'''
foo = ->
first = null
if yes
first = yes
else
first = 1
'''
'''
foo = ->
first = null
second = 1
{third} = null
[fourth] = 1
fifth = null
{...sixth} = third
[seventh...] = null
if yes
third = yes
first = second
'''
'''
foo = ->
for i in [0...10]
alert i
'''
'''
foo = ->
outer = null
inner = ->
inner = 1
outer = inner
outer = 1
'''
'''
foo = ->
first = null
#Hello
second = 1
first = second
'''
'''
foo = ->
first = null
###
Hello Clarice
###
second = 1
first = second
'''
'''
foo = ->
first = null
second = 1
bar = ->
first = null
first = 5
first = second
'''
'''
foo = ->
first = null
second = 1
bar = ->
third = null
third = 5
first = second
'''
'''
foo = ->
first = null
bar = ->
third = null
third = 5
first = 5
'}'
'''
'''
foo = ->
first = null
first.onclick ->
third = null
third = 5
first = 5
'''
'''
foo = ->
i = 0
alert j for j in [0...10]
i = i + 1
'''
'''
'use strict'
x = null
f()
'''
'''
'use strict'
'directive'
x = y = null
f()
'''
'''
f = ->
'use strict'
x = null
f()
'''
'''
f = ->
'use strict'
'directive'
x = null
y = null
f()
'''
'''
import React from 'react'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
'use strict'
import React from 'react'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
import React from 'react'
'use strict'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
import * as foo from 'mod.js'
'use strict'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
import { square, diag } from 'lib'
'use strict'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
import { default as foo } from 'lib'
'use strict'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
import 'src/mylib'
'use strict'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
import theDefault, { named1, named2 } from 'src/mylib'
'use strict'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
export x = null
y = null
z = null
'''
'''
x = null
export y = null
z = null
'''
'''
x = null
y = null
export z = null
'''
]
invalid: [
code: '''
first = 0
foo = ->
first = 2
second = 2
second = 0
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = null
first = 1
first = 2
first = 3
first = 4
second = 1
second = 2
first = second
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = null
second = yes if yes
first = second
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = 10
i = null
for i in [0...first]
second = i
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = 10
i = null
switch first
when 10
hello = 1
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = 10
i = null
try
hello = 1
catch e
alert 'error'
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = 10
i = null
try
asdf
catch e
hello = 1
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = 10
while first
hello = 1
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = [1, 2, 3]
item = null
for item in first
hello = item
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
'use strict'
0
x = null
f()
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
f = ->
'use strict'
0
x = null
f()
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
import {foo} from 'foo'
export {foo}
test = 1
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
export {foo} from 'foo'
test = 1
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
export * from 'foo'
test = 1
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
]
| 93274 | ###*
# @fileoverview Tests for vars-on-top rule.
# @author <NAME>
# @author <NAME>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require '../../rules/vars-on-top'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'vars-on-top', rule,
valid: [
'''
first = 0
foo = ->
first = 2
'''
'foo = ->'
'''
foo = ->
first = null
if yes
first = yes
else
first = 1
'''
'''
foo = ->
first = null
second = 1
{third} = null
[fourth] = 1
fifth = null
{...sixth} = third
[seventh...] = null
if yes
third = yes
first = second
'''
'''
foo = ->
for i in [0...10]
alert i
'''
'''
foo = ->
outer = null
inner = ->
inner = 1
outer = inner
outer = 1
'''
'''
foo = ->
first = null
#Hello
second = 1
first = second
'''
'''
foo = ->
first = null
###
Hello Clarice
###
second = 1
first = second
'''
'''
foo = ->
first = null
second = 1
bar = ->
first = null
first = 5
first = second
'''
'''
foo = ->
first = null
second = 1
bar = ->
third = null
third = 5
first = second
'''
'''
foo = ->
first = null
bar = ->
third = null
third = 5
first = 5
'}'
'''
'''
foo = ->
first = null
first.onclick ->
third = null
third = 5
first = 5
'''
'''
foo = ->
i = 0
alert j for j in [0...10]
i = i + 1
'''
'''
'use strict'
x = null
f()
'''
'''
'use strict'
'directive'
x = y = null
f()
'''
'''
f = ->
'use strict'
x = null
f()
'''
'''
f = ->
'use strict'
'directive'
x = null
y = null
f()
'''
'''
import React from 'react'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
'use strict'
import React from 'react'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
import React from 'react'
'use strict'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
import * as foo from 'mod.js'
'use strict'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
import { square, diag } from 'lib'
'use strict'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
import { default as foo } from 'lib'
'use strict'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
import 'src/mylib'
'use strict'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
import theDefault, { named1, named2 } from 'src/mylib'
'use strict'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
export x = null
y = null
z = null
'''
'''
x = null
export y = null
z = null
'''
'''
x = null
y = null
export z = null
'''
]
invalid: [
code: '''
first = 0
foo = ->
first = 2
second = 2
second = 0
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = null
first = 1
first = 2
first = 3
first = 4
second = 1
second = 2
first = second
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = null
second = yes if yes
first = second
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = 10
i = null
for i in [0...first]
second = i
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = 10
i = null
switch first
when 10
hello = 1
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = 10
i = null
try
hello = 1
catch e
alert 'error'
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = 10
i = null
try
asdf
catch e
hello = 1
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = 10
while first
hello = 1
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = [1, 2, 3]
item = null
for item in first
hello = item
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
'use strict'
0
x = null
f()
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
f = ->
'use strict'
0
x = null
f()
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
import {foo} from 'foo'
export {foo}
test = 1
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
export {foo} from 'foo'
test = 1
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
export * from 'foo'
test = 1
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
]
| true | ###*
# @fileoverview Tests for vars-on-top rule.
# @author PI:NAME:<NAME>END_PI
# @author PI:NAME:<NAME>END_PI
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require '../../rules/vars-on-top'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'vars-on-top', rule,
valid: [
'''
first = 0
foo = ->
first = 2
'''
'foo = ->'
'''
foo = ->
first = null
if yes
first = yes
else
first = 1
'''
'''
foo = ->
first = null
second = 1
{third} = null
[fourth] = 1
fifth = null
{...sixth} = third
[seventh...] = null
if yes
third = yes
first = second
'''
'''
foo = ->
for i in [0...10]
alert i
'''
'''
foo = ->
outer = null
inner = ->
inner = 1
outer = inner
outer = 1
'''
'''
foo = ->
first = null
#Hello
second = 1
first = second
'''
'''
foo = ->
first = null
###
Hello Clarice
###
second = 1
first = second
'''
'''
foo = ->
first = null
second = 1
bar = ->
first = null
first = 5
first = second
'''
'''
foo = ->
first = null
second = 1
bar = ->
third = null
third = 5
first = second
'''
'''
foo = ->
first = null
bar = ->
third = null
third = 5
first = 5
'}'
'''
'''
foo = ->
first = null
first.onclick ->
third = null
third = 5
first = 5
'''
'''
foo = ->
i = 0
alert j for j in [0...10]
i = i + 1
'''
'''
'use strict'
x = null
f()
'''
'''
'use strict'
'directive'
x = y = null
f()
'''
'''
f = ->
'use strict'
x = null
f()
'''
'''
f = ->
'use strict'
'directive'
x = null
y = null
f()
'''
'''
import React from 'react'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
'use strict'
import React from 'react'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
import React from 'react'
'use strict'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
import * as foo from 'mod.js'
'use strict'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
import { square, diag } from 'lib'
'use strict'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
import { default as foo } from 'lib'
'use strict'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
import 'src/mylib'
'use strict'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
import theDefault, { named1, named2 } from 'src/mylib'
'use strict'
y = null
f = ->
'use strict'
x = null
y = null
f()
'''
'''
export x = null
y = null
z = null
'''
'''
x = null
export y = null
z = null
'''
'''
x = null
y = null
export z = null
'''
]
invalid: [
code: '''
first = 0
foo = ->
first = 2
second = 2
second = 0
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = null
first = 1
first = 2
first = 3
first = 4
second = 1
second = 2
first = second
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = null
second = yes if yes
first = second
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = 10
i = null
for i in [0...first]
second = i
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = 10
i = null
switch first
when 10
hello = 1
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = 10
i = null
try
hello = 1
catch e
alert 'error'
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = 10
i = null
try
asdf
catch e
hello = 1
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = 10
while first
hello = 1
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
foo = ->
first = [1, 2, 3]
item = null
for item in first
hello = item
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
'use strict'
0
x = null
f()
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
f = ->
'use strict'
0
x = null
f()
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
import {foo} from 'foo'
export {foo}
test = 1
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
export {foo} from 'foo'
test = 1
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
,
code: '''
export * from 'foo'
test = 1
'''
errors: [
message:
'All declarations must be at the top of the function scope.'
type: 'Identifier'
]
]
|
[
{
"context": "equire 'async'\nHASHKEY = \"#{_config.redis.unique}:role\"\n\ngetKey = (project_id, member_id)->\n \"role:#{pr",
"end": 196,
"score": 0.865892767906189,
"start": 192,
"tag": "KEY",
"value": "role"
}
] | src/redis/authority.bak.coffee | kiteam/kiteam | 0 | _common = require '../common'
_config = _common.config
_entity = require '../entity/project_member'
_connect = require './connect'
_async = require 'async'
HASHKEY = "#{_config.redis.unique}:role"
getKey = (project_id, member_id)->
"role:#{project_id}:#{member_id}"
pushToRedis = (list, cb)->
index = 0
_async.whilst(
-> index < list.length
(done)->
row = list[index++]
return done(null) if not row.member_id
key = getKey row.project_id, row.member_id
_connect.redis.hset HASHKEY, key, row.role, done
cb
)
#检查用户的角色是否在某个角色列表里面
memberRoleInList = (role, list, isAllow)->
pattern = if isAllow then /(\+|^)([\w*]+)/ else /(-)([\w*]+)/
matches = list.match pattern
return false if not matches
exists = matches[2]
# *表示所有用户,但必需有一个角色存在
return (role and exists.indexOf('*') >= 0) or exists.indexOf(role) >= 0
exports.init = (cb)->
self = @
#_connect.redis.expire HASHKEY, 1000000
_connect.redis.select _config.redis.database || 0, -> self.loadAll ->
#加载所有用户的权限
exports.loadAll = (cb)->
#清除现有的
queue = []
queue.push(
(done)-> _connect.redis.del HASHKEY, (err)-> done err
)
queue.push(
(done)-> _entity.find null, done
)
queue.push(pushToRedis)
_async.waterfall queue, cb
#删除某个人的权限
exports.remove = (project_id, member_id, cb)->
_connect.redis.hdel HASHKEY, getKey(project_id, member_id)
#设置某个用户在项目中的权限
exports.update = (project_id, member_id, role, cb)->
_connect.redis.hset HASHKEY, getKey(project_id, member_id), role, cb
#获取一个用户在某个项目中的权限
exports.get = (project_id, member_id, cb)->
return cb(null) if not project_id or not member_id
_connect.redis.hget HASHKEY, getKey(project_id, member_id), cb
#请求项目级的权限许可
#expectRole,期待
exports.permission = (project_id, member, expectRoles, cb)->
return cb(null, true) if member.role is 'a'
@get project_id, member.member_id, (err, role)->
return cb err, false if err or not role
allow = exports.roleValidate expectRoles, role
cb err, allow, role
#检查用户的权限是否合法
#rules: 规则,role:用户所拥有的权限
exports.roleValidate = (rules, role)->
!memberRoleInList(role, rules, false) and memberRoleInList(role, rules, true) | 120387 | _common = require '../common'
_config = _common.config
_entity = require '../entity/project_member'
_connect = require './connect'
_async = require 'async'
HASHKEY = "#{_config.redis.unique}:<KEY>"
getKey = (project_id, member_id)->
"role:#{project_id}:#{member_id}"
pushToRedis = (list, cb)->
index = 0
_async.whilst(
-> index < list.length
(done)->
row = list[index++]
return done(null) if not row.member_id
key = getKey row.project_id, row.member_id
_connect.redis.hset HASHKEY, key, row.role, done
cb
)
#检查用户的角色是否在某个角色列表里面
memberRoleInList = (role, list, isAllow)->
pattern = if isAllow then /(\+|^)([\w*]+)/ else /(-)([\w*]+)/
matches = list.match pattern
return false if not matches
exists = matches[2]
# *表示所有用户,但必需有一个角色存在
return (role and exists.indexOf('*') >= 0) or exists.indexOf(role) >= 0
exports.init = (cb)->
self = @
#_connect.redis.expire HASHKEY, 1000000
_connect.redis.select _config.redis.database || 0, -> self.loadAll ->
#加载所有用户的权限
exports.loadAll = (cb)->
#清除现有的
queue = []
queue.push(
(done)-> _connect.redis.del HASHKEY, (err)-> done err
)
queue.push(
(done)-> _entity.find null, done
)
queue.push(pushToRedis)
_async.waterfall queue, cb
#删除某个人的权限
exports.remove = (project_id, member_id, cb)->
_connect.redis.hdel HASHKEY, getKey(project_id, member_id)
#设置某个用户在项目中的权限
exports.update = (project_id, member_id, role, cb)->
_connect.redis.hset HASHKEY, getKey(project_id, member_id), role, cb
#获取一个用户在某个项目中的权限
exports.get = (project_id, member_id, cb)->
return cb(null) if not project_id or not member_id
_connect.redis.hget HASHKEY, getKey(project_id, member_id), cb
#请求项目级的权限许可
#expectRole,期待
exports.permission = (project_id, member, expectRoles, cb)->
return cb(null, true) if member.role is 'a'
@get project_id, member.member_id, (err, role)->
return cb err, false if err or not role
allow = exports.roleValidate expectRoles, role
cb err, allow, role
#检查用户的权限是否合法
#rules: 规则,role:用户所拥有的权限
exports.roleValidate = (rules, role)->
!memberRoleInList(role, rules, false) and memberRoleInList(role, rules, true) | true | _common = require '../common'
_config = _common.config
_entity = require '../entity/project_member'
_connect = require './connect'
_async = require 'async'
HASHKEY = "#{_config.redis.unique}:PI:KEY:<KEY>END_PI"
getKey = (project_id, member_id)->
"role:#{project_id}:#{member_id}"
pushToRedis = (list, cb)->
index = 0
_async.whilst(
-> index < list.length
(done)->
row = list[index++]
return done(null) if not row.member_id
key = getKey row.project_id, row.member_id
_connect.redis.hset HASHKEY, key, row.role, done
cb
)
#检查用户的角色是否在某个角色列表里面
memberRoleInList = (role, list, isAllow)->
pattern = if isAllow then /(\+|^)([\w*]+)/ else /(-)([\w*]+)/
matches = list.match pattern
return false if not matches
exists = matches[2]
# *表示所有用户,但必需有一个角色存在
return (role and exists.indexOf('*') >= 0) or exists.indexOf(role) >= 0
exports.init = (cb)->
self = @
#_connect.redis.expire HASHKEY, 1000000
_connect.redis.select _config.redis.database || 0, -> self.loadAll ->
#加载所有用户的权限
exports.loadAll = (cb)->
#清除现有的
queue = []
queue.push(
(done)-> _connect.redis.del HASHKEY, (err)-> done err
)
queue.push(
(done)-> _entity.find null, done
)
queue.push(pushToRedis)
_async.waterfall queue, cb
#删除某个人的权限
exports.remove = (project_id, member_id, cb)->
_connect.redis.hdel HASHKEY, getKey(project_id, member_id)
#设置某个用户在项目中的权限
exports.update = (project_id, member_id, role, cb)->
_connect.redis.hset HASHKEY, getKey(project_id, member_id), role, cb
#获取一个用户在某个项目中的权限
exports.get = (project_id, member_id, cb)->
return cb(null) if not project_id or not member_id
_connect.redis.hget HASHKEY, getKey(project_id, member_id), cb
#请求项目级的权限许可
#expectRole,期待
exports.permission = (project_id, member, expectRoles, cb)->
return cb(null, true) if member.role is 'a'
@get project_id, member.member_id, (err, role)->
return cb err, false if err or not role
allow = exports.roleValidate expectRoles, role
cb err, allow, role
#检查用户的权限是否合法
#rules: 规则,role:用户所拥有的权限
exports.roleValidate = (rules, role)->
!memberRoleInList(role, rules, false) and memberRoleInList(role, rules, true) |
[
{
"context": "nInput = {value('name')}\n placeholder = 'Full Name'\n />\n <input\n type = 'text'\n ",
"end": 1270,
"score": 0.997536301612854,
"start": 1261,
"tag": "NAME",
"value": "Full Name"
}
] | test/index.cjsx | zouiteryoussef/react-credit-card-master | 240 |
React = require('react')
Card = require('../build/card.js')
lorem = '
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
Integer nec odio. Praesent libero. Sed cursus ante dapibus diam. Sed nisi.
Nulla quis sem at nibh elementum imperdiet. Duis sagittis ipsum. Praesent
mauris. Fusce nec tellus sed augue semper porta. Mauris massa.
'
Card = React.render(
<Card shinyAfterBack = {lorem} />,
document.getElementById('container'))
focused = null
focus = (name)->
return ->
focused = name
Card.setProps focused:name
unfocus = (name)->
return ->
if focused is name
Card.setProps focused: null
value = (name)->
return (event)->
value = event.target.value
obj = {}
obj[name] = value
if name is 'expiry' and value.match /\//
obj[name] = value.replace('/', '')
Card.setProps obj
React.render(
<div className='form'>
<input
type = 'number'
onFocus = {focus('number')}
onBlur = {unfocus('number')}
onInput = {value('number')}
placeholder = '**** **** **** ****'
/>
<input
type = 'text'
onFocus = {focus('name')}
onBlur = {unfocus('name')}
onInput = {value('name')}
placeholder = 'Full Name'
/>
<input
type = 'text'
onFocus = {focus('expiry')}
onBlur = {unfocus('expiry')}
onInput = {value('expiry')}
placeholder = '**/**'
/>
<input
type = 'number'
onFocus = {focus('cvc')}
onBlur = {unfocus('cvc')}
onInput = {value('cvc')}
placeholder = '***'
/>
</div>
, document.getElementById('form'))
| 169445 |
React = require('react')
Card = require('../build/card.js')
lorem = '
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
Integer nec odio. Praesent libero. Sed cursus ante dapibus diam. Sed nisi.
Nulla quis sem at nibh elementum imperdiet. Duis sagittis ipsum. Praesent
mauris. Fusce nec tellus sed augue semper porta. Mauris massa.
'
Card = React.render(
<Card shinyAfterBack = {lorem} />,
document.getElementById('container'))
focused = null
focus = (name)->
return ->
focused = name
Card.setProps focused:name
unfocus = (name)->
return ->
if focused is name
Card.setProps focused: null
value = (name)->
return (event)->
value = event.target.value
obj = {}
obj[name] = value
if name is 'expiry' and value.match /\//
obj[name] = value.replace('/', '')
Card.setProps obj
React.render(
<div className='form'>
<input
type = 'number'
onFocus = {focus('number')}
onBlur = {unfocus('number')}
onInput = {value('number')}
placeholder = '**** **** **** ****'
/>
<input
type = 'text'
onFocus = {focus('name')}
onBlur = {unfocus('name')}
onInput = {value('name')}
placeholder = '<NAME>'
/>
<input
type = 'text'
onFocus = {focus('expiry')}
onBlur = {unfocus('expiry')}
onInput = {value('expiry')}
placeholder = '**/**'
/>
<input
type = 'number'
onFocus = {focus('cvc')}
onBlur = {unfocus('cvc')}
onInput = {value('cvc')}
placeholder = '***'
/>
</div>
, document.getElementById('form'))
| true |
React = require('react')
Card = require('../build/card.js')
lorem = '
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
Integer nec odio. Praesent libero. Sed cursus ante dapibus diam. Sed nisi.
Nulla quis sem at nibh elementum imperdiet. Duis sagittis ipsum. Praesent
mauris. Fusce nec tellus sed augue semper porta. Mauris massa.
'
Card = React.render(
<Card shinyAfterBack = {lorem} />,
document.getElementById('container'))
focused = null
focus = (name)->
return ->
focused = name
Card.setProps focused:name
unfocus = (name)->
return ->
if focused is name
Card.setProps focused: null
value = (name)->
return (event)->
value = event.target.value
obj = {}
obj[name] = value
if name is 'expiry' and value.match /\//
obj[name] = value.replace('/', '')
Card.setProps obj
React.render(
<div className='form'>
<input
type = 'number'
onFocus = {focus('number')}
onBlur = {unfocus('number')}
onInput = {value('number')}
placeholder = '**** **** **** ****'
/>
<input
type = 'text'
onFocus = {focus('name')}
onBlur = {unfocus('name')}
onInput = {value('name')}
placeholder = 'PI:NAME:<NAME>END_PI'
/>
<input
type = 'text'
onFocus = {focus('expiry')}
onBlur = {unfocus('expiry')}
onInput = {value('expiry')}
placeholder = '**/**'
/>
<input
type = 'number'
onFocus = {focus('cvc')}
onBlur = {unfocus('cvc')}
onInput = {value('cvc')}
placeholder = '***'
/>
</div>
, document.getElementById('form'))
|
[
{
"context": "s not valid\").isEmail()\n req.assert(\"password\", \"Password cannot be blank\").notEmpty()\n validationErrors = req.validationE",
"end": 707,
"score": 0.8328497409820557,
"start": 683,
"tag": "PASSWORD",
"value": "Password cannot be blank"
},
{
"context": "{name: re... | development/server/controllers/user.coffee | SwingDev/MEAN-starter | 0 | _ = require("lodash")
async = require("async")
crypto = require("crypto")
nodemailer = require("nodemailer")
passport = require("passport")
User = require("../models/User")
config = require("../config/config")
mailer = require("./mailer")
swig = require('swig')
path = require("path")
UserNotFoundError = (message) ->
@name = "UserNotFoundError"
@message = (message || "")
return
UserNotFoundError.prototype = new Error()
###
POST /signin/
Sign in using email and password.
@param email
@param password
###
exports.postLogin = (req, res, next) ->
req.assert("email", "Email is not valid").isEmail()
req.assert("password", "Password cannot be blank").notEmpty()
validationErrors = req.validationErrors()
if validationErrors
res.json(400, {'validationErrors': validationErrors})
return
passport.authenticate("local", (err, user, info) ->
return next(err) if err
unless user
res.json(403, {"error": info.message})
return
req.logIn user, (err) ->
return next(err) if err
res.json(200, {"user": user})
return
return
) req, res, next
return
###
GET /current/
###
exports.isLoggedIn = (req, res, next) ->
if req.isAuthenticated()
res.json(200, {"user": req.user})
else
res.json(403, {})
###
POST /signout/
Log out.
###
exports.logout = (req, res) ->
req.logout()
res.json(200, {"message": "Logged out"})
return
###
POST /signup/
Create a new local account.
@param email
@param password
###
exports.postSignup = (req, res, next) ->
req.assert("email", "Email is not valid").isEmail()
req.assert("password", "Password must be at least 4 characters long").len 4
validationErrors = req.validationErrors()
if validationErrors
console.error(validationErrors)
res.json(400, {"validationErrors": validationErrors})
return
user = new User(
email: req.body.email
profile: {name: req.body.name || ""}
password: req.body.password
)
User.findOne
email: req.body.email
, (err, existingUser) ->
if existingUser
console.error("Account with that email address already exists: " + req.body.email)
res.json(400, {"validationErrors": "Account with that email address already exists: " + req.body.email})
return
# req.flash "errors",
# msg: "Account with that email address already exists."
user.save (err) ->
return next(err) if err
req.logIn user, (err) ->
return next(err) if err
res.json(202, {"info": "User " + user.email + " created."})
return
return
return
return
###
POST /forgot/
Create a random token, then the send user an email with a reset link.
@param email
###
exports.postForgot = (req, res, next) ->
req.assert("email", "Please enter a valid email address.").isEmail()
validationErrors = req.validationErrors()
if validationErrors
res.json(400, {"validationErrors": validationErrors})
return
async.waterfall [
(done) ->
crypto.randomBytes 16, (err, buf) ->
token = buf.toString("hex")
done err, token
return
(token, done) ->
User.findOne email: req.body.email.toLowerCase()
, (err, user) ->
if not user
done(new UserNotFoundError("Can't find email: " + req.body.email))
else
user.resetPasswordToken = token
user.resetPasswordExpires = Date.now() + 3600000 # 1 hour
user.save (err) ->
done err, token, user
return
return
(token, user, done) ->
if not (process.env.NODE_ENV in ['dev', 'test'])
smtpTransport = mailer.createSmtpTransport()
mailOptions =
to: user.email
from: config.mailer.defaulFromAddress
subject: "Reset your password "
text: swig.compileFile(path.join(__dirname, '../views/email/forgot/text.swig'))({'reset_url': req.host + '/reset_password', 'token': token})
smtpTransport.sendMail mailOptions, (err) ->
done err, token
else
done null, token
], (err, token) ->
if err instanceof UserNotFoundError
res.json(404, {"error": err.message})
return
else if err then return next(err)
if process.env.NODE_ENV == "test"
res.json(200, {"message": "Password reset email sent to " + req.body.email, "token": token})
else
console.log("Token: " + token)
res.json(200, {"message": "Password reset email sent to " + req.body.email})
return
return
###
POST /reset/
Process the reset password request.
@param token
@param password
###
exports.postReset = (req, res, next) ->
req.assert("password", "Password must be at least 4 characters long.").len 4
req.assert("token", "Token can't be empty").len 1
validationErrors = req.validationErrors()
if validationErrors
res.json(400, {"validationErrors": validationErrors})
return
User.findOne(resetPasswordToken: req.body.token).where("resetPasswordExpires").gt(Date.now()).exec (err, user) ->
return next(err) if err
if not user
res.json(404, {error: "Can't find token: " + req.body.token})
return
else
user.password = req.body.password
user.resetPasswordToken = `undefined`
user.resetPasswordExpires = `undefined`
user.save (err) ->
return next(err) if err
req.logIn user, (err) ->
return next(err) if err
res.json(200, {"message": "Password updated."})
return
return
return
return
exports.getUser = (req, res, next) ->
req.assert("email", "You need to say email of the user you want to get").isEmail()
validationErrors = req.validationErrors()
if validationErrors
res.json(400, {"validationErrors": validationErrors})
return
if req.isAuthenticated()
if req.user.email == req.params.email
res.json(200, {"user": req.user})
return
if req.user.isAdmin
User.findOne {email: req.params.email}, (err, user) ->
return next(err) if err
if user
res.json(200, {"user": user})
else
res.json(404, {error: "Can't find user with email: " + req.params.email})
return
res.json(403, {})
exports.patchUser = (req, res, next) ->
req.assert("email", "You need to say email of the user you want to change").isEmail()
validationErrors = req.validationErrors()
if validationErrors
res.json(400, {"validationErrors": validationErrors})
return
if req.isAuthenticated()
if req.user.email == req.params.email or req.user.isAdmin
if 'isAdmin' of req.body and not req.user.isAdmin
return res.json(403, {"error": "Only admin can make new admins."})
User.findOne {email: req.params.email}, (err, user) ->
return next(err) if err
return res.json(404, {error: "Can't find user with email: " + req.params.email}) if not user
user.updateDocument req.body
, (err, user) ->
return next(err) if err
return res.json(200, {"user": user})
return
res.json(403, {})
exports.deleteUser = (req, res, next) ->
req.assert("email", "You need to say email of the user you want to remove").isEmail()
validationErrors = req.validationErrors()
if validationErrors
res.json(400, {"validationErrors": validationErrors})
return
if req.isAuthenticated()
if req.user.email == req.params.email or req.user.isAdmin
User.remove {email: req.params.email}, (err) ->
return next(err) if err
req.logout() if not req.user.isAdmin
res.json(200, {"message": "Account " + req.params.email + " has been removed."})
return
res.json(403, {})
###
~~~~~~~~~~~~~~~~~~~~ Changed to API until this point ~~~~~~~~~~~~~~~~~~~~~~~
###
###
POST /account/profile
Update profile information.
###
exports.postUpdateProfile = (req, res, next) ->
User.findById req.user.id, (err, user) ->
return next(err) if err
user.email = req.body.email or ""
user.profile.name = req.body.name or ""
user.profile.gender = req.body.gender or ""
user.profile.location = req.body.location or ""
user.profile.website = req.body.website or ""
user.save (err) ->
return next(err) if err
req.flash "success",
msg: "Profile information updated."
res.redirect "/account"
return
return
return
###
POST /account/password
Update current password.
@param password
###
exports.postUpdatePassword = (req, res, next) ->
req.assert("password", "Password must be at least 4 characters long").len 4
req.assert("confirmPassword", "Passwords do not match").equals req.body.password
errors = req.validationErrors()
if errors
req.flash "errors", errors
return res.redirect("/account")
User.findById req.user.id, (err, user) ->
return next(err) if err
user.password = req.body.password
user.save (err) ->
return next(err) if err
req.flash "success",
msg: "Password has been changed."
res.redirect "/account"
return
return
return
###
GET /account/unlink/:provider
Unlink OAuth provider.
@param provider
###
exports.getOauthUnlink = (req, res, next) ->
provider = req.params.provider
User.findById req.user.id, (err, user) ->
return next(err) if err
user[provider] = `undefined`
user.tokens = _.reject(user.tokens, (token) ->
token.kind is provider
)
user.save (err) ->
return next(err) if err
req.flash "info",
msg: provider + " account has been unlinked."
res.redirect "/account"
return
return
return
| 178574 | _ = require("lodash")
async = require("async")
crypto = require("crypto")
nodemailer = require("nodemailer")
passport = require("passport")
User = require("../models/User")
config = require("../config/config")
mailer = require("./mailer")
swig = require('swig')
path = require("path")
UserNotFoundError = (message) ->
@name = "UserNotFoundError"
@message = (message || "")
return
UserNotFoundError.prototype = new Error()
###
POST /signin/
Sign in using email and password.
@param email
@param password
###
exports.postLogin = (req, res, next) ->
req.assert("email", "Email is not valid").isEmail()
req.assert("password", "<PASSWORD>").notEmpty()
validationErrors = req.validationErrors()
if validationErrors
res.json(400, {'validationErrors': validationErrors})
return
passport.authenticate("local", (err, user, info) ->
return next(err) if err
unless user
res.json(403, {"error": info.message})
return
req.logIn user, (err) ->
return next(err) if err
res.json(200, {"user": user})
return
return
) req, res, next
return
###
GET /current/
###
exports.isLoggedIn = (req, res, next) ->
if req.isAuthenticated()
res.json(200, {"user": req.user})
else
res.json(403, {})
###
POST /signout/
Log out.
###
exports.logout = (req, res) ->
req.logout()
res.json(200, {"message": "Logged out"})
return
###
POST /signup/
Create a new local account.
@param email
@param password
###
exports.postSignup = (req, res, next) ->
req.assert("email", "Email is not valid").isEmail()
req.assert("password", "Password must be at least 4 characters long").len 4
validationErrors = req.validationErrors()
if validationErrors
console.error(validationErrors)
res.json(400, {"validationErrors": validationErrors})
return
user = new User(
email: req.body.email
profile: {name: req.body.name || ""}
password: req.body.<PASSWORD>
)
User.findOne
email: req.body.email
, (err, existingUser) ->
if existingUser
console.error("Account with that email address already exists: " + req.body.email)
res.json(400, {"validationErrors": "Account with that email address already exists: " + req.body.email})
return
# req.flash "errors",
# msg: "Account with that email address already exists."
user.save (err) ->
return next(err) if err
req.logIn user, (err) ->
return next(err) if err
res.json(202, {"info": "User " + user.email + " created."})
return
return
return
return
###
POST /forgot/
Create a random token, then the send user an email with a reset link.
@param email
###
exports.postForgot = (req, res, next) ->
req.assert("email", "Please enter a valid email address.").isEmail()
validationErrors = req.validationErrors()
if validationErrors
res.json(400, {"validationErrors": validationErrors})
return
async.waterfall [
(done) ->
crypto.randomBytes 16, (err, buf) ->
token = buf.toString("hex")
done err, token
return
(token, done) ->
User.findOne email: req.body.email.toLowerCase()
, (err, user) ->
if not user
done(new UserNotFoundError("Can't find email: " + req.body.email))
else
user.resetPasswordToken = <PASSWORD>
user.resetPasswordExpires = Date.now() + 3600000 # 1 hour
user.save (err) ->
done err, token, user
return
return
(token, user, done) ->
if not (process.env.NODE_ENV in ['dev', 'test'])
smtpTransport = mailer.createSmtpTransport()
mailOptions =
to: user.email
from: config.mailer.defaulFromAddress
subject: "Reset your password "
text: swig.compileFile(path.join(__dirname, '../views/email/forgot/text.swig'))({'reset_url': req.host + '/reset_password', 'token': token})
smtpTransport.sendMail mailOptions, (err) ->
done err, token
else
done null, token
], (err, token) ->
if err instanceof UserNotFoundError
res.json(404, {"error": err.message})
return
else if err then return next(err)
if process.env.NODE_ENV == "test"
res.json(200, {"message": "Password reset email sent to " + req.body.email, "token": token})
else
console.log("Token: " + token)
res.json(200, {"message": "Password reset email sent to " + req.body.email})
return
return
###
POST /reset/
Process the reset password request.
@param token
@param password
###
exports.postReset = (req, res, next) ->
req.assert("password", "Password must be at least 4 characters long.").len 4
req.assert("token", "Token can't be empty").len 1
validationErrors = req.validationErrors()
if validationErrors
res.json(400, {"validationErrors": validationErrors})
return
User.findOne(resetPasswordToken: req.body.token).where("resetPasswordExpires").gt(Date.now()).exec (err, user) ->
return next(err) if err
if not user
res.json(404, {error: "Can't find token: " + req.body.token})
return
else
user.password = <PASSWORD>
user.resetPasswordToken = `<PASSWORD>`
user.resetPasswordExpires = `<PASSWORD>`
user.save (err) ->
return next(err) if err
req.logIn user, (err) ->
return next(err) if err
res.json(200, {"message": "Password updated."})
return
return
return
return
exports.getUser = (req, res, next) ->
req.assert("email", "You need to say email of the user you want to get").isEmail()
validationErrors = req.validationErrors()
if validationErrors
res.json(400, {"validationErrors": validationErrors})
return
if req.isAuthenticated()
if req.user.email == req.params.email
res.json(200, {"user": req.user})
return
if req.user.isAdmin
User.findOne {email: req.params.email}, (err, user) ->
return next(err) if err
if user
res.json(200, {"user": user})
else
res.json(404, {error: "Can't find user with email: " + req.params.email})
return
res.json(403, {})
exports.patchUser = (req, res, next) ->
req.assert("email", "You need to say email of the user you want to change").isEmail()
validationErrors = req.validationErrors()
if validationErrors
res.json(400, {"validationErrors": validationErrors})
return
if req.isAuthenticated()
if req.user.email == req.params.email or req.user.isAdmin
if 'isAdmin' of req.body and not req.user.isAdmin
return res.json(403, {"error": "Only admin can make new admins."})
User.findOne {email: req.params.email}, (err, user) ->
return next(err) if err
return res.json(404, {error: "Can't find user with email: " + req.params.email}) if not user
user.updateDocument req.body
, (err, user) ->
return next(err) if err
return res.json(200, {"user": user})
return
res.json(403, {})
exports.deleteUser = (req, res, next) ->
req.assert("email", "You need to say email of the user you want to remove").isEmail()
validationErrors = req.validationErrors()
if validationErrors
res.json(400, {"validationErrors": validationErrors})
return
if req.isAuthenticated()
if req.user.email == req.params.email or req.user.isAdmin
User.remove {email: req.params.email}, (err) ->
return next(err) if err
req.logout() if not req.user.isAdmin
res.json(200, {"message": "Account " + req.params.email + " has been removed."})
return
res.json(403, {})
###
~~~~~~~~~~~~~~~~~~~~ Changed to API until this point ~~~~~~~~~~~~~~~~~~~~~~~
###
###
POST /account/profile
Update profile information.
###
exports.postUpdateProfile = (req, res, next) ->
User.findById req.user.id, (err, user) ->
return next(err) if err
user.email = req.body.email or ""
user.profile.name = req.body.name or ""
user.profile.gender = req.body.gender or ""
user.profile.location = req.body.location or ""
user.profile.website = req.body.website or ""
user.save (err) ->
return next(err) if err
req.flash "success",
msg: "Profile information updated."
res.redirect "/account"
return
return
return
###
POST /account/password
Update current password.
@param password
###
exports.postUpdatePassword = (req, res, next) ->
req.assert("password", "Password must be at least 4 characters long").len 4
req.assert("confirmPassword", "Passwords do not match").equals req.body.password
errors = req.validationErrors()
if errors
req.flash "errors", errors
return res.redirect("/account")
User.findById req.user.id, (err, user) ->
return next(err) if err
user.password = <PASSWORD>
user.save (err) ->
return next(err) if err
req.flash "success",
msg: "Password has been changed."
res.redirect "/account"
return
return
return
###
GET /account/unlink/:provider
Unlink OAuth provider.
@param provider
###
exports.getOauthUnlink = (req, res, next) ->
provider = req.params.provider
User.findById req.user.id, (err, user) ->
return next(err) if err
user[provider] = `undefined`
user.tokens = _.reject(user.tokens, (token) ->
token.kind is provider
)
user.save (err) ->
return next(err) if err
req.flash "info",
msg: provider + " account has been unlinked."
res.redirect "/account"
return
return
return
| true | _ = require("lodash")
async = require("async")
crypto = require("crypto")
nodemailer = require("nodemailer")
passport = require("passport")
User = require("../models/User")
config = require("../config/config")
mailer = require("./mailer")
swig = require('swig')
path = require("path")
UserNotFoundError = (message) ->
@name = "UserNotFoundError"
@message = (message || "")
return
UserNotFoundError.prototype = new Error()
###
POST /signin/
Sign in using email and password.
@param email
@param password
###
exports.postLogin = (req, res, next) ->
req.assert("email", "Email is not valid").isEmail()
req.assert("password", "PI:PASSWORD:<PASSWORD>END_PI").notEmpty()
validationErrors = req.validationErrors()
if validationErrors
res.json(400, {'validationErrors': validationErrors})
return
passport.authenticate("local", (err, user, info) ->
return next(err) if err
unless user
res.json(403, {"error": info.message})
return
req.logIn user, (err) ->
return next(err) if err
res.json(200, {"user": user})
return
return
) req, res, next
return
###
GET /current/
###
exports.isLoggedIn = (req, res, next) ->
if req.isAuthenticated()
res.json(200, {"user": req.user})
else
res.json(403, {})
###
POST /signout/
Log out.
###
exports.logout = (req, res) ->
req.logout()
res.json(200, {"message": "Logged out"})
return
###
POST /signup/
Create a new local account.
@param email
@param password
###
exports.postSignup = (req, res, next) ->
req.assert("email", "Email is not valid").isEmail()
req.assert("password", "Password must be at least 4 characters long").len 4
validationErrors = req.validationErrors()
if validationErrors
console.error(validationErrors)
res.json(400, {"validationErrors": validationErrors})
return
user = new User(
email: req.body.email
profile: {name: req.body.name || ""}
password: req.body.PI:PASSWORD:<PASSWORD>END_PI
)
User.findOne
email: req.body.email
, (err, existingUser) ->
if existingUser
console.error("Account with that email address already exists: " + req.body.email)
res.json(400, {"validationErrors": "Account with that email address already exists: " + req.body.email})
return
# req.flash "errors",
# msg: "Account with that email address already exists."
user.save (err) ->
return next(err) if err
req.logIn user, (err) ->
return next(err) if err
res.json(202, {"info": "User " + user.email + " created."})
return
return
return
return
###
POST /forgot/
Create a random token, then the send user an email with a reset link.
@param email
###
exports.postForgot = (req, res, next) ->
req.assert("email", "Please enter a valid email address.").isEmail()
validationErrors = req.validationErrors()
if validationErrors
res.json(400, {"validationErrors": validationErrors})
return
async.waterfall [
(done) ->
crypto.randomBytes 16, (err, buf) ->
token = buf.toString("hex")
done err, token
return
(token, done) ->
User.findOne email: req.body.email.toLowerCase()
, (err, user) ->
if not user
done(new UserNotFoundError("Can't find email: " + req.body.email))
else
user.resetPasswordToken = PI:PASSWORD:<PASSWORD>END_PI
user.resetPasswordExpires = Date.now() + 3600000 # 1 hour
user.save (err) ->
done err, token, user
return
return
(token, user, done) ->
if not (process.env.NODE_ENV in ['dev', 'test'])
smtpTransport = mailer.createSmtpTransport()
mailOptions =
to: user.email
from: config.mailer.defaulFromAddress
subject: "Reset your password "
text: swig.compileFile(path.join(__dirname, '../views/email/forgot/text.swig'))({'reset_url': req.host + '/reset_password', 'token': token})
smtpTransport.sendMail mailOptions, (err) ->
done err, token
else
done null, token
], (err, token) ->
if err instanceof UserNotFoundError
res.json(404, {"error": err.message})
return
else if err then return next(err)
if process.env.NODE_ENV == "test"
res.json(200, {"message": "Password reset email sent to " + req.body.email, "token": token})
else
console.log("Token: " + token)
res.json(200, {"message": "Password reset email sent to " + req.body.email})
return
return
###
POST /reset/
Process the reset password request.
@param token
@param password
###
exports.postReset = (req, res, next) ->
req.assert("password", "Password must be at least 4 characters long.").len 4
req.assert("token", "Token can't be empty").len 1
validationErrors = req.validationErrors()
if validationErrors
res.json(400, {"validationErrors": validationErrors})
return
User.findOne(resetPasswordToken: req.body.token).where("resetPasswordExpires").gt(Date.now()).exec (err, user) ->
return next(err) if err
if not user
res.json(404, {error: "Can't find token: " + req.body.token})
return
else
user.password = PI:PASSWORD:<PASSWORD>END_PI
user.resetPasswordToken = `PI:PASSWORD:<PASSWORD>END_PI`
user.resetPasswordExpires = `PI:PASSWORD:<PASSWORD>END_PI`
user.save (err) ->
return next(err) if err
req.logIn user, (err) ->
return next(err) if err
res.json(200, {"message": "Password updated."})
return
return
return
return
exports.getUser = (req, res, next) ->
req.assert("email", "You need to say email of the user you want to get").isEmail()
validationErrors = req.validationErrors()
if validationErrors
res.json(400, {"validationErrors": validationErrors})
return
if req.isAuthenticated()
if req.user.email == req.params.email
res.json(200, {"user": req.user})
return
if req.user.isAdmin
User.findOne {email: req.params.email}, (err, user) ->
return next(err) if err
if user
res.json(200, {"user": user})
else
res.json(404, {error: "Can't find user with email: " + req.params.email})
return
res.json(403, {})
exports.patchUser = (req, res, next) ->
req.assert("email", "You need to say email of the user you want to change").isEmail()
validationErrors = req.validationErrors()
if validationErrors
res.json(400, {"validationErrors": validationErrors})
return
if req.isAuthenticated()
if req.user.email == req.params.email or req.user.isAdmin
if 'isAdmin' of req.body and not req.user.isAdmin
return res.json(403, {"error": "Only admin can make new admins."})
User.findOne {email: req.params.email}, (err, user) ->
return next(err) if err
return res.json(404, {error: "Can't find user with email: " + req.params.email}) if not user
user.updateDocument req.body
, (err, user) ->
return next(err) if err
return res.json(200, {"user": user})
return
res.json(403, {})
exports.deleteUser = (req, res, next) ->
req.assert("email", "You need to say email of the user you want to remove").isEmail()
validationErrors = req.validationErrors()
if validationErrors
res.json(400, {"validationErrors": validationErrors})
return
if req.isAuthenticated()
if req.user.email == req.params.email or req.user.isAdmin
User.remove {email: req.params.email}, (err) ->
return next(err) if err
req.logout() if not req.user.isAdmin
res.json(200, {"message": "Account " + req.params.email + " has been removed."})
return
res.json(403, {})
###
~~~~~~~~~~~~~~~~~~~~ Changed to API until this point ~~~~~~~~~~~~~~~~~~~~~~~
###
###
POST /account/profile
Update profile information.
###
exports.postUpdateProfile = (req, res, next) ->
User.findById req.user.id, (err, user) ->
return next(err) if err
user.email = req.body.email or ""
user.profile.name = req.body.name or ""
user.profile.gender = req.body.gender or ""
user.profile.location = req.body.location or ""
user.profile.website = req.body.website or ""
user.save (err) ->
return next(err) if err
req.flash "success",
msg: "Profile information updated."
res.redirect "/account"
return
return
return
###
POST /account/password
Update current password.
@param password
###
exports.postUpdatePassword = (req, res, next) ->
req.assert("password", "Password must be at least 4 characters long").len 4
req.assert("confirmPassword", "Passwords do not match").equals req.body.password
errors = req.validationErrors()
if errors
req.flash "errors", errors
return res.redirect("/account")
User.findById req.user.id, (err, user) ->
return next(err) if err
user.password = PI:PASSWORD:<PASSWORD>END_PI
user.save (err) ->
return next(err) if err
req.flash "success",
msg: "Password has been changed."
res.redirect "/account"
return
return
return
###
GET /account/unlink/:provider
Unlink OAuth provider.
@param provider
###
exports.getOauthUnlink = (req, res, next) ->
provider = req.params.provider
User.findById req.user.id, (err, user) ->
return next(err) if err
user[provider] = `undefined`
user.tokens = _.reject(user.tokens, (token) ->
token.kind is provider
)
user.save (err) ->
return next(err) if err
req.flash "info",
msg: provider + " account has been unlinked."
res.redirect "/account"
return
return
return
|
[
{
"context": "# TODO: s/ECMA/Java/ once finished\nname: \"JSX\"\nscopeName: \"source.ecmascript.jsx\"\nfileTypes: [\n",
"end": 45,
"score": 0.875259280204773,
"start": 42,
"tag": "NAME",
"value": "JSX"
}
] | grammars/jsx.cson | Alhadis/language-js | 4 | # TODO: s/ECMA/Java/ once finished
name: "JSX"
scopeName: "source.ecmascript.jsx"
fileTypes: [
".jsx"
".babel"
".flow"
]
firstLineMatch: """(?x)
# Babel pragma
( /[*]{2}\\s*@babel\\s*[*]/
| "use babel"
| 'use babel'
| /[*]\\s*@flow\\s*[*]/
)
|
# Modeline
(?i:
# Emacs
-\\*-(?:\\s*(?=[^:;\\s]+\\s*-\\*-)|(?:.*?[;\\s]|(?<=-\\*-))mode\\s*:\\s*)
(?:js2-)?jsx
(?=[\\s;]|(?<![-*])-\\*-).*?-\\*-
|
# Vim
(?:(?:\\s|^)vi(?:m[<=>]?\\d+|m)?|\\sex)(?=:(?=\\s*set?\\s[^\\n:]+:)|:(?!\\s* set?\\s))(?:(?:\\s|\\s*:\\s*)\\w*(?:\\s*=(?:[^\\n\\\\\\s]|\\\\.)*)?)*[\\s:](?:filetype|ft|syntax)\\s*=
(?:javascript\.)?jsx
(?=\\s|:|$)
)
"""
patterns: [include: "source.ecmascript"]
| 90300 | # TODO: s/ECMA/Java/ once finished
name: "<NAME>"
scopeName: "source.ecmascript.jsx"
fileTypes: [
".jsx"
".babel"
".flow"
]
firstLineMatch: """(?x)
# Babel pragma
( /[*]{2}\\s*@babel\\s*[*]/
| "use babel"
| 'use babel'
| /[*]\\s*@flow\\s*[*]/
)
|
# Modeline
(?i:
# Emacs
-\\*-(?:\\s*(?=[^:;\\s]+\\s*-\\*-)|(?:.*?[;\\s]|(?<=-\\*-))mode\\s*:\\s*)
(?:js2-)?jsx
(?=[\\s;]|(?<![-*])-\\*-).*?-\\*-
|
# Vim
(?:(?:\\s|^)vi(?:m[<=>]?\\d+|m)?|\\sex)(?=:(?=\\s*set?\\s[^\\n:]+:)|:(?!\\s* set?\\s))(?:(?:\\s|\\s*:\\s*)\\w*(?:\\s*=(?:[^\\n\\\\\\s]|\\\\.)*)?)*[\\s:](?:filetype|ft|syntax)\\s*=
(?:javascript\.)?jsx
(?=\\s|:|$)
)
"""
patterns: [include: "source.ecmascript"]
| true | # TODO: s/ECMA/Java/ once finished
name: "PI:NAME:<NAME>END_PI"
scopeName: "source.ecmascript.jsx"
fileTypes: [
".jsx"
".babel"
".flow"
]
firstLineMatch: """(?x)
# Babel pragma
( /[*]{2}\\s*@babel\\s*[*]/
| "use babel"
| 'use babel'
| /[*]\\s*@flow\\s*[*]/
)
|
# Modeline
(?i:
# Emacs
-\\*-(?:\\s*(?=[^:;\\s]+\\s*-\\*-)|(?:.*?[;\\s]|(?<=-\\*-))mode\\s*:\\s*)
(?:js2-)?jsx
(?=[\\s;]|(?<![-*])-\\*-).*?-\\*-
|
# Vim
(?:(?:\\s|^)vi(?:m[<=>]?\\d+|m)?|\\sex)(?=:(?=\\s*set?\\s[^\\n:]+:)|:(?!\\s* set?\\s))(?:(?:\\s|\\s*:\\s*)\\w*(?:\\s*=(?:[^\\n\\\\\\s]|\\\\.)*)?)*[\\s:](?:filetype|ft|syntax)\\s*=
(?:javascript\.)?jsx
(?=\\s|:|$)
)
"""
patterns: [include: "source.ecmascript"]
|
[
{
"context": " creatorLabel = @game.add.text(0, 48, 'Made by: Andrew O.', {font: '48px VT323', fill: '#fff'})\n creato",
"end": 151,
"score": 0.9991275072097778,
"start": 143,
"tag": "NAME",
"value": "Andrew O"
},
{
"context": " creatorSubLabel = @game.add.text(0, 88, 'Twitter:... | scripts/coffee/credits.coffee | oppenlander/flappy-donk | 0 | BaseState = require './basestate'
class Credits extends BaseState
create: ->
super
creatorLabel = @game.add.text(0, 48, 'Made by: Andrew O.', {font: '48px VT323', fill: '#fff'})
creatorLabel.x = @gw/2 - creatorLabel._width/2
creatorSubLabel = @game.add.text(0, 88, 'Twitter: usagimaru57', {font: '32px VT323', fill: '#fff'})
creatorSubLabel.x = @gw/2 - creatorSubLabel._width/2
creatorSubSubLabel = @game.add.text(0, 118, 'GitHub: oppenlander', {font: '32px VT323', fill: '#fff'})
creatorSubSubLabel.x = @gw/2 - creatorSubSubLabel._width/2
musicLabel = @game.add.text(0, @gh/2+70, 'Music: Rocco Wouters', {font: '32px VT323', fill: '#fff'})
musicLabel.x = @gw/2 - musicLabel._width/2
explosionLabel = @game.add.text(0, @gh/2+110, 'SFX: SoundBible.com', {font: '32px VT323', fill: '#fff'})
explosionLabel.x = @gw/2 - explosionLabel._width/2
startLabel = @game.add.text(10, @gh-58, 'Start: SPACE', {font: '16px VT323', fill: '#fff'})
menuLabel = @game.add.text(0, @gh-58, 'Menu/Character Select: M', {font: '16px VT323', fill: '#fff'})
menuLabel.x = @gw - (menuLabel._width + 10)
@createIdle()
update: ->
super
@player.idle()
module.exports = Credits
| 175473 | BaseState = require './basestate'
class Credits extends BaseState
create: ->
super
creatorLabel = @game.add.text(0, 48, 'Made by: <NAME>.', {font: '48px VT323', fill: '#fff'})
creatorLabel.x = @gw/2 - creatorLabel._width/2
creatorSubLabel = @game.add.text(0, 88, 'Twitter: usagimaru57', {font: '32px VT323', fill: '#fff'})
creatorSubLabel.x = @gw/2 - creatorSubLabel._width/2
creatorSubSubLabel = @game.add.text(0, 118, 'GitHub: oppenlander', {font: '32px VT323', fill: '#fff'})
creatorSubSubLabel.x = @gw/2 - creatorSubSubLabel._width/2
musicLabel = @game.add.text(0, @gh/2+70, 'Music: Rocco Wouters', {font: '32px VT323', fill: '#fff'})
musicLabel.x = @gw/2 - musicLabel._width/2
explosionLabel = @game.add.text(0, @gh/2+110, 'SFX: SoundBible.com', {font: '32px VT323', fill: '#fff'})
explosionLabel.x = @gw/2 - explosionLabel._width/2
startLabel = @game.add.text(10, @gh-58, 'Start: SPACE', {font: '16px VT323', fill: '#fff'})
menuLabel = @game.add.text(0, @gh-58, 'Menu/Character Select: M', {font: '16px VT323', fill: '#fff'})
menuLabel.x = @gw - (menuLabel._width + 10)
@createIdle()
update: ->
super
@player.idle()
module.exports = Credits
| true | BaseState = require './basestate'
class Credits extends BaseState
create: ->
super
creatorLabel = @game.add.text(0, 48, 'Made by: PI:NAME:<NAME>END_PI.', {font: '48px VT323', fill: '#fff'})
creatorLabel.x = @gw/2 - creatorLabel._width/2
creatorSubLabel = @game.add.text(0, 88, 'Twitter: usagimaru57', {font: '32px VT323', fill: '#fff'})
creatorSubLabel.x = @gw/2 - creatorSubLabel._width/2
creatorSubSubLabel = @game.add.text(0, 118, 'GitHub: oppenlander', {font: '32px VT323', fill: '#fff'})
creatorSubSubLabel.x = @gw/2 - creatorSubSubLabel._width/2
musicLabel = @game.add.text(0, @gh/2+70, 'Music: Rocco Wouters', {font: '32px VT323', fill: '#fff'})
musicLabel.x = @gw/2 - musicLabel._width/2
explosionLabel = @game.add.text(0, @gh/2+110, 'SFX: SoundBible.com', {font: '32px VT323', fill: '#fff'})
explosionLabel.x = @gw/2 - explosionLabel._width/2
startLabel = @game.add.text(10, @gh-58, 'Start: SPACE', {font: '16px VT323', fill: '#fff'})
menuLabel = @game.add.text(0, @gh-58, 'Menu/Character Select: M', {font: '16px VT323', fill: '#fff'})
menuLabel.x = @gw - (menuLabel._width + 10)
@createIdle()
update: ->
super
@player.idle()
module.exports = Credits
|
[
{
"context": "***\n# JSListView list view manage class\n# Coded by Hajime Oh-yake 2013.04.02\n#*************************************",
"end": 104,
"score": 0.9998912811279297,
"start": 90,
"tag": "NAME",
"value": "Hajime Oh-yake"
}
] | JSKit/04_JSListView.coffee | digitarhythm/codeJS | 0 | #*****************************************
# JSListView list view manage class
# Coded by Hajime Oh-yake 2013.04.02
#*****************************************
class JSListView extends JSScrollView
constructor:(frame)->
super(frame)
@_listData = null
@_orglist = null
@_clickaction = null
@_dblclickaction = null
@_style = "JSListStyleStandard"
@_textSize = 12
@_select = -1
@_clipToBounds = true
#@_scroll = true
@_delegate = @
setFrame:(frame)->
super(frame)
if ($(@_viewSelector+"_select").length)
$(@_viewSelector+"_select").width(frame.size.width)
$(@_viewSelector+"_select").height(frame.size.height)
setListData:(list)->
@_orglist = list
switch @_style
when "JSListStyleStandard", "JSListStyleDropdown"
if (@_style=="JSListStyleStandard")
size = 2
@_listData = list
else
size = 1
if (!list?)
return
@_listData = new Array()
for item in list
@_listData.push(item)
@_tag = "<select id='"+@_objectID+"_select' size='"+size+"' style='width:"+(@_frame.size.width)+"px;height:"+(@_frame.size.height)+"px;z-index:1;'>"
if (!@_listData?)
@_listData = new Array()
for i in [0...@_listData.length]
value = @_listData[i]
disp = JSEscape(value)
@_tag += "<option id='"+i+"' value='"+i+"'>"+disp+"</option>"
@_tag += "</select>"
if ($(@_viewSelector+"_select").length)
$(@_viewSelector+"_select").remove()
$(@_viewSelector).append(@_tag)
$(@_viewSelector+"_select").css("background-color", "clearColor")
$(@_viewSelector+"_select").css("border", "0px transparent")
$(@_viewSelector+"_select").css("font-size", @_textSize)
if (@_style=="JSListStyleStandard")
#$(@_viewSelector+"_select").click (e) =>
$(@_viewSelector+"_select").on 'click', (e) =>
e.stopPropagation()
@_select = $(@_viewSelector+"_select option:selected").val()
if (@_clickaction? && @_select?)
@_clickaction(@_select)
#$(@_viewSelector+"_select").dblclick (e) =>
$(@_viewSelector+"_select").on "dblclick", (e) =>
e.stopPropagation()
@_select = $(@_viewSelector+"_select option:selected").val()
if (@_dblclickaction? && @_select?)
@_dblclickaction(@_select)
else
$(@_viewSelector+"_select").change (e) =>
e.stopPropagation()
@_select = $(@_viewSelector+"_select option:selected").val()
if (@_clickaction? && @_select?)
@_clickaction(@_select)
when "JSListStyleSortable"
@_tag = "<table style='width:100%;'><tbody id='"+@_objectID+"_select'>"
if (!list?)
return
@_listData = new Array()
for item in list
@_listData.push(item)
for i in [0...@_listData.length]
disp = JSEscape(@_listData[i])
@_tag += "<tr id='"+i+"' class='ui-state-default' style='width:100%;z-index:1;'><td>"+disp+"</td></tr>"
@_tag += "</tbody></table>"
if ($(@_viewSelector+"_select").length)
$(@_viewSelector+"_select").remove()
$(@_viewSelector).append(@_tag)
$(@_viewSelector+"_select").sortable
placeholder: "ui-sortable-placeholder"
distance: 3
opacity:0.8
scroll: false
update: (event, ui)=>
@sortReflection()
if (typeof @_delegate.sortUpdate == 'function')
@_delegate.sortUpdate(event, ui)
$(@_viewSelector+"_select").disableSelection()
$(@_viewSelector+"_select").css("background-color", "transparent")
$(@_viewSelector+"_select").css("border", "0px transparent")
$(@_viewSelector+"_select").css("font-size", (@_textSize-4)+"pt")
$(@_viewSelector+"_select").width(@_frame.size.width+"px")
$(@_viewSelector+"_select").height(@_frame.size.height+"px")
count:->
return @_listData.length
objectAtIndex:(index)->
return @_listData[index]
indexOfObject:(target)->
num = @_listData.indexOf(target)
return num
getSelect:->
return @_select
setSelect:(@_select)->
$(@_viewSelector+"_select").val(@_select)
sortReflection:->
if (@_style == "JSListStyleSortable")
arr = $(@_viewSelector+"_select").sortable("toArray")
ret = []
for key, i in arr
ret[i] = @_orglist[key]
@_listData = ret
setTextSize:(@_textSize)->
if (@_listData?)
@setListData(@_listData)
addTarget:(action, tap = 1)->
if (tap == 1)
@_clickaction = action
else
@_dblclickaction = action
setStyle:(@_style)->
@setListData(@_listData)
reload:->
@setListData(@_listData)
viewDidAppear:->
super()
@setListData(@_listData)
@setSelect(@_select)
| 182688 | #*****************************************
# JSListView list view manage class
# Coded by <NAME> 2013.04.02
#*****************************************
class JSListView extends JSScrollView
constructor:(frame)->
super(frame)
@_listData = null
@_orglist = null
@_clickaction = null
@_dblclickaction = null
@_style = "JSListStyleStandard"
@_textSize = 12
@_select = -1
@_clipToBounds = true
#@_scroll = true
@_delegate = @
setFrame:(frame)->
super(frame)
if ($(@_viewSelector+"_select").length)
$(@_viewSelector+"_select").width(frame.size.width)
$(@_viewSelector+"_select").height(frame.size.height)
setListData:(list)->
@_orglist = list
switch @_style
when "JSListStyleStandard", "JSListStyleDropdown"
if (@_style=="JSListStyleStandard")
size = 2
@_listData = list
else
size = 1
if (!list?)
return
@_listData = new Array()
for item in list
@_listData.push(item)
@_tag = "<select id='"+@_objectID+"_select' size='"+size+"' style='width:"+(@_frame.size.width)+"px;height:"+(@_frame.size.height)+"px;z-index:1;'>"
if (!@_listData?)
@_listData = new Array()
for i in [0...@_listData.length]
value = @_listData[i]
disp = JSEscape(value)
@_tag += "<option id='"+i+"' value='"+i+"'>"+disp+"</option>"
@_tag += "</select>"
if ($(@_viewSelector+"_select").length)
$(@_viewSelector+"_select").remove()
$(@_viewSelector).append(@_tag)
$(@_viewSelector+"_select").css("background-color", "clearColor")
$(@_viewSelector+"_select").css("border", "0px transparent")
$(@_viewSelector+"_select").css("font-size", @_textSize)
if (@_style=="JSListStyleStandard")
#$(@_viewSelector+"_select").click (e) =>
$(@_viewSelector+"_select").on 'click', (e) =>
e.stopPropagation()
@_select = $(@_viewSelector+"_select option:selected").val()
if (@_clickaction? && @_select?)
@_clickaction(@_select)
#$(@_viewSelector+"_select").dblclick (e) =>
$(@_viewSelector+"_select").on "dblclick", (e) =>
e.stopPropagation()
@_select = $(@_viewSelector+"_select option:selected").val()
if (@_dblclickaction? && @_select?)
@_dblclickaction(@_select)
else
$(@_viewSelector+"_select").change (e) =>
e.stopPropagation()
@_select = $(@_viewSelector+"_select option:selected").val()
if (@_clickaction? && @_select?)
@_clickaction(@_select)
when "JSListStyleSortable"
@_tag = "<table style='width:100%;'><tbody id='"+@_objectID+"_select'>"
if (!list?)
return
@_listData = new Array()
for item in list
@_listData.push(item)
for i in [0...@_listData.length]
disp = JSEscape(@_listData[i])
@_tag += "<tr id='"+i+"' class='ui-state-default' style='width:100%;z-index:1;'><td>"+disp+"</td></tr>"
@_tag += "</tbody></table>"
if ($(@_viewSelector+"_select").length)
$(@_viewSelector+"_select").remove()
$(@_viewSelector).append(@_tag)
$(@_viewSelector+"_select").sortable
placeholder: "ui-sortable-placeholder"
distance: 3
opacity:0.8
scroll: false
update: (event, ui)=>
@sortReflection()
if (typeof @_delegate.sortUpdate == 'function')
@_delegate.sortUpdate(event, ui)
$(@_viewSelector+"_select").disableSelection()
$(@_viewSelector+"_select").css("background-color", "transparent")
$(@_viewSelector+"_select").css("border", "0px transparent")
$(@_viewSelector+"_select").css("font-size", (@_textSize-4)+"pt")
$(@_viewSelector+"_select").width(@_frame.size.width+"px")
$(@_viewSelector+"_select").height(@_frame.size.height+"px")
count:->
return @_listData.length
objectAtIndex:(index)->
return @_listData[index]
indexOfObject:(target)->
num = @_listData.indexOf(target)
return num
getSelect:->
return @_select
setSelect:(@_select)->
$(@_viewSelector+"_select").val(@_select)
sortReflection:->
if (@_style == "JSListStyleSortable")
arr = $(@_viewSelector+"_select").sortable("toArray")
ret = []
for key, i in arr
ret[i] = @_orglist[key]
@_listData = ret
setTextSize:(@_textSize)->
if (@_listData?)
@setListData(@_listData)
addTarget:(action, tap = 1)->
if (tap == 1)
@_clickaction = action
else
@_dblclickaction = action
setStyle:(@_style)->
@setListData(@_listData)
reload:->
@setListData(@_listData)
viewDidAppear:->
super()
@setListData(@_listData)
@setSelect(@_select)
| true | #*****************************************
# JSListView list view manage class
# Coded by PI:NAME:<NAME>END_PI 2013.04.02
#*****************************************
class JSListView extends JSScrollView
constructor:(frame)->
super(frame)
@_listData = null
@_orglist = null
@_clickaction = null
@_dblclickaction = null
@_style = "JSListStyleStandard"
@_textSize = 12
@_select = -1
@_clipToBounds = true
#@_scroll = true
@_delegate = @
setFrame:(frame)->
super(frame)
if ($(@_viewSelector+"_select").length)
$(@_viewSelector+"_select").width(frame.size.width)
$(@_viewSelector+"_select").height(frame.size.height)
setListData:(list)->
@_orglist = list
switch @_style
when "JSListStyleStandard", "JSListStyleDropdown"
if (@_style=="JSListStyleStandard")
size = 2
@_listData = list
else
size = 1
if (!list?)
return
@_listData = new Array()
for item in list
@_listData.push(item)
@_tag = "<select id='"+@_objectID+"_select' size='"+size+"' style='width:"+(@_frame.size.width)+"px;height:"+(@_frame.size.height)+"px;z-index:1;'>"
if (!@_listData?)
@_listData = new Array()
for i in [0...@_listData.length]
value = @_listData[i]
disp = JSEscape(value)
@_tag += "<option id='"+i+"' value='"+i+"'>"+disp+"</option>"
@_tag += "</select>"
if ($(@_viewSelector+"_select").length)
$(@_viewSelector+"_select").remove()
$(@_viewSelector).append(@_tag)
$(@_viewSelector+"_select").css("background-color", "clearColor")
$(@_viewSelector+"_select").css("border", "0px transparent")
$(@_viewSelector+"_select").css("font-size", @_textSize)
if (@_style=="JSListStyleStandard")
#$(@_viewSelector+"_select").click (e) =>
$(@_viewSelector+"_select").on 'click', (e) =>
e.stopPropagation()
@_select = $(@_viewSelector+"_select option:selected").val()
if (@_clickaction? && @_select?)
@_clickaction(@_select)
#$(@_viewSelector+"_select").dblclick (e) =>
$(@_viewSelector+"_select").on "dblclick", (e) =>
e.stopPropagation()
@_select = $(@_viewSelector+"_select option:selected").val()
if (@_dblclickaction? && @_select?)
@_dblclickaction(@_select)
else
$(@_viewSelector+"_select").change (e) =>
e.stopPropagation()
@_select = $(@_viewSelector+"_select option:selected").val()
if (@_clickaction? && @_select?)
@_clickaction(@_select)
when "JSListStyleSortable"
@_tag = "<table style='width:100%;'><tbody id='"+@_objectID+"_select'>"
if (!list?)
return
@_listData = new Array()
for item in list
@_listData.push(item)
for i in [0...@_listData.length]
disp = JSEscape(@_listData[i])
@_tag += "<tr id='"+i+"' class='ui-state-default' style='width:100%;z-index:1;'><td>"+disp+"</td></tr>"
@_tag += "</tbody></table>"
if ($(@_viewSelector+"_select").length)
$(@_viewSelector+"_select").remove()
$(@_viewSelector).append(@_tag)
$(@_viewSelector+"_select").sortable
placeholder: "ui-sortable-placeholder"
distance: 3
opacity:0.8
scroll: false
update: (event, ui)=>
@sortReflection()
if (typeof @_delegate.sortUpdate == 'function')
@_delegate.sortUpdate(event, ui)
$(@_viewSelector+"_select").disableSelection()
$(@_viewSelector+"_select").css("background-color", "transparent")
$(@_viewSelector+"_select").css("border", "0px transparent")
$(@_viewSelector+"_select").css("font-size", (@_textSize-4)+"pt")
$(@_viewSelector+"_select").width(@_frame.size.width+"px")
$(@_viewSelector+"_select").height(@_frame.size.height+"px")
count:->
return @_listData.length
objectAtIndex:(index)->
return @_listData[index]
indexOfObject:(target)->
num = @_listData.indexOf(target)
return num
getSelect:->
return @_select
setSelect:(@_select)->
$(@_viewSelector+"_select").val(@_select)
sortReflection:->
if (@_style == "JSListStyleSortable")
arr = $(@_viewSelector+"_select").sortable("toArray")
ret = []
for key, i in arr
ret[i] = @_orglist[key]
@_listData = ret
setTextSize:(@_textSize)->
if (@_listData?)
@setListData(@_listData)
addTarget:(action, tap = 1)->
if (tap == 1)
@_clickaction = action
else
@_dblclickaction = action
setStyle:(@_style)->
@setListData(@_listData)
reload:->
@setListData(@_listData)
viewDidAppear:->
super()
@setListData(@_listData)
@setSelect(@_select)
|
[
{
"context": "x =\n host: 'localhost'\n port: 8984\n username: 'admin'\n password: 'admin'\n database: 'letterdb'\n pat",
"end": 109,
"score": 0.9992070198059082,
"start": 104,
"tag": "USERNAME",
"value": "admin"
},
{
"context": "st'\n port: 8984\n username: 'admin'\n passwo... | config.coffee | nerdenough/letterdb | 0 | config =
title: 'LetterDB'
port: 3000
config.basex =
host: 'localhost'
port: 8984
username: 'admin'
password: 'admin'
database: 'letterdb'
path: '/rest/letterdb'
module.exports = config
| 158887 | config =
title: 'LetterDB'
port: 3000
config.basex =
host: 'localhost'
port: 8984
username: 'admin'
password: '<PASSWORD>'
database: 'letterdb'
path: '/rest/letterdb'
module.exports = config
| true | config =
title: 'LetterDB'
port: 3000
config.basex =
host: 'localhost'
port: 8984
username: 'admin'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
database: 'letterdb'
path: '/rest/letterdb'
module.exports = config
|
[
{
"context": "The `Class` unit tests\n#\n# Copyright (C) 2011-2013 Nikolay Nemshilov\n#\n{Test,should} = require('lovely')\n\neval(Test.bu",
"end": 72,
"score": 0.9998873472213745,
"start": 55,
"tag": "NAME",
"value": "Nikolay Nemshilov"
}
] | stl/core/test/class_test.coffee | lovely-io/lovely.io-stl | 2 | #
# The `Class` unit tests
#
# Copyright (C) 2011-2013 Nikolay Nemshilov
#
{Test,should} = require('lovely')
eval(Test.build)
Class = this.Lovely.Class
describe 'Class', ->
describe "new Class({..})", ->
Klass = new Class
getName: -> return this.name
setName: (name) -> this.name = name
it 'should be typeof a Function', ->
Klass.should.be.a 'function'
it 'should have those methods in its prototype', ->
Klass.prototype.getName.should.be.a 'function'
Klass.prototype.setName.should.be.a 'function'
it 'should refer to the class with prototype.constructor', ->
Klass.should.be.equal Klass.prototype.constructor
it 'should not have anything besides those names', ->
(key for key of Klass.prototype).should.eql ['constructor', 'getName', 'setName']
it 'should allow to make instances of it', ->
new Klass().should.be.instanceOf Klass
it 'should have those methods working', ->
klass = new Klass()
klass.setName 'boo-hoo'
klass.getName().should.eql 'boo-hoo'
it "should refere Lovely.Class as the default parent", ->
Klass.__super__.should.equal Class
new Klass().should.be.instanceOf Class
describe 'new Class({initialize: ...})', ->
Klass = new Class
constructor: (a, b) ->
this.name = a + '-' + b
this
it 'should call the constructor on the instance', ->
new Klass('boo', 'hoo').name.should.eql 'boo-hoo'
"should return the constructor's result if sent": ->
Klass = new Class constructor: -> ['some-other-data']
new Klass().should.eql ['some-other-data']
describe 'new Class() with a multilevel constructor inheritance', ->
Klass0 = new Class
constructor: -> this.property = 'value'
Klass1 = new Class Klass0,
method1: 'method1'
Klass2 = new Class Klass1,
method2: 'method2'
Klass3 = new Class Klass2,
method3: 'method3'
it "should handle the first level inheritance", ->
klass = new Klass1()
klass.should.be.instanceOf Klass1
klass.should.be.instanceOf Klass0
klass.property.should.eql 'value'
klass.method1.should.eql 'method1'
it "should handle the second level inheritance", ->
klass = new Klass2()
klass.should.be.instanceOf Klass2
klass.should.be.instanceOf Klass1
klass.should.be.instanceOf Klass0
klass.property.should.eql 'value'
klass.method1.should.eql 'method1'
klass.method2.should.eql 'method2'
it "should handle the third level inheritance", ->
klass = new Klass3()
klass.should.be.instanceOf Klass3
klass.should.be.instanceOf Klass2
klass.should.be.instanceOf Klass1
klass.should.be.instanceOf Klass0
klass.property.should.eql 'value'
klass.method1.should.eql 'method1'
klass.method2.should.eql 'method2'
klass.method3.should.eql 'method3'
describe 'new Class', ->
ParentKlass = new Class
constructor: -> this.prop = this.method(); this
method: (data) -> data || 'parent'
describe '\b(Parent)', ->
Klass = new Class(ParentKlass)
it "should refer '.__super__' to the parent class", ->
Klass.__super__.should.equal ParentKlass
it "should keep the parent's 'method'", ->
Klass.prototype.method.should.equal ParentKlass.prototype.method
describe '\b(Parent, {...})', ->
Klass = new Class ParentKlass, method: -> 'child'
it "should refer '.__super__' to the parent class", ->
Klass.__super__.should.be.equal ParentKlass
it "should replace the parent class method", ->
Klass.prototype.method.should.not.equal ParentKlass.prototype.method
it "should inherit the parent's class type", ->
klass = new Klass()
klass.should.be.instanceOf Klass
klass.should.be.instanceOf ParentKlass
it "should call this class methods", ->
new Klass().method().should.eql 'child'
describe '\b(Parent, {...}) and $super calls', ->
Klass = new Class ParentKlass,
method: -> this.$super('parent-data + ') + 'child-data'
it "should preform a proper super-call", ->
new Klass().method().should.eql 'parent-data + child-data'
describe '\b(Parent, {constructor: ->})', ->
Klass = new Class ParentKlass,
constructor: ->
this.prop = this.$super().prop + ' + constructor'
this
method: ->
this.$super() + ' + child'
it "should still refer the constructor to the correct class", ->
Klass.prototype.constructor.should.be.equal Klass
it "should correctly refer the __super__ property", ->
Klass.__super__.should.be.equal ParentKlass
it "should call everything in correct scopes", ->
klass = new Klass()
klass.prop.should.eql 'parent + child + constructor'
describe "class level attributes inheritance", ->
Klass1 = new Class
extend:
m1: {},
m2: []
Klass2 = new Class(Klass1)
it "should link all the class level attributes down to the subclass", ->
Klass2.m1.should.equal Klass1.m1
Klass2.m2.should.equal Klass1.m2
describe 'new Class() with shared modules', ->
ext = a: [], b: []
inc = c: [], d: []
Klass = new Class
include: inc
extend: ext
it "should extend the class-level with the 'extend' module", ->
Klass.a.should.be.equal ext.a
Klass.b.should.be.equal ext.b
Klass.should.not.have.keys(['c'])
Klass.should.not.have.keys(['d'])
it "should extend the prototype with the 'include' module", ->
Klass.prototype.c.should.be.equal inc.c
Klass.prototype.d.should.be.equal inc.d
Klass.prototype.should.not.have.keys(['a'])
Klass.prototype.should.not.have.keys(['b'])
describe "in class methods overloading", ->
Klass = new Class
method: -> "original"
Klass.include
method: -> this.$super() + "+overload"
it "should overload the method right in the class", ->
new Klass().method().should.be.eql "original+overload"
describe "\b.inherit() method", ->
Klass1 = new Class
method1: ->
Klass2 = Klass1.inherit
method2: ->
it "should create a new class", ->
Klass2.should.be.a 'function'
it "should link correctly the parent class", ->
Klass2.__super__.should.equal Klass1
it "should make a correctly instanceable class", ->
k = new Klass2()
k.should.be.instanceOf Klass2
k.should.be.instanceOf Klass1
describe "'whenInherited' callback", ->
Klass1 = new Class
whenInherited: (NewKlass)->
NewKlass.patched = true
NewKlass.context = this
Klass2 = new Class(Klass1)
it "should receive the new class for patches", ->
(Klass2.patched is true).should.be.true
it "should run the callback in the context of the parent class", ->
(Klass2.context is Klass1).should.be.true
| 72568 | #
# The `Class` unit tests
#
# Copyright (C) 2011-2013 <NAME>
#
{Test,should} = require('lovely')
eval(Test.build)
Class = this.Lovely.Class
describe 'Class', ->
describe "new Class({..})", ->
Klass = new Class
getName: -> return this.name
setName: (name) -> this.name = name
it 'should be typeof a Function', ->
Klass.should.be.a 'function'
it 'should have those methods in its prototype', ->
Klass.prototype.getName.should.be.a 'function'
Klass.prototype.setName.should.be.a 'function'
it 'should refer to the class with prototype.constructor', ->
Klass.should.be.equal Klass.prototype.constructor
it 'should not have anything besides those names', ->
(key for key of Klass.prototype).should.eql ['constructor', 'getName', 'setName']
it 'should allow to make instances of it', ->
new Klass().should.be.instanceOf Klass
it 'should have those methods working', ->
klass = new Klass()
klass.setName 'boo-hoo'
klass.getName().should.eql 'boo-hoo'
it "should refere Lovely.Class as the default parent", ->
Klass.__super__.should.equal Class
new Klass().should.be.instanceOf Class
describe 'new Class({initialize: ...})', ->
Klass = new Class
constructor: (a, b) ->
this.name = a + '-' + b
this
it 'should call the constructor on the instance', ->
new Klass('boo', 'hoo').name.should.eql 'boo-hoo'
"should return the constructor's result if sent": ->
Klass = new Class constructor: -> ['some-other-data']
new Klass().should.eql ['some-other-data']
describe 'new Class() with a multilevel constructor inheritance', ->
Klass0 = new Class
constructor: -> this.property = 'value'
Klass1 = new Class Klass0,
method1: 'method1'
Klass2 = new Class Klass1,
method2: 'method2'
Klass3 = new Class Klass2,
method3: 'method3'
it "should handle the first level inheritance", ->
klass = new Klass1()
klass.should.be.instanceOf Klass1
klass.should.be.instanceOf Klass0
klass.property.should.eql 'value'
klass.method1.should.eql 'method1'
it "should handle the second level inheritance", ->
klass = new Klass2()
klass.should.be.instanceOf Klass2
klass.should.be.instanceOf Klass1
klass.should.be.instanceOf Klass0
klass.property.should.eql 'value'
klass.method1.should.eql 'method1'
klass.method2.should.eql 'method2'
it "should handle the third level inheritance", ->
klass = new Klass3()
klass.should.be.instanceOf Klass3
klass.should.be.instanceOf Klass2
klass.should.be.instanceOf Klass1
klass.should.be.instanceOf Klass0
klass.property.should.eql 'value'
klass.method1.should.eql 'method1'
klass.method2.should.eql 'method2'
klass.method3.should.eql 'method3'
describe 'new Class', ->
ParentKlass = new Class
constructor: -> this.prop = this.method(); this
method: (data) -> data || 'parent'
describe '\b(Parent)', ->
Klass = new Class(ParentKlass)
it "should refer '.__super__' to the parent class", ->
Klass.__super__.should.equal ParentKlass
it "should keep the parent's 'method'", ->
Klass.prototype.method.should.equal ParentKlass.prototype.method
describe '\b(Parent, {...})', ->
Klass = new Class ParentKlass, method: -> 'child'
it "should refer '.__super__' to the parent class", ->
Klass.__super__.should.be.equal ParentKlass
it "should replace the parent class method", ->
Klass.prototype.method.should.not.equal ParentKlass.prototype.method
it "should inherit the parent's class type", ->
klass = new Klass()
klass.should.be.instanceOf Klass
klass.should.be.instanceOf ParentKlass
it "should call this class methods", ->
new Klass().method().should.eql 'child'
describe '\b(Parent, {...}) and $super calls', ->
Klass = new Class ParentKlass,
method: -> this.$super('parent-data + ') + 'child-data'
it "should preform a proper super-call", ->
new Klass().method().should.eql 'parent-data + child-data'
describe '\b(Parent, {constructor: ->})', ->
Klass = new Class ParentKlass,
constructor: ->
this.prop = this.$super().prop + ' + constructor'
this
method: ->
this.$super() + ' + child'
it "should still refer the constructor to the correct class", ->
Klass.prototype.constructor.should.be.equal Klass
it "should correctly refer the __super__ property", ->
Klass.__super__.should.be.equal ParentKlass
it "should call everything in correct scopes", ->
klass = new Klass()
klass.prop.should.eql 'parent + child + constructor'
describe "class level attributes inheritance", ->
Klass1 = new Class
extend:
m1: {},
m2: []
Klass2 = new Class(Klass1)
it "should link all the class level attributes down to the subclass", ->
Klass2.m1.should.equal Klass1.m1
Klass2.m2.should.equal Klass1.m2
describe 'new Class() with shared modules', ->
ext = a: [], b: []
inc = c: [], d: []
Klass = new Class
include: inc
extend: ext
it "should extend the class-level with the 'extend' module", ->
Klass.a.should.be.equal ext.a
Klass.b.should.be.equal ext.b
Klass.should.not.have.keys(['c'])
Klass.should.not.have.keys(['d'])
it "should extend the prototype with the 'include' module", ->
Klass.prototype.c.should.be.equal inc.c
Klass.prototype.d.should.be.equal inc.d
Klass.prototype.should.not.have.keys(['a'])
Klass.prototype.should.not.have.keys(['b'])
describe "in class methods overloading", ->
Klass = new Class
method: -> "original"
Klass.include
method: -> this.$super() + "+overload"
it "should overload the method right in the class", ->
new Klass().method().should.be.eql "original+overload"
describe "\b.inherit() method", ->
Klass1 = new Class
method1: ->
Klass2 = Klass1.inherit
method2: ->
it "should create a new class", ->
Klass2.should.be.a 'function'
it "should link correctly the parent class", ->
Klass2.__super__.should.equal Klass1
it "should make a correctly instanceable class", ->
k = new Klass2()
k.should.be.instanceOf Klass2
k.should.be.instanceOf Klass1
describe "'whenInherited' callback", ->
Klass1 = new Class
whenInherited: (NewKlass)->
NewKlass.patched = true
NewKlass.context = this
Klass2 = new Class(Klass1)
it "should receive the new class for patches", ->
(Klass2.patched is true).should.be.true
it "should run the callback in the context of the parent class", ->
(Klass2.context is Klass1).should.be.true
| true | #
# The `Class` unit tests
#
# Copyright (C) 2011-2013 PI:NAME:<NAME>END_PI
#
{Test,should} = require('lovely')
eval(Test.build)
Class = this.Lovely.Class
describe 'Class', ->
describe "new Class({..})", ->
Klass = new Class
getName: -> return this.name
setName: (name) -> this.name = name
it 'should be typeof a Function', ->
Klass.should.be.a 'function'
it 'should have those methods in its prototype', ->
Klass.prototype.getName.should.be.a 'function'
Klass.prototype.setName.should.be.a 'function'
it 'should refer to the class with prototype.constructor', ->
Klass.should.be.equal Klass.prototype.constructor
it 'should not have anything besides those names', ->
(key for key of Klass.prototype).should.eql ['constructor', 'getName', 'setName']
it 'should allow to make instances of it', ->
new Klass().should.be.instanceOf Klass
it 'should have those methods working', ->
klass = new Klass()
klass.setName 'boo-hoo'
klass.getName().should.eql 'boo-hoo'
it "should refere Lovely.Class as the default parent", ->
Klass.__super__.should.equal Class
new Klass().should.be.instanceOf Class
describe 'new Class({initialize: ...})', ->
Klass = new Class
constructor: (a, b) ->
this.name = a + '-' + b
this
it 'should call the constructor on the instance', ->
new Klass('boo', 'hoo').name.should.eql 'boo-hoo'
"should return the constructor's result if sent": ->
Klass = new Class constructor: -> ['some-other-data']
new Klass().should.eql ['some-other-data']
describe 'new Class() with a multilevel constructor inheritance', ->
Klass0 = new Class
constructor: -> this.property = 'value'
Klass1 = new Class Klass0,
method1: 'method1'
Klass2 = new Class Klass1,
method2: 'method2'
Klass3 = new Class Klass2,
method3: 'method3'
it "should handle the first level inheritance", ->
klass = new Klass1()
klass.should.be.instanceOf Klass1
klass.should.be.instanceOf Klass0
klass.property.should.eql 'value'
klass.method1.should.eql 'method1'
it "should handle the second level inheritance", ->
klass = new Klass2()
klass.should.be.instanceOf Klass2
klass.should.be.instanceOf Klass1
klass.should.be.instanceOf Klass0
klass.property.should.eql 'value'
klass.method1.should.eql 'method1'
klass.method2.should.eql 'method2'
it "should handle the third level inheritance", ->
klass = new Klass3()
klass.should.be.instanceOf Klass3
klass.should.be.instanceOf Klass2
klass.should.be.instanceOf Klass1
klass.should.be.instanceOf Klass0
klass.property.should.eql 'value'
klass.method1.should.eql 'method1'
klass.method2.should.eql 'method2'
klass.method3.should.eql 'method3'
describe 'new Class', ->
ParentKlass = new Class
constructor: -> this.prop = this.method(); this
method: (data) -> data || 'parent'
describe '\b(Parent)', ->
Klass = new Class(ParentKlass)
it "should refer '.__super__' to the parent class", ->
Klass.__super__.should.equal ParentKlass
it "should keep the parent's 'method'", ->
Klass.prototype.method.should.equal ParentKlass.prototype.method
describe '\b(Parent, {...})', ->
Klass = new Class ParentKlass, method: -> 'child'
it "should refer '.__super__' to the parent class", ->
Klass.__super__.should.be.equal ParentKlass
it "should replace the parent class method", ->
Klass.prototype.method.should.not.equal ParentKlass.prototype.method
it "should inherit the parent's class type", ->
klass = new Klass()
klass.should.be.instanceOf Klass
klass.should.be.instanceOf ParentKlass
it "should call this class methods", ->
new Klass().method().should.eql 'child'
describe '\b(Parent, {...}) and $super calls', ->
Klass = new Class ParentKlass,
method: -> this.$super('parent-data + ') + 'child-data'
it "should preform a proper super-call", ->
new Klass().method().should.eql 'parent-data + child-data'
describe '\b(Parent, {constructor: ->})', ->
Klass = new Class ParentKlass,
constructor: ->
this.prop = this.$super().prop + ' + constructor'
this
method: ->
this.$super() + ' + child'
it "should still refer the constructor to the correct class", ->
Klass.prototype.constructor.should.be.equal Klass
it "should correctly refer the __super__ property", ->
Klass.__super__.should.be.equal ParentKlass
it "should call everything in correct scopes", ->
klass = new Klass()
klass.prop.should.eql 'parent + child + constructor'
describe "class level attributes inheritance", ->
Klass1 = new Class
extend:
m1: {},
m2: []
Klass2 = new Class(Klass1)
it "should link all the class level attributes down to the subclass", ->
Klass2.m1.should.equal Klass1.m1
Klass2.m2.should.equal Klass1.m2
describe 'new Class() with shared modules', ->
ext = a: [], b: []
inc = c: [], d: []
Klass = new Class
include: inc
extend: ext
it "should extend the class-level with the 'extend' module", ->
Klass.a.should.be.equal ext.a
Klass.b.should.be.equal ext.b
Klass.should.not.have.keys(['c'])
Klass.should.not.have.keys(['d'])
it "should extend the prototype with the 'include' module", ->
Klass.prototype.c.should.be.equal inc.c
Klass.prototype.d.should.be.equal inc.d
Klass.prototype.should.not.have.keys(['a'])
Klass.prototype.should.not.have.keys(['b'])
describe "in class methods overloading", ->
Klass = new Class
method: -> "original"
Klass.include
method: -> this.$super() + "+overload"
it "should overload the method right in the class", ->
new Klass().method().should.be.eql "original+overload"
describe "\b.inherit() method", ->
Klass1 = new Class
method1: ->
Klass2 = Klass1.inherit
method2: ->
it "should create a new class", ->
Klass2.should.be.a 'function'
it "should link correctly the parent class", ->
Klass2.__super__.should.equal Klass1
it "should make a correctly instanceable class", ->
k = new Klass2()
k.should.be.instanceOf Klass2
k.should.be.instanceOf Klass1
describe "'whenInherited' callback", ->
Klass1 = new Class
whenInherited: (NewKlass)->
NewKlass.patched = true
NewKlass.context = this
Klass2 = new Class(Klass1)
it "should receive the new class for patches", ->
(Klass2.patched is true).should.be.true
it "should run the callback in the context of the parent class", ->
(Klass2.context is Klass1).should.be.true
|
[
{
"context": "ool)\n\t\t\tif justCreated and R.tools[@name]?\n\t\t\t\tg[@Path.constructor.name] = @Path\n\t\t\t\tR.tools[@name].remove()\n\t\t\t\tdelete R",
"end": 2752,
"score": 0.9872232675552368,
"start": 2731,
"tag": "EMAIL",
"value": "Path.constructor.name"
},
{
"context": "lly)... | coffee/Tools/PathTool.coffee | arthursw/comme-un-dessein-client | 0 | define ['paper', 'R', 'Utils/Utils', 'Tools/Tool', 'UI/Button', 'i18next' ], (P, R, Utils, Tool, Button, i18next) ->
# PathTool: the mother class of all drawing tools
# doctodo: P.Path are created with three steps:
# - begin: initialize RPath: create the group, controlPath etc., and initialize the drawing
# - update: update the drawing
# - end: finish the drawing and finish RPath initialization
# doctodo: explain polygon mode
# begin, update, and end handlers are called by onMouseDown handler (then from == R.me, data == null) and by socket.on "begin" signal (then from == author of the signal, data == Item initial data)
# begin, update, and end handlers emit the events to websocket
class PathTool extends Tool
@label = ''
@description = ''
@iconURL = ''
@buttonClasses = 'displayName btn-success'
@cursor =
position:
x: 0, y: 32
name: 'crosshair'
icon: if R.style == 'line' then 'mouse_draw' else null
@drawItems = true
@emitSocket = false
@maxDraftSize = 350
@computeDraftBounds: (paths=null)->
bounds = R.Drawing.getDraft()?.getBounds()
# console.log(bounds)
return bounds
@draftIsTooBig: (paths=null, tolerance=0)->
draftBounds = @computeDraftBounds(paths)
console.log(draftBounds)
return @draftBoundsIsTooBig(draftBounds, tolerance)
@draftBoundsIsTooBig: (draftBounds, tolerance=0)->
return draftBounds? and draftBounds.width > @maxDraftSize - tolerance or draftBounds.height > @maxDraftSize - tolerance
@displayDraftIsTooBigError: ()->
R.alertManager.alert 'Your drawing is too big', 'error'
return
# Find or create a button for the tool in the sidebar (if the button is created, add it default or favorite tool list depending on the user settings stored in local storage, and whether the tool was just created in a newly created script)
# set its name and icon if an icon url is provided, or create an icon with the letters of the name otherwise
# the icon will be made with the first two letters of the name if the name is in one word, or the first letter of each words of the name otherwise
# @param [RPath constructor] the RPath which will be created by this tool
# @param [Boolean] whether the tool was just created (with the code editor) or not
constructor: (@Path, justCreated=false) ->
@name = @Path.label
@constructor.label = @name
if @Path.description then @constructor.description = @Path.rdescription
if @Path.iconURL then @constructor.iconURL = @Path.iconURL
if @Path.category then @constructor.category = @Path.category
if @Path.cursor then @constructor.cursor = @Path.cursor
# delete tool if it already exists (when user creates a tool)
if justCreated and R.tools[@name]?
g[@Path.constructor.name] = @Path
R.tools[@name].remove()
delete R.tools[@name]
R.lastPathCreated = @Path
R.tools[@name] = @
# check if a button already exists (when created fom a module)
# @btnJ = R.sidebar.allToolsJ.find('li[data-name="'+@name+'"]')
@btnJ = R.sidebar.favoriteToolsJ.find('li[data-name="'+@name+'"]')
# create button only if it does not exist
super(@btnJ.length==0)
if justCreated
@select()
if not R.userAuthenticated?
R.toolManager.enableDrawingButton(false)
return
# Remove tool button, useful when user create a tool which already existed (overwrite the tool)
remove: () ->
@btnJ.remove()
return
# setButtonValidate: ()->
# newName = i18next.t('Submit drawing')
# @btnJ.find('.tool-name').attr('data-i18n', newName).text(newName)
# @btnJ.find('img').attr('src', '/static/images/icons/inverted/icones_icon_ok.png')
# return
# setButtonDraw: ()->
# newName = i18next.t('Precise path')
# @btnJ.find('.tool-name').attr('data-i18n', newName).text(newName)
# @btnJ.find('img').attr('src', '/static/images/icons/inverted/icones_icon_pen.png')
# return
# Select: add the mouse move listener on the tool (useful when creating a path in polygon mode)
# Shows the draft limits, enters drawing mode and, unless triggered by the
# middle mouse button, re-frames the view on the current draft.
# todo: move this to main, have a global onMouseMove handler like other handlers
# @param [Boolean] deselectItems: forwarded to Tool.select
# @param [Boolean] updateParameters: forwarded to Tool.select
# @param [Boolean] forceSelect: bypass the authentication guard
# @param [Boolean] fromMiddleMouseButton: true when selected via wheel click
select: (deselectItems=true, updateParameters=true, forceSelect=false, fromMiddleMouseButton=false)->
  # refuse to draw when the city edition is over
  if R.city?.finished
    R.alertManager.alert "Cette édition est terminée, vous ne pouvez plus dessiner.", 'info'
    return
  if not R.userAuthenticated and not forceSelect
    R.alertManager.alert 'Log in before drawing', 'info'
    return
  # if R.selectedTool != @
  #   @setButtonValidate()
  # else
  #   @setButtonDraw()
  #   R.drawingPanel.submitDrawingClicked()
  #   return
  # R.rasterizer.drawItems()
  @showDraftLimits()
  R.tracer?.show()
  super(deselectItems, updateParameters, fromMiddleMouseButton)
  R.view.tool.onMouseMove = @move
  R.toolManager.enterDrawingMode()
  if not fromMiddleMouseButton
    draft = R.Drawing.getDraft()
    if draft?
      bounds = draft.getBounds()
      if bounds?
        # bring the draft back in view (75px margin), zooming in to at least 1
        if not P.view.bounds.expand(-75).contains(bounds.center)
          R.view.fitRectangle(bounds, false, if P.view.zoom < 1 then 1 else P.view.zoom)
          if P.view.zoom < 1
            R.alertManager.alert 'You can zoom in to draw more easily', 'info'
  return
# Refresh the parameter panel for this tool (controller update currently disabled).
updateParameters: ()->
  # R.controllerManager.setSelectedTool(@Path)
  return
# Deselect: remove the mouse move listener
# Also finishes any polygon-mode path in progress and hides the draft limits.
deselect: ()->
  # @setButtonDraw()
  super()
  @finish()
  # R.tracer?.hide()
  @hideDraftLimits()
  R.view.tool.onMouseMove = null
  # R.toolManager.leaveDrawingMode()
  return
# Begin path action:
# - deselect all and create new P.Path in all case except in polygonMode (add path to R.currentPaths)
# - emit event on websocket (if user is the author of the event)
# @param [Paper event or REvent] (usually) mouse down event
# @param [String] author (username) of the event
# @param [Object] Item initial data (strokeWidth, strokeColor, etc.)
# begin, update, and end handlers are called by onMouseDown handler (then from == R.me, data == null) and by socket.on "begin" signal (then from == author of the signal, data == Item initial data)
begin: (event, from=R.me, data=null) ->
  if event.event.which == 2 then return # if middle mouse button (wheel) pressed: return
  if R.tracer?.draggingImage then return
  # refuse to draw below 10% zoom
  if 100 * P.view.zoom < 10
    R.alertManager.alert("You can not draw path at a zoom smaller than 10.", "Info")
    return
  if @draftLimit? and not @draftLimit.contains(event.point)
    @constructor.displayDraftIsTooBigError()
    return
  # deselect all and create new P.Path in all case except in polygonMode
  if not (R.currentPaths[from]? and R.currentPaths[from].data?.polygonMode) # if not in polygon mode
    R.tools.select.deselectAll(false)
    R.currentPaths[from] = new @Path(Date.now(), data, null, null, null, null, R.me)
    # R.currentPaths[from].select(false, false)
  # in straight-line modes, animate a growing circle preview instead of a stroke
  if @circleMode()
    @circlePathRadius = 0.1
    @circlePathCenter = event.point
    # snap the circle center to the mode's grid in snapped modes
    if R.drawingMode in R.Path.PrecisePath.snappedModes
      @circlePathCenter = Utils.Snap.snap2D(event.point, if R.drawingMode == 'lineOrthoDiag' then R.Path.PrecisePath.lineOrthoGridSize else R.Path.PrecisePath.orthoGridSize / 2)
    @animateCircle(0, true)
    @animateCircleIntervalID = setInterval(@animateCircle, 150)
  R.currentPaths[from].beginCreate(event.point, event, false)
  # emit event on websocket (if user is the author of the event)
  # if R.me? and from==R.me then R.socket.emit( "begin", R.me, R.eventToObject(event), @name, R.currentPaths[from].data )
  if @constructor.emitSocket and R.me? and from==R.me
    data = R.currentPaths[from].data
    data.id = R.currentPaths[from].id
    # R.socket.emit "bounce", tool: @name, function: "begin", arguments: [event, R.me, data]
  @using = true
  return
# True when the current drawing mode is one of the straight/snapped modes
# (these modes show a growing circle preview before the path is created).
circleMode: ()->
  return R.drawingMode in ['line', 'lineOrthoDiag', 'orthoDiag', 'ortho']
# Animation tick for the circle preview used in straight-line modes: each
# tick the circle is redrawn with a larger radius; the animation stops
# itself when the preview or the current path has gone away.
# @param [Number] time: tick argument, unused
# @param [Boolean] createCircle: force creating the preview on the first call
# @param [String] author (username) of the event
animateCircle: (time, createCircle=false, from=R.me)=>
  path = R.currentPaths[from]
  if (createCircle or @circlePath?) and path?
    @circlePath?.remove()
    @circlePath = new P.Path.Circle(@circlePathCenter, @circlePathRadius)
    @circlePath.strokeColor = path.data.strokeColor
    @circlePath.strokeWidth = path.data.strokeWidth
    @circlePathRadius += 4
  else
    clearInterval(@animateCircleIntervalID)
    @animateCircleIntervalID = null
  return
# Show the draft-limit overlay: four translucent rectangles covering the
# parts of the view where drawing would make the draft exceed maxDraftSize.
# @return the limit rectangle (@draftLimit), or null when there is no draft
showDraftLimits: ()->
  @hideDraftLimits()
  draftBounds = @constructor.computeDraftBounds()
  path = R.currentPaths[R.me]
  if path?
    drawingBounds = path.getDrawingBounds()
    draftBounds = if draftBounds? then draftBounds.unite(drawingBounds) else drawingBounds
  return null if not draftBounds? or draftBounds.area == 0
  viewBounds = R.view.grid.limitCD.bounds.clone()
  @draftLimit = draftBounds.expand(2 * (@constructor.maxDraftSize - draftBounds.width), 2 * (@constructor.maxDraftSize - draftBounds.height))
  # draftLimitRectangle = new P.Path.Rectangle(@draftLimit)
  # @limit = R.view.grid.limitCD.clone().subtract(draftLimitRectangle)
  # @limit.fillColor = new P.Color(0,0,0,0.25)
  # top, left, right and bottom bands around @draftLimit
  bands = [
    [viewBounds.topLeft, new P.Point(viewBounds.right, @draftLimit.top)]
    [new P.Point(viewBounds.left, @draftLimit.top), new P.Point(@draftLimit.left, @draftLimit.bottom)]
    [new P.Point(@draftLimit.right, @draftLimit.top), new P.Point(viewBounds.right, @draftLimit.bottom)]
    [new P.Point(viewBounds.left, @draftLimit.bottom), viewBounds.bottomRight]
  ]
  @limit = new P.Group()
  for [cornerA, cornerB] in bands
    band = new P.Path.Rectangle(cornerA, cornerB)
    band.fillColor = new P.Color(0, 0, 0, 0.25)
    @limit.addChild(band)
  R.view.selectionLayer.addChild(@limit)
  @limit.sendToBack()
  return @draftLimit
# Remove the draft-limit overlay (if any) and forget the limit rectangle.
hideDraftLimits: ()->
  @limit?.remove()
  @draftLimit = null
  return
# Update path action:
# update path action and emit event on websocket (if user is the author of the event)
# Rejects the point (and ends the path) when it leaves the draft limits or
# the drawing area, except in straight-line modes.
# @param [Paper event or REvent] (usually) mouse drag event
# @param [String] author (username) of the event
update: (event, from=R.me) ->
  path = R.currentPaths[from]
  if not path? then return # when the path has been deleted because too big
  # first drag event in a straight-line mode: drop the circle preview,
  # the user is drawing a stroke instead
  if @circleMode() and @circlePath?
    @circlePath.remove()
    @circlePath = null
    clearInterval(@animateCircleIntervalID)
  draftLimit = @showDraftLimits()
  draftIsTooBig = draftLimit? and not draftLimit.expand(-20).contains(event.point)
  draftIsOutsideFrame = not R.view.contains(event.point)
  if draftIsTooBig or draftIsOutsideFrame
    # if path.path?
    #   @previousPathColor ?= path.path.strokeColor
    #   path.path.strokeColor = 'red'
    if R.drawingMode != 'line' and R.drawingMode != 'lineOrthoDiag'
      if draftIsTooBig
        @constructor.displayDraftIsTooBigError()
      else if draftIsOutsideFrame
        R.alertManager.alert 'Your path must be in the drawing area', 'error'
      @end(event, from)
      # flash a red copy of the rejected path for one second
      if path.path?
        p = path.path.clone()
        p.strokeColor = 'red'
        R.view.mainLayer.addChild(p)
        setTimeout((()=> p.remove()), 1000)
      @showDraftLimits()
    # lastSegmentToPoint = new P.Path()
    # lastSegmentToPoint.add(path.controlPath.lastSegment)
    # lastSegmentToPoint.add(event.point)
    # draftLimitRectangle = new P.Path.Rectangle(draftLimit.expand(-10))
    # intersections = draftLimitRectangle.getIntersections(lastSegmentToPoint)
    # draftLimitRectangle.remove()
    # lastSegmentToPoint.remove()
    # if intersections.length > 0
    #   path.updateCreate(intersections[0].point, event, false)
    # @constructor.displayDraftIsTooBigError()
    # @end(event, from)
    return
  # else if @previousPathColor? and path.path?
  #   path.path.strokeColor = @previousPathColor
  path.updateCreate(event.point, event, false)
  # R.currentPaths[from].group.visible = true
  # if R.me? and from==R.me then R.socket.emit( "update", R.me, R.eventToObject(event), @name)
  # if @constructor.emitSocket and R.me? and from==R.me then R.socket.emit "bounce", tool: @name, function: "update", arguments: [event, R.me]
  return
# Update path action (usually from a mouse move event, necessary for the polygon mode):
# @param [Paper event or REvent] (usually) mouse move event
move: (event) ->
  # only polygon-mode paths preview the segment under the cursor
  if R.currentPaths[R.me]?.data?.polygonMode then R.currentPaths[R.me].createMove?(event)
  return
# Finalize the current path of *from*: when the local user is the author,
# save and rasterize it (requires being logged in); otherwise just close
# the remote path. In both cases the path leaves R.currentPaths.
# @param [Paper event or REvent] event that ended the path
# @param [String] author (username) of the event
createPath: (event, from)->
  path = R.currentPaths[from]
  if not path? then return # when the path has been deleted because too big
  if not path.group then return
  if R.me? and from==R.me # if user is the author of the event: select and save path and emit event on websocket
    # if path.rectangle.area == 0
    #   path.remove()
    #   delete R.currentPaths[from]
    #   return
    # bounds = path.getBounds()
    # locks = Lock.getLocksWhichIntersect(bounds)
    # for lock in locks
    #   if lock.rectangle.contains(bounds)
    #     if lock.owner == R.me
    #       lock.addItem(path)
    #     else
    #       R.alertManager.alert("The path intersects with a lock", "Warning")
    #       path.remove()
    #       delete R.currentPaths[from]
    #       return
    # if path.getDrawingBounds().area > R.rasterizer.maxArea()
    #   R.alertManager.alert("The path is too big", "Warning")
    #   path.remove()
    #   delete R.currentPaths[from]
    #   return
    # if @constructor.emitSocket and R.me? and from==R.me then R.socket.emit "bounce", tool: @name, function: "createPath", arguments: [event, R.me]
    # anonymous users can draw but their path is never persisted
    if (not R.me?) or not _.isString(R.me)
      R.alertManager.alert("You must log in before drawing, your drawing won't be saved", "Info")
      return
    path.save(true)
    path.rasterize()
    R.rasterizer.rasterize(path)
    R.toolManager.updateButtonsVisibility()
    # path.select(false)
  else
    path.endCreate(event.point, event)
  delete R.currentPaths[from]
  return
# End path action:
# - end path action
# - if not in polygon mode: select and save path and emit event on websocket (if user is the author of the event), (remove path from R.currentPaths)
# In straight-line modes the in-progress path is replaced by a path built
# from the circle preview's segments.
# @param [Paper event or REvent] (usually) mouse up event
# @param [String] author (username) of the event
# @return [Boolean] false when the path was rejected (too big / outside), undefined otherwise
end: (event, from=R.me) ->
  @using = false
  path = R.currentPaths[from]
  if not path? then return false # when the path has been deleted because too big
  # refresh the limit overlay; the limit rectangle is read from @draftLimit below
  # (the return value was previously bound to an unused local)
  @showDraftLimits()
  if @circlePath?
    # straight-line mode: discard the in-progress path and rebuild one
    # from the circle preview
    R.currentPaths[from].remove()
    delete R.currentPaths[from]
    draftIsOutsideFrame = not R.view.contains(@circlePath.bounds)
    draftIsTooBig = @draftLimit? and not @draftLimit.contains(@circlePath.bounds)
    if draftIsTooBig
      @constructor.displayDraftIsTooBigError()
      return false
    else if draftIsOutsideFrame
      R.alertManager.alert 'Your path must be in the drawing area', 'error'
      return false
    path = new @Path(Date.now(), null, null, null, null, null, R.me)
    path.ignoreDrawingMode = true
    path.beginCreate(@circlePath.getPointAt(0), event, false)
    path.controlPath.removeSegments()
    path.controlPath.addSegments(@circlePath.segments)
    # close the outline by repeating the first segment
    path.controlPath.addSegment(@circlePath.firstSegment)
    path.rectangle = path.controlPath.bounds.expand(3*path.data.strokeWidth)
    path.draw()
    # (circleLength is only needed by the sampling code below; both kept commented)
    # circleLength = @circlePath.getLength()
    # step = 10
    # for i in [step .. circleLength] by step
    #   p = @circlePath.getPointAt(i)
    #   path.updateCreate(p, event, false)
    # path.endCreate(@circlePath.getPointAt(circleLength), event, false)
    R.currentPaths[from] = path
    @circlePath.remove()
    @circlePath = null
    clearInterval(@animateCircleIntervalID)
    @createPath(event, from)
    R.drawingPanel.showSubmitDrawing()
    return
  # if R.view.grid.rectangleOverlapsTwoPlanets(path.controlPath.bounds.expand(path.data.strokeWidth))
  #   R.alertManager.alert 'Your path must be in the drawing area', 'error'
  #   R.currentPaths[from].remove()
  #   delete R.currentPaths[from]
  #   return false
  if @draftLimit? and not @draftLimit.contains(R.currentPaths[from].controlPath.bounds)
    @constructor.displayDraftIsTooBigError()
    R.currentPaths[from].remove()
    delete R.currentPaths[from]
    return false
  path.endCreate(event.point, event, false)
  if not path.data?.polygonMode
    @createPath(event, from)
    R.drawingPanel.showSubmitDrawing()
  return
# Finish path action (necessary in polygon mode):
# - check that we are in polygon mode (return otherwise)
# - end path action
# - select and save path and emit event on websocket (if user is the author of the event), (remove path from R.currentPaths)
# @param [String] author (username) of the event
# @param [Paper event or REvent] optional event forwarded to createPath.
#   The original body referenced a bare `event`, which only resolved via the
#   non-standard global `window.event`; an explicit parameter is safer and
#   backward compatible (createPath only dereferences it for remote authors).
# @return [Boolean] whether a polygon-mode path was finished
finish: (from=R.me, event=null)->
  if not R.currentPaths[R.me]?.data?.polygonMode then return false
  R.currentPaths[from].finish()
  @createPath(event, from)
  return true
# Keyboard shortcuts: enter finishes the current polygon-mode path;
# escape finishes it or, failing that, deselects everything.
keyUp: (event)->
  if event.key == 'enter'
    @finish?()
  else if event.key == 'escape'
    unless @finish?()
      R.tools.select.deselectAll()
  return
R.Tools.Path = PathTool
return PathTool
| 172541 | define ['paper', 'R', 'Utils/Utils', 'Tools/Tool', 'UI/Button', 'i18next' ], (P, R, Utils, Tool, Button, i18next) ->
# PathTool: the mother class of all drawing tools
# doctodo: P.Path are created with three steps:
# - begin: initialize RPath: create the group, controlPath etc., and initialize the drawing
# - update: update the drawing
# - end: finish the drawing and finish RPath initialization
# doctodo: explain polygon mode
# begin, update, and end handlers are called by onMouseDown handler (then from == R.me, data == null) and by socket.on "begin" signal (then from == author of the signal, data == Item initial data)
# begin, update, and end handlers emit the events to websocket
class PathTool extends Tool
# Display name and metadata of the tool (overridden per tool from its RPath).
@label = ''
@description = ''
@iconURL = ''
@buttonClasses = 'displayName btn-success'
# Mouse cursor shown while the tool is selected.
@cursor =
  position:
    x: 0, y: 32
  name: 'crosshair'
  icon: if R.style == 'line' then 'mouse_draw' else null
@drawItems = true
# Whether begin/update/end events are forwarded over the websocket.
@emitSocket = false
# Maximum draft width/height — presumably in project coordinates; confirm.
@maxDraftSize = 350
# Bounds of the current draft drawing, or undefined when there is none.
# @param [Array] paths: accepted but unused here — presumably for overrides; confirm
@computeDraftBounds: (paths=null)->
  R.Drawing.getDraft()?.getBounds()
# Whether the current draft exceeds the maximum allowed size.
# (Removed a leftover console.log debug statement.)
# @param [Array] paths: forwarded to computeDraftBounds
# @param [Number] tolerance: shrinks the allowed size by this amount
@draftIsTooBig: (paths=null, tolerance=0)->
  draftBounds = @computeDraftBounds(paths)
  return @draftBoundsIsTooBig(draftBounds, tolerance)
# Whether the given bounds exceed the maximum draft size.
# Fix: `and` binds tighter than `or`, so the original expression parsed as
# `(draftBounds? and width too big) or (height too big)`, dereferencing a
# null/undefined draftBounds when the height clause was reached. Parenthesize
# so a missing draftBounds always yields false.
@draftBoundsIsTooBig: (draftBounds, tolerance=0)->
  return draftBounds? and (draftBounds.width > @maxDraftSize - tolerance or draftBounds.height > @maxDraftSize - tolerance)
# Alert the user that the draft exceeds the maximum allowed size.
@displayDraftIsTooBigError: ()->
  R.alertManager.alert 'Your drawing is too big', 'error'
  return
# Find or create a button for the tool in the sidebar (if the button is created, add it default or favorite tool list depending on the user settings stored in local storage, and whether the tool was just created in a newly created script)
# set its name and icon if an icon url is provided, or create an icon with the letters of the name otherwise
# the icon will be made with the first two letters of the name if the name is in one word, or the first letter of each words of the name otherwise
# @param [RPath constructor] the RPath which will be created by this tool
# @param [Boolean] whether the tool was just created (with the code editor) or not
constructor: (@Path, justCreated=false) ->
  @name = @Path.label
  @constructor.label = @name
  # NOTE(review): the guard tests @Path.description but copies @Path.rdescription;
  # looks like a raw/rendered description pair — confirm against RPath
  if @Path.description then @constructor.description = @Path.rdescription
  if @Path.iconURL then @constructor.iconURL = @Path.iconURL
  if @Path.category then @constructor.category = @Path.category
  if @Path.cursor then @constructor.cursor = @Path.cursor
  # delete tool if it already exists (when user creates a tool)
  if justCreated and R.tools[@name]?
    # expose the new RPath constructor globally under its class name
    # (restored: this line was corrupted in the file; see the intact copy earlier in the file)
    g[@Path.constructor.name] = @Path
    R.tools[@name].remove()
    delete R.tools[@name]
    R.lastPathCreated = @Path
  R.tools[@name] = @
  # check if a button already exists (when created from a module)
  # @btnJ = R.sidebar.allToolsJ.find('li[data-name="'+@name+'"]')
  @btnJ = R.sidebar.favoriteToolsJ.find('li[data-name="'+@name+'"]')
  # create button only if it does not exist
  super(@btnJ.length==0)
  if justCreated
    @select()
  if not R.userAuthenticated?
    R.toolManager.enableDrawingButton(false)
  return
# Remove the tool's sidebar button; used when a user re-creates a tool
# that already existed (the old tool's button is discarded before the overwrite).
remove: () ->
  @btnJ.remove()
  return
# setButtonValidate: ()->
#   newName = i18next.t('Submit drawing')
#   @btnJ.find('.tool-name').attr('data-i18n', newName).text(newName)
#   @btnJ.find('img').attr('src', '/static/images/icons/inverted/icones_icon_ok.png')
#   return
# setButtonDraw: ()->
#   newName = i18next.t('Precise path')
#   @btnJ.find('.tool-name').attr('data-i18n', newName).text(newName)
#   @btnJ.find('img').attr('src', '/static/images/icons/inverted/icones_icon_pen.png')
#   return
# Select: add the mouse move listener on the tool (useful when creating a path in polygon mode)
# Shows the draft limits, enters drawing mode and, unless triggered by the
# middle mouse button, re-frames the view on the current draft.
# todo: move this to main, have a global onMouseMove handler like other handlers
# @param [Boolean] deselectItems: forwarded to Tool.select
# @param [Boolean] updateParameters: forwarded to Tool.select
# @param [Boolean] forceSelect: bypass the authentication guard
# @param [Boolean] fromMiddleMouseButton: true when selected via wheel click
select: (deselectItems=true, updateParameters=true, forceSelect=false, fromMiddleMouseButton=false)->
  # refuse to draw when the city edition is over
  if R.city?.finished
    R.alertManager.alert "Cette édition est terminée, vous ne pouvez plus dessiner.", 'info'
    return
  if not R.userAuthenticated and not forceSelect
    R.alertManager.alert 'Log in before drawing', 'info'
    return
  # if R.selectedTool != @
  #   @setButtonValidate()
  # else
  #   @setButtonDraw()
  #   R.drawingPanel.submitDrawingClicked()
  #   return
  # R.rasterizer.drawItems()
  @showDraftLimits()
  R.tracer?.show()
  super(deselectItems, updateParameters, fromMiddleMouseButton)
  R.view.tool.onMouseMove = @move
  R.toolManager.enterDrawingMode()
  if not fromMiddleMouseButton
    draft = R.Drawing.getDraft()
    if draft?
      bounds = draft.getBounds()
      if bounds?
        # bring the draft back in view (75px margin), zooming in to at least 1
        if not P.view.bounds.expand(-75).contains(bounds.center)
          R.view.fitRectangle(bounds, false, if P.view.zoom < 1 then 1 else P.view.zoom)
          if P.view.zoom < 1
            R.alertManager.alert 'You can zoom in to draw more easily', 'info'
  return
# Refresh the parameter panel for this tool (controller update currently disabled).
updateParameters: ()->
  # R.controllerManager.setSelectedTool(@Path)
  return
# Deselect: remove the mouse move listener
# Also finishes any polygon-mode path in progress and hides the draft limits.
deselect: ()->
  # @setButtonDraw()
  super()
  @finish()
  # R.tracer?.hide()
  @hideDraftLimits()
  R.view.tool.onMouseMove = null
  # R.toolManager.leaveDrawingMode()
  return
# Begin path action:
# - deselect all and create new P.Path in all case except in polygonMode (add path to R.currentPaths)
# - emit event on websocket (if user is the author of the event)
# @param [Paper event or REvent] (usually) mouse down event
# @param [String] author (username) of the event
# @param [Object] Item initial data (strokeWidth, strokeColor, etc.)
# begin, update, and end handlers are called by onMouseDown handler (then from == R.me, data == null) and by socket.on "begin" signal (then from == author of the signal, data == Item initial data)
begin: (event, from=R.me, data=null) ->
  if event.event.which == 2 then return # if middle mouse button (wheel) pressed: return
  if R.tracer?.draggingImage then return
  # refuse to draw below 10% zoom
  if 100 * P.view.zoom < 10
    R.alertManager.alert("You can not draw path at a zoom smaller than 10.", "Info")
    return
  if @draftLimit? and not @draftLimit.contains(event.point)
    @constructor.displayDraftIsTooBigError()
    return
  # deselect all and create new P.Path in all case except in polygonMode
  if not (R.currentPaths[from]? and R.currentPaths[from].data?.polygonMode) # if not in polygon mode
    R.tools.select.deselectAll(false)
    R.currentPaths[from] = new @Path(Date.now(), data, null, null, null, null, R.me)
    # R.currentPaths[from].select(false, false)
  # in straight-line modes, animate a growing circle preview instead of a stroke
  if @circleMode()
    @circlePathRadius = 0.1
    @circlePathCenter = event.point
    # snap the circle center to the mode's grid in snapped modes
    if R.drawingMode in R.Path.PrecisePath.snappedModes
      @circlePathCenter = Utils.Snap.snap2D(event.point, if R.drawingMode == 'lineOrthoDiag' then R.Path.PrecisePath.lineOrthoGridSize else R.Path.PrecisePath.orthoGridSize / 2)
    @animateCircle(0, true)
    @animateCircleIntervalID = setInterval(@animateCircle, 150)
  R.currentPaths[from].beginCreate(event.point, event, false)
  # emit event on websocket (if user is the author of the event)
  # if R.me? and from==R.me then R.socket.emit( "begin", R.me, R.eventToObject(event), @name, R.currentPaths[from].data )
  if @constructor.emitSocket and R.me? and from==R.me
    data = R.currentPaths[from].data
    data.id = R.currentPaths[from].id
    # R.socket.emit "bounce", tool: @name, function: "begin", arguments: [event, R.me, data]
  @using = true
  return
# True when the current drawing mode is one of the straight/snapped modes
# (these modes show a growing circle preview before the path is created).
circleMode: ()->
  return R.drawingMode in ['line', 'lineOrthoDiag', 'orthoDiag', 'ortho']
# Animation tick for the circle preview used in straight-line modes: each
# tick the circle is redrawn with a larger radius; the animation stops
# itself when the preview or the current path has gone away.
# @param [Number] time: tick argument, unused
# @param [Boolean] createCircle: force creating the preview on the first call
# @param [String] author (username) of the event
animateCircle: (time, createCircle=false, from=R.me)=>
  path = R.currentPaths[from]
  if (createCircle or @circlePath?) and path?
    @circlePath?.remove()
    @circlePath = new P.Path.Circle(@circlePathCenter, @circlePathRadius)
    @circlePath.strokeColor = path.data.strokeColor
    @circlePath.strokeWidth = path.data.strokeWidth
    @circlePathRadius += 4
  else
    clearInterval(@animateCircleIntervalID)
    @animateCircleIntervalID = null
  return
# Show the draft-limit overlay: four translucent rectangles covering the
# parts of the view where drawing would make the draft exceed maxDraftSize.
# @return the limit rectangle (@draftLimit), or null when there is no draft
showDraftLimits: ()->
  @hideDraftLimits()
  draftBounds = @constructor.computeDraftBounds()
  path = R.currentPaths[R.me]
  if path?
    drawingBounds = path.getDrawingBounds()
    draftBounds = if draftBounds? then draftBounds.unite(drawingBounds) else drawingBounds
  return null if not draftBounds? or draftBounds.area == 0
  viewBounds = R.view.grid.limitCD.bounds.clone()
  @draftLimit = draftBounds.expand(2 * (@constructor.maxDraftSize - draftBounds.width), 2 * (@constructor.maxDraftSize - draftBounds.height))
  # draftLimitRectangle = new P.Path.Rectangle(@draftLimit)
  # @limit = R.view.grid.limitCD.clone().subtract(draftLimitRectangle)
  # @limit.fillColor = new P.Color(0,0,0,0.25)
  # top, left, right and bottom bands around @draftLimit
  bands = [
    [viewBounds.topLeft, new P.Point(viewBounds.right, @draftLimit.top)]
    [new P.Point(viewBounds.left, @draftLimit.top), new P.Point(@draftLimit.left, @draftLimit.bottom)]
    [new P.Point(@draftLimit.right, @draftLimit.top), new P.Point(viewBounds.right, @draftLimit.bottom)]
    [new P.Point(viewBounds.left, @draftLimit.bottom), viewBounds.bottomRight]
  ]
  @limit = new P.Group()
  for [cornerA, cornerB] in bands
    band = new P.Path.Rectangle(cornerA, cornerB)
    band.fillColor = new P.Color(0, 0, 0, 0.25)
    @limit.addChild(band)
  R.view.selectionLayer.addChild(@limit)
  @limit.sendToBack()
  return @draftLimit
# Remove the draft-limit overlay (if any) and forget the limit rectangle.
hideDraftLimits: ()->
  @limit?.remove()
  @draftLimit = null
  return
# Update path action:
# update path action and emit event on websocket (if user is the author of the event)
# Rejects the point (and ends the path) when it leaves the draft limits or
# the drawing area, except in straight-line modes.
# @param [Paper event or REvent] (usually) mouse drag event
# @param [String] author (username) of the event
update: (event, from=R.me) ->
  path = R.currentPaths[from]
  if not path? then return # when the path has been deleted because too big
  # first drag event in a straight-line mode: drop the circle preview,
  # the user is drawing a stroke instead
  if @circleMode() and @circlePath?
    @circlePath.remove()
    @circlePath = null
    clearInterval(@animateCircleIntervalID)
  draftLimit = @showDraftLimits()
  draftIsTooBig = draftLimit? and not draftLimit.expand(-20).contains(event.point)
  draftIsOutsideFrame = not R.view.contains(event.point)
  if draftIsTooBig or draftIsOutsideFrame
    # if path.path?
    #   @previousPathColor ?= path.path.strokeColor
    #   path.path.strokeColor = 'red'
    if R.drawingMode != 'line' and R.drawingMode != 'lineOrthoDiag'
      if draftIsTooBig
        @constructor.displayDraftIsTooBigError()
      else if draftIsOutsideFrame
        R.alertManager.alert 'Your path must be in the drawing area', 'error'
      @end(event, from)
      # flash a red copy of the rejected path for one second
      if path.path?
        p = path.path.clone()
        p.strokeColor = 'red'
        R.view.mainLayer.addChild(p)
        setTimeout((()=> p.remove()), 1000)
      @showDraftLimits()
    # lastSegmentToPoint = new P.Path()
    # lastSegmentToPoint.add(path.controlPath.lastSegment)
    # lastSegmentToPoint.add(event.point)
    # draftLimitRectangle = new P.Path.Rectangle(draftLimit.expand(-10))
    # intersections = draftLimitRectangle.getIntersections(lastSegmentToPoint)
    # draftLimitRectangle.remove()
    # lastSegmentToPoint.remove()
    # if intersections.length > 0
    #   path.updateCreate(intersections[0].point, event, false)
    # @constructor.displayDraftIsTooBigError()
    # @end(event, from)
    return
  # else if @previousPathColor? and path.path?
  #   path.path.strokeColor = @previousPathColor
  path.updateCreate(event.point, event, false)
  # R.currentPaths[from].group.visible = true
  # if R.me? and from==R.me then R.socket.emit( "update", R.me, R.eventToObject(event), @name)
  # if @constructor.emitSocket and R.me? and from==R.me then R.socket.emit "bounce", tool: @name, function: "update", arguments: [event, R.me]
  return
# Update path action (usually from a mouse move event, necessary for the polygon mode):
# @param [Paper event or REvent] (usually) mouse move event
move: (event) ->
  # only polygon-mode paths preview the segment under the cursor
  if R.currentPaths[R.me]?.data?.polygonMode then R.currentPaths[R.me].createMove?(event)
  return
# Finalize the current path of *from*: when the local user is the author,
# save and rasterize it (requires being logged in); otherwise just close
# the remote path. In both cases the path leaves R.currentPaths.
# @param [Paper event or REvent] event that ended the path
# @param [String] author (username) of the event
createPath: (event, from)->
  path = R.currentPaths[from]
  if not path? then return # when the path has been deleted because too big
  if not path.group then return
  if R.me? and from==R.me # if user is the author of the event: select and save path and emit event on websocket
    # if path.rectangle.area == 0
    #   path.remove()
    #   delete R.currentPaths[from]
    #   return
    # bounds = path.getBounds()
    # locks = Lock.getLocksWhichIntersect(bounds)
    # for lock in locks
    #   if lock.rectangle.contains(bounds)
    #     if lock.owner == R.me
    #       lock.addItem(path)
    #     else
    #       R.alertManager.alert("The path intersects with a lock", "Warning")
    #       path.remove()
    #       delete R.currentPaths[from]
    #       return
    # if path.getDrawingBounds().area > R.rasterizer.maxArea()
    #   R.alertManager.alert("The path is too big", "Warning")
    #   path.remove()
    #   delete R.currentPaths[from]
    #   return
    # if @constructor.emitSocket and R.me? and from==R.me then R.socket.emit "bounce", tool: @name, function: "createPath", arguments: [event, R.me]
    # anonymous users can draw but their path is never persisted
    if (not R.me?) or not _.isString(R.me)
      R.alertManager.alert("You must log in before drawing, your drawing won't be saved", "Info")
      return
    path.save(true)
    path.rasterize()
    R.rasterizer.rasterize(path)
    R.toolManager.updateButtonsVisibility()
    # path.select(false)
  else
    path.endCreate(event.point, event)
  delete R.currentPaths[from]
  return
# End path action:
# - end path action
# - if not in polygon mode: select and save path and emit event on websocket (if user is the author of the event), (remove path from R.currentPaths)
# In straight-line modes the in-progress path is replaced by a path built
# from the circle preview's segments.
# @param [Paper event or REvent] (usually) mouse up event
# @param [String] author (username) of the event
# @return [Boolean] false when the path was rejected (too big / outside), undefined otherwise
end: (event, from=R.me) ->
  @using = false
  path = R.currentPaths[from]
  if not path? then return false # when the path has been deleted because too big
  # refresh the limit overlay; the limit rectangle is read from @draftLimit below
  # (the return value was previously bound to an unused local)
  @showDraftLimits()
  if @circlePath?
    # straight-line mode: discard the in-progress path and rebuild one
    # from the circle preview
    R.currentPaths[from].remove()
    delete R.currentPaths[from]
    draftIsOutsideFrame = not R.view.contains(@circlePath.bounds)
    draftIsTooBig = @draftLimit? and not @draftLimit.contains(@circlePath.bounds)
    if draftIsTooBig
      @constructor.displayDraftIsTooBigError()
      return false
    else if draftIsOutsideFrame
      R.alertManager.alert 'Your path must be in the drawing area', 'error'
      return false
    path = new @Path(Date.now(), null, null, null, null, null, R.me)
    path.ignoreDrawingMode = true
    path.beginCreate(@circlePath.getPointAt(0), event, false)
    path.controlPath.removeSegments()
    path.controlPath.addSegments(@circlePath.segments)
    # close the outline by repeating the first segment
    path.controlPath.addSegment(@circlePath.firstSegment)
    path.rectangle = path.controlPath.bounds.expand(3*path.data.strokeWidth)
    path.draw()
    # (circleLength is only needed by the sampling code below; both kept commented)
    # circleLength = @circlePath.getLength()
    # step = 10
    # for i in [step .. circleLength] by step
    #   p = @circlePath.getPointAt(i)
    #   path.updateCreate(p, event, false)
    # path.endCreate(@circlePath.getPointAt(circleLength), event, false)
    R.currentPaths[from] = path
    @circlePath.remove()
    @circlePath = null
    clearInterval(@animateCircleIntervalID)
    @createPath(event, from)
    R.drawingPanel.showSubmitDrawing()
    return
  # if R.view.grid.rectangleOverlapsTwoPlanets(path.controlPath.bounds.expand(path.data.strokeWidth))
  #   R.alertManager.alert 'Your path must be in the drawing area', 'error'
  #   R.currentPaths[from].remove()
  #   delete R.currentPaths[from]
  #   return false
  if @draftLimit? and not @draftLimit.contains(R.currentPaths[from].controlPath.bounds)
    @constructor.displayDraftIsTooBigError()
    R.currentPaths[from].remove()
    delete R.currentPaths[from]
    return false
  path.endCreate(event.point, event, false)
  if not path.data?.polygonMode
    @createPath(event, from)
    R.drawingPanel.showSubmitDrawing()
  return
# Finish path action (necessary in polygon mode):
# - check that we are in polygon mode (return otherwise)
# - end path action
# - select and save path and emit event on websocket (if user is the author of the event), (remove path from R.currentPaths)
# @param [String] author (username) of the event
# @param [Paper event or REvent] optional event forwarded to createPath.
#   The original body referenced a bare `event`, which only resolved via the
#   non-standard global `window.event`; an explicit parameter is safer and
#   backward compatible (createPath only dereferences it for remote authors).
# @return [Boolean] whether a polygon-mode path was finished
finish: (from=R.me, event=null)->
  if not R.currentPaths[R.me]?.data?.polygonMode then return false
  R.currentPaths[from].finish()
  @createPath(event, from)
  return true
# Keyboard shortcuts: enter finishes the current polygon-mode path;
# escape finishes it or, failing that, deselects everything.
keyUp: (event)->
  if event.key == 'enter'
    @finish?()
  else if event.key == 'escape'
    unless @finish?()
      R.tools.select.deselectAll()
  return
R.Tools.Path = PathTool
return PathTool
| true | define ['paper', 'R', 'Utils/Utils', 'Tools/Tool', 'UI/Button', 'i18next' ], (P, R, Utils, Tool, Button, i18next) ->
# PathTool: the mother class of all drawing tools
# doctodo: P.Path are created with three steps:
# - begin: initialize RPath: create the group, controlPath etc., and initialize the drawing
# - update: update the drawing
# - end: finish the drawing and finish RPath initialization
# doctodo: explain polygon mode
# begin, update, and end handlers are called by onMouseDown handler (then from == R.me, data == null) and by socket.on "begin" signal (then from == author of the signal, data == Item initial data)
# begin, update, and end handlers emit the events to websocket
class PathTool extends Tool
# Display name and metadata of the tool (overridden per tool from its RPath).
@label = ''
@description = ''
@iconURL = ''
@buttonClasses = 'displayName btn-success'
# Mouse cursor shown while the tool is selected.
@cursor =
  position:
    x: 0, y: 32
  name: 'crosshair'
  icon: if R.style == 'line' then 'mouse_draw' else null
@drawItems = true
# Whether begin/update/end events are forwarded over the websocket.
@emitSocket = false
# Maximum draft width/height — presumably in project coordinates; confirm.
@maxDraftSize = 350
# Bounds of the current draft drawing, or undefined when there is none.
# @param [Array] paths: accepted but unused here — presumably for overrides; confirm
@computeDraftBounds: (paths=null)->
  R.Drawing.getDraft()?.getBounds()
# Whether the current draft exceeds the maximum allowed size.
# (Removed a leftover console.log debug statement.)
# @param [Array] paths: forwarded to computeDraftBounds
# @param [Number] tolerance: shrinks the allowed size by this amount
@draftIsTooBig: (paths=null, tolerance=0)->
  draftBounds = @computeDraftBounds(paths)
  return @draftBoundsIsTooBig(draftBounds, tolerance)
# Whether the given bounds exceed the maximum draft size.
# Fix: `and` binds tighter than `or`, so the original expression parsed as
# `(draftBounds? and width too big) or (height too big)`, dereferencing a
# null/undefined draftBounds when the height clause was reached. Parenthesize
# so a missing draftBounds always yields false.
@draftBoundsIsTooBig: (draftBounds, tolerance=0)->
  return draftBounds? and (draftBounds.width > @maxDraftSize - tolerance or draftBounds.height > @maxDraftSize - tolerance)
# Alert the user that the draft exceeds the maximum allowed size.
@displayDraftIsTooBigError: ()->
  R.alertManager.alert 'Your drawing is too big', 'error'
  return
# Find or create a button for the tool in the sidebar (if the button is created, add it default or favorite tool list depending on the user settings stored in local storage, and whether the tool was just created in a newly created script)
# set its name and icon if an icon url is provided, or create an icon with the letters of the name otherwise
# the icon will be made with the first two letters of the name if the name is in one word, or the first letter of each words of the name otherwise
# @param [RPath constructor] the RPath which will be created by this tool
# @param [Boolean] whether the tool was just created (with the code editor) or not
constructor: (@Path, justCreated=false) ->
  @name = @Path.label
  @constructor.label = @name
  # NOTE(review): the guard tests @Path.description but copies @Path.rdescription;
  # looks like a raw/rendered description pair — confirm against RPath
  if @Path.description then @constructor.description = @Path.rdescription
  if @Path.iconURL then @constructor.iconURL = @Path.iconURL
  if @Path.category then @constructor.category = @Path.category
  if @Path.cursor then @constructor.cursor = @Path.cursor
  # delete tool if it already exists (when user creates a tool)
  if justCreated and R.tools[@name]?
    # expose the new RPath constructor globally under its class name
    # (restored: this line was corrupted in the file; see the intact copy earlier in the file)
    g[@Path.constructor.name] = @Path
    R.tools[@name].remove()
    delete R.tools[@name]
    R.lastPathCreated = @Path
  R.tools[@name] = @
  # check if a button already exists (when created from a module)
  # @btnJ = R.sidebar.allToolsJ.find('li[data-name="'+@name+'"]')
  @btnJ = R.sidebar.favoriteToolsJ.find('li[data-name="'+@name+'"]')
  # create button only if it does not exist
  super(@btnJ.length==0)
  if justCreated
    @select()
  if not R.userAuthenticated?
    R.toolManager.enableDrawingButton(false)
  return
# Remove the tool's sidebar button; used when a user re-creates a tool
# that already existed (the old tool's button is discarded before the overwrite).
remove: () ->
  @btnJ.remove()
  return
# setButtonValidate: ()->
#   newName = i18next.t('Submit drawing')
#   @btnJ.find('.tool-name').attr('data-i18n', newName).text(newName)
#   @btnJ.find('img').attr('src', '/static/images/icons/inverted/icones_icon_ok.png')
#   return
# setButtonDraw: ()->
#   newName = i18next.t('Precise path')
#   @btnJ.find('.tool-name').attr('data-i18n', newName).text(newName)
#   @btnJ.find('img').attr('src', '/static/images/icons/inverted/icones_icon_pen.png')
#   return
# Select: add the mouse move listener on the tool (useful when creating a path in polygon mode)
# Shows the draft limits, enters drawing mode and, unless triggered by the
# middle mouse button, re-frames the view on the current draft.
# todo: move this to main, have a global onMouseMove handler like other handlers
# @param [Boolean] deselectItems: forwarded to Tool.select
# @param [Boolean] updateParameters: forwarded to Tool.select
# @param [Boolean] forceSelect: bypass the authentication guard
# @param [Boolean] fromMiddleMouseButton: true when selected via wheel click
select: (deselectItems=true, updateParameters=true, forceSelect=false, fromMiddleMouseButton=false)->
  # refuse to draw when the city edition is over
  if R.city?.finished
    R.alertManager.alert "Cette édition est terminée, vous ne pouvez plus dessiner.", 'info'
    return
  if not R.userAuthenticated and not forceSelect
    R.alertManager.alert 'Log in before drawing', 'info'
    return
  # if R.selectedTool != @
  #   @setButtonValidate()
  # else
  #   @setButtonDraw()
  #   R.drawingPanel.submitDrawingClicked()
  #   return
  # R.rasterizer.drawItems()
  @showDraftLimits()
  R.tracer?.show()
  super(deselectItems, updateParameters, fromMiddleMouseButton)
  R.view.tool.onMouseMove = @move
  R.toolManager.enterDrawingMode()
  if not fromMiddleMouseButton
    draft = R.Drawing.getDraft()
    if draft?
      bounds = draft.getBounds()
      if bounds?
        # bring the draft back in view (75px margin), zooming in to at least 1
        if not P.view.bounds.expand(-75).contains(bounds.center)
          R.view.fitRectangle(bounds, false, if P.view.zoom < 1 then 1 else P.view.zoom)
          if P.view.zoom < 1
            R.alertManager.alert 'You can zoom in to draw more easily', 'info'
  return
# Refresh the parameter panel for this tool (controller update currently disabled).
updateParameters: ()->
  # R.controllerManager.setSelectedTool(@Path)
  return
# Deselect: remove the mouse move listener
# Also finishes any polygon-mode path in progress and hides the draft limits.
deselect: ()->
  # @setButtonDraw()
  super()
  @finish()
  # R.tracer?.hide()
  @hideDraftLimits()
  R.view.tool.onMouseMove = null
  # R.toolManager.leaveDrawingMode()
  return
# Begin path action:
# - deselect all and create new P.Path in all case except in polygonMode (add path to R.currentPaths)
# - emit event on websocket (if user is the author of the event)
# @param [Paper event or REvent] (usually) mouse down event
# @param [String] author (username) of the event
# @param [Object] Item initial data (strokeWidth, strokeColor, etc.)
# begin, update, and end handlers are called by onMouseDown handler (then from == R.me, data == null) and by socket.on "begin" signal (then from == author of the signal, data == Item initial data)
begin: (event, from=R.me, data=null) ->
if event.event.which == 2 then return # if middle mouse button (wheel) pressed: return
if R.tracer?.draggingImage then return
if 100 * P.view.zoom < 10
R.alertManager.alert("You can not draw path at a zoom smaller than 10.", "Info")
return
if @draftLimit? and not @draftLimit.contains(event.point)
@constructor.displayDraftIsTooBigError()
return
# deselect all and create new P.Path in all case except in polygonMode
if not (R.currentPaths[from]? and R.currentPaths[from].data?.polygonMode) # if not in polygon mode
R.tools.select.deselectAll(false)
R.currentPaths[from] = new @Path(Date.now(), data, null, null, null, null, R.me)
# R.currentPaths[from].select(false, false)
if @circleMode()
@circlePathRadius = 0.1
@circlePathCenter = event.point
if R.drawingMode in R.Path.PrecisePath.snappedModes
@circlePathCenter = Utils.Snap.snap2D(event.point, if R.drawingMode == 'lineOrthoDiag' then R.Path.PrecisePath.lineOrthoGridSize else R.Path.PrecisePath.orthoGridSize / 2)
@animateCircle(0, true)
@animateCircleIntervalID = setInterval(@animateCircle, 150)
R.currentPaths[from].beginCreate(event.point, event, false)
# emit event on websocket (if user is the author of the event)
# if R.me? and from==R.me then R.socket.emit( "begin", R.me, R.eventToObject(event), @name, R.currentPaths[from].data )
if @constructor.emitSocket and R.me? and from==R.me
data = R.currentPaths[from].data
data.id = R.currentPaths[from].id
# R.socket.emit "bounce", tool: @name, function: "begin", arguments: [event, R.me, data]
@using = true
return
circleMode: ()->
return R.drawingMode == 'line' or R.drawingMode == 'lineOrthoDiag' or R.drawingMode == 'orthoDiag' or R.drawingMode == 'ortho'
animateCircle: (time, createCircle=false, from=R.me)=>
path = R.currentPaths[from]
if (createCircle or @circlePath?) and path?
@circlePath?.remove()
@circlePath = new P.Path.Circle(@circlePathCenter, @circlePathRadius)
@circlePath.strokeColor = path.data.strokeColor
@circlePath.strokeWidth = path.data.strokeWidth
@circlePathRadius += 4
else
clearInterval(@animateCircleIntervalID)
@animateCircleIntervalID = null
return
showDraftLimits: ()->
@hideDraftLimits()
draftBounds = @constructor.computeDraftBounds()
path = R.currentPaths[R.me]
if path?
if draftBounds?
draftBounds = draftBounds.unite(path.getDrawingBounds())
else
draftBounds = path.getDrawingBounds()
if not draftBounds? or draftBounds.area == 0 then return null
viewBounds = R.view.grid.limitCD.bounds.clone()
@draftLimit = draftBounds.expand(2 * (@constructor.maxDraftSize - draftBounds.width), 2 * (@constructor.maxDraftSize - draftBounds.height))
# draftLimitRectangle = new P.Path.Rectangle(@draftLimit)
# @limit = R.view.grid.limitCD.clone().subtract(draftLimitRectangle)
# @limit.fillColor = new P.Color(0,0,0,0.25)
@limit = new P.Group()
l1 = new P.Path.Rectangle(viewBounds.topLeft, new P.Point(viewBounds.right, @draftLimit.top))
l2 = new P.Path.Rectangle(new P.Point(viewBounds.left, @draftLimit.top), new P.Point(@draftLimit.left, @draftLimit.bottom))
l3 = new P.Path.Rectangle(new P.Point(@draftLimit.right, @draftLimit.top), new P.Point(viewBounds.right, @draftLimit.bottom))
l4 = new P.Path.Rectangle(new P.Point(viewBounds.left, @draftLimit.bottom), viewBounds.bottomRight)
@limit.addChild(l1)
@limit.addChild(l2)
@limit.addChild(l3)
@limit.addChild(l4)
for child in @limit.children
child.fillColor = new P.Color(0,0,0,0.25)
R.view.selectionLayer.addChild(@limit)
@limit.sendToBack()
return @draftLimit
hideDraftLimits: ()->
if @limit?
@limit.remove()
@draftLimit = null
return
# Update path action:
# update path action and emit event on websocket (if user is the author of the event)
# @param [Paper event or REvent] (usually) mouse drag event
# @param [String] author (username) of the event
update: (event, from=R.me) ->
path = R.currentPaths[from]
if not path? then return # when the path has been deleted because too big
if @circleMode() and @circlePath?
@circlePath.remove()
@circlePath = null
clearInterval(@animateCircleIntervalID)
draftLimit = @showDraftLimits()
draftIsTooBig = draftLimit? and not draftLimit.expand(-20).contains(event.point)
draftIsOutsideFrame = not R.view.contains(event.point)
if draftIsTooBig or draftIsOutsideFrame
# if path.path?
# @previousPathColor ?= path.path.strokeColor
# path.path.strokeColor = 'red'
if R.drawingMode != 'line' and R.drawingMode != 'lineOrthoDiag'
if draftIsTooBig
@constructor.displayDraftIsTooBigError()
else if draftIsOutsideFrame
R.alertManager.alert 'Your path must be in the drawing area', 'error'
@end(event, from)
if path.path?
p = path.path.clone()
p.strokeColor = 'red'
R.view.mainLayer.addChild(p)
setTimeout((()=> p.remove()), 1000)
@showDraftLimits()
# lastSegmentToPoint = new P.Path()
# lastSegmentToPoint.add(path.controlPath.lastSegment)
# lastSegmentToPoint.add(event.point)
# draftLimitRectangle = new P.Path.Rectangle(draftLimit.expand(-10))
# intersections = draftLimitRectangle.getIntersections(lastSegmentToPoint)
# draftLimitRectangle.remove()
# lastSegmentToPoint.remove()
# if intersections.length > 0
# path.updateCreate(intersections[0].point, event, false)
# @constructor.displayDraftIsTooBigError()
# @end(event, from)
return
# else if @previousPathColor? and path.path?
# path.path.strokeColor = @previousPathColor
path.updateCreate(event.point, event, false)
# R.currentPaths[from].group.visible = true
# if R.me? and from==R.me then R.socket.emit( "update", R.me, R.eventToObject(event), @name)
# if @constructor.emitSocket and R.me? and from==R.me then R.socket.emit "bounce", tool: @name, function: "update", arguments: [event, R.me]
return
# Update path action (usually from a mouse move event, necessary for the polygon mode):
# @param [Paper event or REvent] (usually) mouse move event
move: (event) ->
if R.currentPaths[R.me]?.data?.polygonMode then R.currentPaths[R.me].createMove?(event)
return
createPath: (event, from)->
path = R.currentPaths[from]
if not path? then return # when the path has been deleted because too big
if not path.group then return
if R.me? and from==R.me # if user is the author of the event: select and save path and emit event on websocket
# if path.rectangle.area == 0
# path.remove()
# delete R.currentPaths[from]
# return
# bounds = path.getBounds()
# locks = Lock.getLocksWhichIntersect(bounds)
# for lock in locks
# if lock.rectangle.contains(bounds)
# if lock.owner == R.me
# lock.addItem(path)
# else
# R.alertManager.alert("The path intersects with a lock", "Warning")
# path.remove()
# delete R.currentPaths[from]
# return
# if path.getDrawingBounds().area > R.rasterizer.maxArea()
# R.alertManager.alert("The path is too big", "Warning")
# path.remove()
# delete R.currentPaths[from]
# return
# if @constructor.emitSocket and R.me? and from==R.me then R.socket.emit "bounce", tool: @name, function: "createPath", arguments: [event, R.me]
if (not R.me?) or not _.isString(R.me)
R.alertManager.alert("You must log in before drawing, your drawing won't be saved", "Info")
return
path.save(true)
path.rasterize()
R.rasterizer.rasterize(path)
R.toolManager.updateButtonsVisibility()
# path.select(false)
else
path.endCreate(event.point, event)
delete R.currentPaths[from]
return
# End path action:
# - end path action
# - if not in polygon mode: select and save path and emit event on websocket (if user is the author of the event), (remove path from R.currentPaths)
# @param [Paper event or REvent] (usually) mouse up event
# @param [String] author (username) of the event
end: (event, from=R.me) ->
@using = false
path = R.currentPaths[from]
if not path? then return false # when the path has been deleted because too big
draftLimit = @showDraftLimits()
if @circlePath?
R.currentPaths[from].remove()
delete R.currentPaths[from]
draftIsOutsideFrame = not R.view.contains(@circlePath.bounds)
draftIsTooBig = @draftLimit? and not @draftLimit.contains(@circlePath.bounds)
if draftIsTooBig
@constructor.displayDraftIsTooBigError()
return false
else if draftIsOutsideFrame
R.alertManager.alert 'Your path must be in the drawing area', 'error'
return false
circleLength = @circlePath.getLength()
path = new @Path(Date.now(), null, null, null, null, null, R.me)
path.ignoreDrawingMode = true
path.beginCreate(@circlePath.getPointAt(0), event, false)
path.controlPath.removeSegments()
path.controlPath.addSegments(@circlePath.segments)
path.controlPath.addSegment(@circlePath.firstSegment)
path.rectangle = path.controlPath.bounds.expand(3*path.data.strokeWidth)
path.draw()
# step = 10
# for i in [step .. circleLength] by step
# p = @circlePath.getPointAt(i)
# path.updateCreate(p, event, false)
# path.endCreate(@circlePath.getPointAt(circleLength), event, false)
R.currentPaths[from] = path
@circlePath.remove()
@circlePath = null
clearInterval(@animateCircleIntervalID)
@createPath(event, from)
R.drawingPanel.showSubmitDrawing()
return
# if R.view.grid.rectangleOverlapsTwoPlanets(path.controlPath.bounds.expand(path.data.strokeWidth))
# R.alertManager.alert 'Your path must be in the drawing area', 'error'
# R.currentPaths[from].remove()
# delete R.currentPaths[from]
# return false
if @draftLimit? and not @draftLimit.contains(R.currentPaths[from].controlPath.bounds)
@constructor.displayDraftIsTooBigError()
R.currentPaths[from].remove()
delete R.currentPaths[from]
return false
path.endCreate(event.point, event, false)
if not path.data?.polygonMode
@createPath(event, from)
R.drawingPanel.showSubmitDrawing()
return
# Finish path action (necessary in polygon mode):
# - check that we are in polygon mode (return otherwise)
# - end path action
# - select and save path and emit event on websocket (if user is the author of the event), (remove path from R.currentPaths)
# @param [String] author (username) of the event
finish: (from=R.me)->
if not R.currentPaths[R.me]?.data?.polygonMode then return false
R.currentPaths[from].finish()
@createPath(event, from)
return true
keyUp: (event)->
switch event.key
when 'enter'
@finish?()
when 'escape'
finishingPath = @finish?()
if not finishingPath
R.tools.select.deselectAll()
return
R.Tools.Path = PathTool
return PathTool
|
[
{
"context": "ta : \"foo\"})\n store2.insert({id: 2, name: \"barf\"})\n store1.getAllKeys().then (keys) ->\n ",
"end": 1354,
"score": 0.8104575872421265,
"start": 1350,
"tag": "NAME",
"value": "barf"
},
{
"context": "\"foo\"})\n stores[1].insert({id: 2, name:... | app/bower_components/angular-indexedDB/test/spec/angular-indexeddb-spec.coffee | Ajtimstoj/registerform | 0 | 'use strict'
describe "$indexedDB", ->
providerConfig = {}
$q = {}
beforeEach ->
angular.module('indexedDB').config ($indexedDBProvider) ->
providerConfig = $indexedDBProvider
module 'indexedDB'
inject(->)
itPromises = (message, testFunc) ->
it message, (done) ->
successCb = sinon.spy()
testFunc.apply(this, []).then(successCb).catch (error) ->
console.error "Unhandled failure from test: #{error}"
expect(false).toBeTruthy()
.finally ->
done()
promiseBefore = (beforeFunc) ->
beforeEach (done) ->
beforeFunc.apply(this, []).finally(done)
beforeEach inject ($indexedDB, _$q_) ->
@subject = $indexedDB
$q = _$q_
beforeEach ->
providerConfig.connection("testDB")
.upgradeDatabase 1, (event, db, txn) ->
db.createObjectStore "TestObjects", keyPath: 'id'
.upgradeDatabase 2, (event, db, txn) ->
store = db.createObjectStore "ComplexTestObjects", keyPath: 'id'
store.createIndex "name", "name", unique: false
afterEach (done) ->
@subject.deleteDatabase().finally(done)
describe "#openStores", ->
itPromises "returns the object stores", ->
@subject.openStores ["TestObjects","ComplexTestObjects"] , (store1, store2) ->
store1.insert({id: 1, data : "foo"})
store2.insert({id: 2, name: "barf"})
store1.getAllKeys().then (keys) ->
expect(keys.length).toEqual(1)
itPromises "to cause a failure when the store does not exist.", ->
success = sinon.spy()
fail = sinon.spy()
@subject.openStores ["TestObjects","NonExistentObjects"] , success
.then(success,fail)
.finally ->
expect(fail).toHaveBeenCalledWith("Object stores TestObjects,NonExistentObjects do not exist.")
expect(success).not.toHaveBeenCalled()
describe "#openAllStores", ->
itPromises "returns all the object stores", ->
@subject.openAllStores (stores...) ->
expect(stores.length).toEqual(2)
stores[0].insert({id: 1, data : "foo"})
stores[1].insert({id: 2, name: "barf"})
stores[0].getAllKeys().then (keys) ->
expect(keys.length).toEqual(1)
describe '#flush', ->
itPromises "it flushes any waiting transactions", ->
@subject.openStore "TestObjects", (store) =>
for i in [0 .. 10000]
store.insert([
{id: i, data: "foo", extra: "a" * i}
])
@subject.flush()
describe '#openStore', ->
itPromises "returns the object store", ->
@subject.openStore "TestObjects", (store) ->
store.getAllKeys().then (keys) ->
expect(keys.length).toEqual(0)
itPromises "throws an error for non-existent stores", ->
notCalled = sinon.spy()
called = sinon.spy()
@subject.openStore("NoSuchStore",notCalled).catch (problem) ->
expect(problem).toEqual("Object stores NoSuchStore do not exist.")
called()
.finally ->
expect(notCalled).not.toHaveBeenCalled()
expect(called).toHaveBeenCalled()
describe "multiple transactions", ->
promiseBefore ->
@subject.openStore "TestObjects", (store) ->
store.insert([
{id: 1, data: "foo"},
{id: 2, data: "bar"}
])
itPromises "can open a transaction within a transaction", ->
@subject.openStore "TestObjects", (store) =>
p = store.insert
@subject.openStore "TestObjects", (store2) ->
expect( store2 ).toBeTruthy()
describe "#delete", ->
promiseBefore ->
@subject.openStore "TestObjects", (store) ->
store.insert([
{id: 1, data: "foo"},
{id: 2, data: "bar"}
])
itPromises "can delete an item", ->
@subject.openStore "TestObjects", (store) ->
store.delete(1)
store.getAll().then (objects) ->
expect(objects.length).toEqual(1)
expect(objects[0].id).toEqual(2)
itPromises "errors gracefully when it doesn't exist", ->
@subject.openStore "TestObjects", (store) ->
store.delete(55)
.catch ->
expect(true).toBeFalsy()
describe "#query", ->
promiseBefore ->
@subject.openStore "ComplexTestObjects", (store) ->
store.insert([
{id: 1, data: "foo", name: "bbb"},
{id: 2, data: "bar", name: "aaa"},
{id: 3, data: "woof", name: "zzz"}
]
)
itPromises "iterates by the index name with lt and lte", ->
@subject.openStore "ComplexTestObjects", (store) ->
store.findWhere(store.query().$index("name")).then (results) ->
expect( results[0].id ).toEqual(2)
store.findWhere(store.query().$index("name").$lt("bbb")).then (results) ->
expect( results.length).toEqual(1)
expect( results[0].id).toEqual(2)
store.findWhere(store.query().$index("name").$lte("bbb")).then (results) ->
expect( results.length).toEqual(2)
expect( results[0].id).toEqual(2)
expect( results[1].id).toEqual(1)
itPromises "iterates by the index name with gt and gte", ->
@subject.openStore "ComplexTestObjects", (store) ->
store.findWhere(store.query().$index("name")).then (results) ->
expect( results[0].id ).toEqual(2)
store.findWhere(store.query().$index("name").$gt("bbb")).then (results) ->
expect( results.length).toEqual(1)
expect( results[0].id).toEqual(3)
store.findWhere(store.query().$index("name").$gte("bbb")).then (results) ->
expect( results.length).toEqual(2)
expect( results[1].id).toEqual(3)
expect( results[0].id).toEqual(1)
itPromises "finds one object with $eq", ->
@subject.openStore "ComplexTestObjects", (store) ->
store.findWhere(store.query().$index("name").$eq("bbb")).then (results) ->
expect( results[0].id ).toEqual(1)
expect( results.length).toEqual(1)
itPromises "finds two objects with $between", ->
@subject.openStore "ComplexTestObjects", (store) ->
store.findWhere(store.query().$index("name").$between("aaa","bbb")).then (results) ->
expect( results[0].id ).toEqual(2)
expect( results.length).toEqual(2)
itPromises "orders differently with $desc", ->
@subject.openStore "ComplexTestObjects", (store) ->
store.findWhere(store.query().$index("name").$desc()).then (results) ->
expect( results[0].id ).toEqual(3)
expect( results.length).toEqual(3)
describe "#find", ->
promiseBefore ->
@subject.openStore "TestObjects", (store) ->
store.insert([
{id: 1, data: "foo"},
{id: 2, data: "bar"}
])
itPromises "finds an existing item", ->
@subject.openStore "TestObjects", (store) ->
store.find(1).then (item) ->
expect(item.data).toEqual("foo")
itPromises "returns the result of the callback to the receiver", ->
@subject.openStore "TestObjects", (store) ->
store.find(1)
.then (item) ->
expect(item.data).toEqual("foo")
true
itPromises "does not find a non-existent item", ->
@subject.openStore "TestObjects", (store) ->
store.find(404).then (item) ->
expect(false).toBeTruthy()
.catch (error) ->
expect(true).toBeTruthy()
describe "#each", ->
promiseBefore ->
@subject.openStore "TestObjects", (store) ->
store.insert([
{id: 1, data: "foo", name: "bbb"},
{id: 2, data: "bar", name: "aaa"}
])
@subject.openStore "ComplexTestObjects", (store) ->
store.insert([
{id: 1, data: "foo", name: "bbb"},
{id: 2, data: "bar", name: "aaa"}
]
)
itPromises " yields the items in succession", ->
@subject.openStore "TestObjects", (store) ->
i = 1
store.each().then null,null, (item) ->
expect(item.id).toEqual(i)
i += 1
itPromises " yields the items in opposite succession given a different direction", ->
@subject.openStore "TestObjects", (store) =>
i = 2
store.each(direction: @subject.queryDirection.descending).then null,null, (item) ->
expect(item.id).toEqual(i)
i -= 1
itPromises " uses a range on the object keys", ->
@subject.openStore "TestObjects", (store) =>
i = 1
store.each(beginKey: 1, endKey: 1).then null,null, (item) ->
expect(item.id).toEqual(i)
i += 1
.then (items) ->
expect(items.length).toEqual(1)
itPromises " can operate on an index", ->
@subject.openStore "ComplexTestObjects", (store) ->
i = 2
store.eachBy("name").then null,null, (item) ->
expect(item.id).toEqual(i)
i -= 1
describe "#upsert", ->
itPromises "adds the item", ->
@subject.openStore "TestObjects", (store) ->
store.upsert({id: 1, data: "something"}).then (result) ->
expect(result).toBeTruthy()
store.getAll().then (objects) ->
expect(objects.length).toEqual(1)
expect(objects[0].data).toEqual("something")
store.find(1).then (object) ->
expect(object.id).toEqual(1)
itPromises "when openStore returns nothing it doesn't fail", ->
@subject.openStore "TestObjects", (store) ->
store.upsert({id: 1, data: "something"}).then (result) ->
expect(result).toBeTruthy()
return
@subject.openStore "TestObjects", (store) ->
store.getAll().then (objects) ->
console.log("got all objects?", objects)
expect(objects.length).toEqual(1)
itPromises "can add an item of the same key twice", ->
@subject.openStore "TestObjects", (store) ->
store.upsert({id: 1, data: "something"})
store.upsert({id: 1, data: "somethingelse"}).catch (errorMessage) ->
expect(true).toBeFalsy()
.then ->
expect(true).toBeTruthy()
itPromises "can add multiple items", ->
@subject.openStore "TestObjects", (store) ->
store.upsert([
{id: 1, data: "1"},
{id: 2, data: "2"}
]).then (result) ->
expect(result).toBeTruthy()
store.getAll().then (objects) ->
expect(objects.length).toEqual(2)
store.count().then (count) ->
expect(count).toEqual(2)
describe "#insert", ->
itPromises "adds the item", ->
@subject.openStore "TestObjects", (store) ->
store.insert({id: 1, data: "something"}).then (result) ->
expect(result).toBeTruthy()
store.getAll().then (objects) ->
expect(objects.length).toEqual(1)
expect(objects[0].data).toEqual("something")
store.find(1).then (object) ->
expect(object.id).toEqual(1)
itPromises "cannot add an item of the same key twice", ->
successCb = sinon.spy()
failedCb = sinon.spy()
@subject.openStore "TestObjects", (store) ->
store.insert({id: 1, data: "something"})
store.insert({id: 1, data: "somethingelse"}).catch (errorMessage) ->
expect(errorMessage).toEqual("Key already exists in the object store.")
failedCb()
return $q.reject("expected")
.then(successCb)
.catch (error) ->
#We expect the overall transaction to also fail
expect(error).toEqual("Transaction Error")
return
.finally ->
expect(successCb).not.toHaveBeenCalled()
expect(failedCb).toHaveBeenCalled()
itPromises "can add multiple items", ->
@subject.openStore "TestObjects", (store) ->
store.insert([
{id: 1, data: "1"},
{id: 2, data: "2"}
]).then (result) ->
expect(result).toBeTruthy()
store.getAll().then (objects) ->
expect(objects.length).toEqual(2)
store.count().then (count) ->
expect(count).toEqual(2)
itPromises "does nothing for no items", ->
@subject.openStore "TestObjects", (store) ->
store.insert([]).then ->
expect(true).toBeTruthy()
| 128490 | 'use strict'
describe "$indexedDB", ->
providerConfig = {}
$q = {}
beforeEach ->
angular.module('indexedDB').config ($indexedDBProvider) ->
providerConfig = $indexedDBProvider
module 'indexedDB'
inject(->)
itPromises = (message, testFunc) ->
it message, (done) ->
successCb = sinon.spy()
testFunc.apply(this, []).then(successCb).catch (error) ->
console.error "Unhandled failure from test: #{error}"
expect(false).toBeTruthy()
.finally ->
done()
promiseBefore = (beforeFunc) ->
beforeEach (done) ->
beforeFunc.apply(this, []).finally(done)
beforeEach inject ($indexedDB, _$q_) ->
@subject = $indexedDB
$q = _$q_
beforeEach ->
providerConfig.connection("testDB")
.upgradeDatabase 1, (event, db, txn) ->
db.createObjectStore "TestObjects", keyPath: 'id'
.upgradeDatabase 2, (event, db, txn) ->
store = db.createObjectStore "ComplexTestObjects", keyPath: 'id'
store.createIndex "name", "name", unique: false
afterEach (done) ->
@subject.deleteDatabase().finally(done)
describe "#openStores", ->
itPromises "returns the object stores", ->
@subject.openStores ["TestObjects","ComplexTestObjects"] , (store1, store2) ->
store1.insert({id: 1, data : "foo"})
store2.insert({id: 2, name: "<NAME>"})
store1.getAllKeys().then (keys) ->
expect(keys.length).toEqual(1)
itPromises "to cause a failure when the store does not exist.", ->
success = sinon.spy()
fail = sinon.spy()
@subject.openStores ["TestObjects","NonExistentObjects"] , success
.then(success,fail)
.finally ->
expect(fail).toHaveBeenCalledWith("Object stores TestObjects,NonExistentObjects do not exist.")
expect(success).not.toHaveBeenCalled()
describe "#openAllStores", ->
itPromises "returns all the object stores", ->
@subject.openAllStores (stores...) ->
expect(stores.length).toEqual(2)
stores[0].insert({id: 1, data : "foo"})
stores[1].insert({id: 2, name: "<NAME>"})
stores[0].getAllKeys().then (keys) ->
expect(keys.length).toEqual(1)
describe '#flush', ->
itPromises "it flushes any waiting transactions", ->
@subject.openStore "TestObjects", (store) =>
for i in [0 .. 10000]
store.insert([
{id: i, data: "foo", extra: "a" * i}
])
@subject.flush()
describe '#openStore', ->
itPromises "returns the object store", ->
@subject.openStore "TestObjects", (store) ->
store.getAllKeys().then (keys) ->
expect(keys.length).toEqual(0)
itPromises "throws an error for non-existent stores", ->
notCalled = sinon.spy()
called = sinon.spy()
@subject.openStore("NoSuchStore",notCalled).catch (problem) ->
expect(problem).toEqual("Object stores NoSuchStore do not exist.")
called()
.finally ->
expect(notCalled).not.toHaveBeenCalled()
expect(called).toHaveBeenCalled()
describe "multiple transactions", ->
promiseBefore ->
@subject.openStore "TestObjects", (store) ->
store.insert([
{id: 1, data: "foo"},
{id: 2, data: "bar"}
])
itPromises "can open a transaction within a transaction", ->
@subject.openStore "TestObjects", (store) =>
p = store.insert
@subject.openStore "TestObjects", (store2) ->
expect( store2 ).toBeTruthy()
describe "#delete", ->
promiseBefore ->
@subject.openStore "TestObjects", (store) ->
store.insert([
{id: 1, data: "foo"},
{id: 2, data: "bar"}
])
itPromises "can delete an item", ->
@subject.openStore "TestObjects", (store) ->
store.delete(1)
store.getAll().then (objects) ->
expect(objects.length).toEqual(1)
expect(objects[0].id).toEqual(2)
itPromises "errors gracefully when it doesn't exist", ->
@subject.openStore "TestObjects", (store) ->
store.delete(55)
.catch ->
expect(true).toBeFalsy()
describe "#query", ->
promiseBefore ->
@subject.openStore "ComplexTestObjects", (store) ->
store.insert([
{id: 1, data: "foo", name: "bbb"},
{id: 2, data: "bar", name: "aaa"},
{id: 3, data: "woof", name: "zzz"}
]
)
itPromises "iterates by the index name with lt and lte", ->
@subject.openStore "ComplexTestObjects", (store) ->
store.findWhere(store.query().$index("name")).then (results) ->
expect( results[0].id ).toEqual(2)
store.findWhere(store.query().$index("name").$lt("bbb")).then (results) ->
expect( results.length).toEqual(1)
expect( results[0].id).toEqual(2)
store.findWhere(store.query().$index("name").$lte("bbb")).then (results) ->
expect( results.length).toEqual(2)
expect( results[0].id).toEqual(2)
expect( results[1].id).toEqual(1)
itPromises "iterates by the index name with gt and gte", ->
@subject.openStore "ComplexTestObjects", (store) ->
store.findWhere(store.query().$index("name")).then (results) ->
expect( results[0].id ).toEqual(2)
store.findWhere(store.query().$index("name").$gt("bbb")).then (results) ->
expect( results.length).toEqual(1)
expect( results[0].id).toEqual(3)
store.findWhere(store.query().$index("name").$gte("bbb")).then (results) ->
expect( results.length).toEqual(2)
expect( results[1].id).toEqual(3)
expect( results[0].id).toEqual(1)
itPromises "finds one object with $eq", ->
@subject.openStore "ComplexTestObjects", (store) ->
store.findWhere(store.query().$index("name").$eq("bbb")).then (results) ->
expect( results[0].id ).toEqual(1)
expect( results.length).toEqual(1)
itPromises "finds two objects with $between", ->
@subject.openStore "ComplexTestObjects", (store) ->
store.findWhere(store.query().$index("name").$between("aaa","bbb")).then (results) ->
expect( results[0].id ).toEqual(2)
expect( results.length).toEqual(2)
itPromises "orders differently with $desc", ->
@subject.openStore "ComplexTestObjects", (store) ->
store.findWhere(store.query().$index("name").$desc()).then (results) ->
expect( results[0].id ).toEqual(3)
expect( results.length).toEqual(3)
describe "#find", ->
promiseBefore ->
@subject.openStore "TestObjects", (store) ->
store.insert([
{id: 1, data: "foo"},
{id: 2, data: "bar"}
])
itPromises "finds an existing item", ->
@subject.openStore "TestObjects", (store) ->
store.find(1).then (item) ->
expect(item.data).toEqual("foo")
itPromises "returns the result of the callback to the receiver", ->
@subject.openStore "TestObjects", (store) ->
store.find(1)
.then (item) ->
expect(item.data).toEqual("foo")
true
itPromises "does not find a non-existent item", ->
@subject.openStore "TestObjects", (store) ->
store.find(404).then (item) ->
expect(false).toBeTruthy()
.catch (error) ->
expect(true).toBeTruthy()
describe "#each", ->
promiseBefore ->
@subject.openStore "TestObjects", (store) ->
store.insert([
{id: 1, data: "foo", name: "bbb"},
{id: 2, data: "bar", name: "aaa"}
])
@subject.openStore "ComplexTestObjects", (store) ->
store.insert([
{id: 1, data: "foo", name: "bbb"},
{id: 2, data: "bar", name: "aaa"}
]
)
itPromises " yields the items in succession", ->
@subject.openStore "TestObjects", (store) ->
i = 1
store.each().then null,null, (item) ->
expect(item.id).toEqual(i)
i += 1
itPromises " yields the items in opposite succession given a different direction", ->
@subject.openStore "TestObjects", (store) =>
i = 2
store.each(direction: @subject.queryDirection.descending).then null,null, (item) ->
expect(item.id).toEqual(i)
i -= 1
itPromises " uses a range on the object keys", ->
@subject.openStore "TestObjects", (store) =>
i = 1
store.each(beginKey: 1, endKey: 1).then null,null, (item) ->
expect(item.id).toEqual(i)
i += 1
.then (items) ->
expect(items.length).toEqual(1)
itPromises " can operate on an index", ->
@subject.openStore "ComplexTestObjects", (store) ->
i = 2
store.eachBy("name").then null,null, (item) ->
expect(item.id).toEqual(i)
i -= 1
describe "#upsert", ->
itPromises "adds the item", ->
@subject.openStore "TestObjects", (store) ->
store.upsert({id: 1, data: "something"}).then (result) ->
expect(result).toBeTruthy()
store.getAll().then (objects) ->
expect(objects.length).toEqual(1)
expect(objects[0].data).toEqual("something")
store.find(1).then (object) ->
expect(object.id).toEqual(1)
itPromises "when openStore returns nothing it doesn't fail", ->
@subject.openStore "TestObjects", (store) ->
store.upsert({id: 1, data: "something"}).then (result) ->
expect(result).toBeTruthy()
return
@subject.openStore "TestObjects", (store) ->
store.getAll().then (objects) ->
console.log("got all objects?", objects)
expect(objects.length).toEqual(1)
itPromises "can add an item of the same key twice", ->
@subject.openStore "TestObjects", (store) ->
store.upsert({id: 1, data: "something"})
store.upsert({id: 1, data: "somethingelse"}).catch (errorMessage) ->
expect(true).toBeFalsy()
.then ->
expect(true).toBeTruthy()
itPromises "can add multiple items", ->
@subject.openStore "TestObjects", (store) ->
store.upsert([
{id: 1, data: "1"},
{id: 2, data: "2"}
]).then (result) ->
expect(result).toBeTruthy()
store.getAll().then (objects) ->
expect(objects.length).toEqual(2)
store.count().then (count) ->
expect(count).toEqual(2)
describe "#insert", ->
itPromises "adds the item", ->
@subject.openStore "TestObjects", (store) ->
store.insert({id: 1, data: "something"}).then (result) ->
expect(result).toBeTruthy()
store.getAll().then (objects) ->
expect(objects.length).toEqual(1)
expect(objects[0].data).toEqual("something")
store.find(1).then (object) ->
expect(object.id).toEqual(1)
itPromises "cannot add an item of the same key twice", ->
successCb = sinon.spy()
failedCb = sinon.spy()
@subject.openStore "TestObjects", (store) ->
store.insert({id: 1, data: "something"})
store.insert({id: 1, data: "somethingelse"}).catch (errorMessage) ->
expect(errorMessage).toEqual("Key already exists in the object store.")
failedCb()
return $q.reject("expected")
.then(successCb)
.catch (error) ->
#We expect the overall transaction to also fail
expect(error).toEqual("Transaction Error")
return
.finally ->
expect(successCb).not.toHaveBeenCalled()
expect(failedCb).toHaveBeenCalled()
itPromises "can add multiple items", ->
@subject.openStore "TestObjects", (store) ->
store.insert([
{id: 1, data: "1"},
{id: 2, data: "2"}
]).then (result) ->
expect(result).toBeTruthy()
store.getAll().then (objects) ->
expect(objects.length).toEqual(2)
store.count().then (count) ->
expect(count).toEqual(2)
itPromises "does nothing for no items", ->
@subject.openStore "TestObjects", (store) ->
store.insert([]).then ->
expect(true).toBeTruthy()
| true | 'use strict'
describe "$indexedDB", ->
providerConfig = {}
$q = {}
beforeEach ->
angular.module('indexedDB').config ($indexedDBProvider) ->
providerConfig = $indexedDBProvider
module 'indexedDB'
inject(->)
itPromises = (message, testFunc) ->
it message, (done) ->
successCb = sinon.spy()
testFunc.apply(this, []).then(successCb).catch (error) ->
console.error "Unhandled failure from test: #{error}"
expect(false).toBeTruthy()
.finally ->
done()
promiseBefore = (beforeFunc) ->
beforeEach (done) ->
beforeFunc.apply(this, []).finally(done)
beforeEach inject ($indexedDB, _$q_) ->
@subject = $indexedDB
$q = _$q_
beforeEach ->
providerConfig.connection("testDB")
.upgradeDatabase 1, (event, db, txn) ->
db.createObjectStore "TestObjects", keyPath: 'id'
.upgradeDatabase 2, (event, db, txn) ->
store = db.createObjectStore "ComplexTestObjects", keyPath: 'id'
store.createIndex "name", "name", unique: false
afterEach (done) ->
@subject.deleteDatabase().finally(done)
describe "#openStores", ->
itPromises "returns the object stores", ->
@subject.openStores ["TestObjects","ComplexTestObjects"] , (store1, store2) ->
store1.insert({id: 1, data : "foo"})
store2.insert({id: 2, name: "PI:NAME:<NAME>END_PI"})
store1.getAllKeys().then (keys) ->
expect(keys.length).toEqual(1)
itPromises "to cause a failure when the store does not exist.", ->
success = sinon.spy()
fail = sinon.spy()
@subject.openStores ["TestObjects","NonExistentObjects"] , success
.then(success,fail)
.finally ->
expect(fail).toHaveBeenCalledWith("Object stores TestObjects,NonExistentObjects do not exist.")
expect(success).not.toHaveBeenCalled()
describe "#openAllStores", ->
itPromises "returns all the object stores", ->
@subject.openAllStores (stores...) ->
expect(stores.length).toEqual(2)
stores[0].insert({id: 1, data : "foo"})
stores[1].insert({id: 2, name: "PI:NAME:<NAME>END_PI"})
stores[0].getAllKeys().then (keys) ->
expect(keys.length).toEqual(1)
describe '#flush', ->
itPromises "it flushes any waiting transactions", ->
@subject.openStore "TestObjects", (store) =>
for i in [0 .. 10000]
store.insert([
{id: i, data: "foo", extra: "a" * i}
])
@subject.flush()
describe '#openStore', ->
itPromises "returns the object store", ->
@subject.openStore "TestObjects", (store) ->
store.getAllKeys().then (keys) ->
expect(keys.length).toEqual(0)
itPromises "throws an error for non-existent stores", ->
notCalled = sinon.spy()
called = sinon.spy()
@subject.openStore("NoSuchStore",notCalled).catch (problem) ->
expect(problem).toEqual("Object stores NoSuchStore do not exist.")
called()
.finally ->
expect(notCalled).not.toHaveBeenCalled()
expect(called).toHaveBeenCalled()
describe "multiple transactions", ->
promiseBefore ->
@subject.openStore "TestObjects", (store) ->
store.insert([
{id: 1, data: "foo"},
{id: 2, data: "bar"}
])
itPromises "can open a transaction within a transaction", ->
@subject.openStore "TestObjects", (store) =>
p = store.insert
@subject.openStore "TestObjects", (store2) ->
expect( store2 ).toBeTruthy()
describe "#delete", ->
promiseBefore ->
@subject.openStore "TestObjects", (store) ->
store.insert([
{id: 1, data: "foo"},
{id: 2, data: "bar"}
])
itPromises "can delete an item", ->
@subject.openStore "TestObjects", (store) ->
store.delete(1)
store.getAll().then (objects) ->
expect(objects.length).toEqual(1)
expect(objects[0].id).toEqual(2)
itPromises "errors gracefully when it doesn't exist", ->
@subject.openStore "TestObjects", (store) ->
store.delete(55)
.catch ->
expect(true).toBeFalsy()
describe "#query", ->
promiseBefore ->
@subject.openStore "ComplexTestObjects", (store) ->
store.insert([
{id: 1, data: "foo", name: "bbb"},
{id: 2, data: "bar", name: "aaa"},
{id: 3, data: "woof", name: "zzz"}
]
)
itPromises "iterates by the index name with lt and lte", ->
@subject.openStore "ComplexTestObjects", (store) ->
store.findWhere(store.query().$index("name")).then (results) ->
expect( results[0].id ).toEqual(2)
store.findWhere(store.query().$index("name").$lt("bbb")).then (results) ->
expect( results.length).toEqual(1)
expect( results[0].id).toEqual(2)
store.findWhere(store.query().$index("name").$lte("bbb")).then (results) ->
expect( results.length).toEqual(2)
expect( results[0].id).toEqual(2)
expect( results[1].id).toEqual(1)
itPromises "iterates by the index name with gt and gte", ->
@subject.openStore "ComplexTestObjects", (store) ->
store.findWhere(store.query().$index("name")).then (results) ->
expect( results[0].id ).toEqual(2)
store.findWhere(store.query().$index("name").$gt("bbb")).then (results) ->
expect( results.length).toEqual(1)
expect( results[0].id).toEqual(3)
store.findWhere(store.query().$index("name").$gte("bbb")).then (results) ->
expect( results.length).toEqual(2)
expect( results[1].id).toEqual(3)
expect( results[0].id).toEqual(1)
itPromises "finds one object with $eq", ->
@subject.openStore "ComplexTestObjects", (store) ->
store.findWhere(store.query().$index("name").$eq("bbb")).then (results) ->
expect( results[0].id ).toEqual(1)
expect( results.length).toEqual(1)
itPromises "finds two objects with $between", ->
@subject.openStore "ComplexTestObjects", (store) ->
store.findWhere(store.query().$index("name").$between("aaa","bbb")).then (results) ->
expect( results[0].id ).toEqual(2)
expect( results.length).toEqual(2)
itPromises "orders differently with $desc", ->
@subject.openStore "ComplexTestObjects", (store) ->
store.findWhere(store.query().$index("name").$desc()).then (results) ->
expect( results[0].id ).toEqual(3)
expect( results.length).toEqual(3)
describe "#find", ->
promiseBefore ->
@subject.openStore "TestObjects", (store) ->
store.insert([
{id: 1, data: "foo"},
{id: 2, data: "bar"}
])
itPromises "finds an existing item", ->
@subject.openStore "TestObjects", (store) ->
store.find(1).then (item) ->
expect(item.data).toEqual("foo")
itPromises "returns the result of the callback to the receiver", ->
@subject.openStore "TestObjects", (store) ->
store.find(1)
.then (item) ->
expect(item.data).toEqual("foo")
true
itPromises "does not find a non-existent item", ->
@subject.openStore "TestObjects", (store) ->
store.find(404).then (item) ->
expect(false).toBeTruthy()
.catch (error) ->
expect(true).toBeTruthy()
describe "#each", ->
promiseBefore ->
@subject.openStore "TestObjects", (store) ->
store.insert([
{id: 1, data: "foo", name: "bbb"},
{id: 2, data: "bar", name: "aaa"}
])
@subject.openStore "ComplexTestObjects", (store) ->
store.insert([
{id: 1, data: "foo", name: "bbb"},
{id: 2, data: "bar", name: "aaa"}
]
)
itPromises " yields the items in succession", ->
@subject.openStore "TestObjects", (store) ->
i = 1
store.each().then null,null, (item) ->
expect(item.id).toEqual(i)
i += 1
itPromises " yields the items in opposite succession given a different direction", ->
@subject.openStore "TestObjects", (store) =>
i = 2
store.each(direction: @subject.queryDirection.descending).then null,null, (item) ->
expect(item.id).toEqual(i)
i -= 1
itPromises " uses a range on the object keys", ->
@subject.openStore "TestObjects", (store) =>
i = 1
store.each(beginKey: 1, endKey: 1).then null,null, (item) ->
expect(item.id).toEqual(i)
i += 1
.then (items) ->
expect(items.length).toEqual(1)
itPromises " can operate on an index", ->
@subject.openStore "ComplexTestObjects", (store) ->
i = 2
store.eachBy("name").then null,null, (item) ->
expect(item.id).toEqual(i)
i -= 1
describe "#upsert", ->
itPromises "adds the item", ->
@subject.openStore "TestObjects", (store) ->
store.upsert({id: 1, data: "something"}).then (result) ->
expect(result).toBeTruthy()
store.getAll().then (objects) ->
expect(objects.length).toEqual(1)
expect(objects[0].data).toEqual("something")
store.find(1).then (object) ->
expect(object.id).toEqual(1)
itPromises "when openStore returns nothing it doesn't fail", ->
@subject.openStore "TestObjects", (store) ->
store.upsert({id: 1, data: "something"}).then (result) ->
expect(result).toBeTruthy()
return
@subject.openStore "TestObjects", (store) ->
store.getAll().then (objects) ->
console.log("got all objects?", objects)
expect(objects.length).toEqual(1)
itPromises "can add an item of the same key twice", ->
@subject.openStore "TestObjects", (store) ->
store.upsert({id: 1, data: "something"})
store.upsert({id: 1, data: "somethingelse"}).catch (errorMessage) ->
expect(true).toBeFalsy()
.then ->
expect(true).toBeTruthy()
itPromises "can add multiple items", ->
@subject.openStore "TestObjects", (store) ->
store.upsert([
{id: 1, data: "1"},
{id: 2, data: "2"}
]).then (result) ->
expect(result).toBeTruthy()
store.getAll().then (objects) ->
expect(objects.length).toEqual(2)
store.count().then (count) ->
expect(count).toEqual(2)
describe "#insert", ->
itPromises "adds the item", ->
@subject.openStore "TestObjects", (store) ->
store.insert({id: 1, data: "something"}).then (result) ->
expect(result).toBeTruthy()
store.getAll().then (objects) ->
expect(objects.length).toEqual(1)
expect(objects[0].data).toEqual("something")
store.find(1).then (object) ->
expect(object.id).toEqual(1)
itPromises "cannot add an item of the same key twice", ->
successCb = sinon.spy()
failedCb = sinon.spy()
@subject.openStore "TestObjects", (store) ->
store.insert({id: 1, data: "something"})
store.insert({id: 1, data: "somethingelse"}).catch (errorMessage) ->
expect(errorMessage).toEqual("Key already exists in the object store.")
failedCb()
return $q.reject("expected")
.then(successCb)
.catch (error) ->
#We expect the overall transaction to also fail
expect(error).toEqual("Transaction Error")
return
.finally ->
expect(successCb).not.toHaveBeenCalled()
expect(failedCb).toHaveBeenCalled()
itPromises "can add multiple items", ->
@subject.openStore "TestObjects", (store) ->
store.insert([
{id: 1, data: "1"},
{id: 2, data: "2"}
]).then (result) ->
expect(result).toBeTruthy()
store.getAll().then (objects) ->
expect(objects.length).toEqual(2)
store.count().then (count) ->
expect(count).toEqual(2)
itPromises "does nothing for no items", ->
@subject.openStore "TestObjects", (store) ->
store.insert([]).then ->
expect(true).toBeTruthy()
|
[
{
"context": "-strings')\n\ndebugger\n\nTEST_STRINGS = [{\n input: \"Bob went to the store\"\n expected: \"bob-went-to-the-s",
"end": 83,
"score": 0.9990279674530029,
"start": 80,
"tag": "NAME",
"value": "Bob"
}
] | test/dasherize.coffee | littlebee/bumble-strings | 0 |
BStr = require('../src/bumble-strings')
debugger
TEST_STRINGS = [{
input: "Bob went to the store"
expected: "bob-went-to-the-store"
why: "Should have converted the whole string to lower case and replaced all spaces with dashes"
},{
input: "someLowerCamelCasedThing"
expected: "some-lower-camel-cased-thing"
why: "Should have converted lower camel case to dashes"
},{
input: "SomeUpperCamelCasedThing"
expected: "some-upper-camel-cased-thing"
why: "Should have converted lower camel case to dashes"
}]
describe "dasherize()", ->
it "should dasherize", ->
for testString in TEST_STRINGS
output = BStr.dasherize(testString.input)
expect(output).to.equal(testString.expected, testString.why)
| 114859 |
BStr = require('../src/bumble-strings')
debugger
TEST_STRINGS = [{
input: "<NAME> went to the store"
expected: "bob-went-to-the-store"
why: "Should have converted the whole string to lower case and replaced all spaces with dashes"
},{
input: "someLowerCamelCasedThing"
expected: "some-lower-camel-cased-thing"
why: "Should have converted lower camel case to dashes"
},{
input: "SomeUpperCamelCasedThing"
expected: "some-upper-camel-cased-thing"
why: "Should have converted lower camel case to dashes"
}]
describe "dasherize()", ->
it "should dasherize", ->
for testString in TEST_STRINGS
output = BStr.dasherize(testString.input)
expect(output).to.equal(testString.expected, testString.why)
| true |
BStr = require('../src/bumble-strings')
debugger
TEST_STRINGS = [{
input: "PI:NAME:<NAME>END_PI went to the store"
expected: "bob-went-to-the-store"
why: "Should have converted the whole string to lower case and replaced all spaces with dashes"
},{
input: "someLowerCamelCasedThing"
expected: "some-lower-camel-cased-thing"
why: "Should have converted lower camel case to dashes"
},{
input: "SomeUpperCamelCasedThing"
expected: "some-upper-camel-cased-thing"
why: "Should have converted lower camel case to dashes"
}]
describe "dasherize()", ->
it "should dasherize", ->
for testString in TEST_STRINGS
output = BStr.dasherize(testString.input)
expect(output).to.equal(testString.expected, testString.why)
|
[
{
"context": "# Copyright (c) 2015 Jesse Grosjean. All rights reserved.\n\nFoldingTextService = requi",
"end": 35,
"score": 0.9997214078903198,
"start": 21,
"tag": "NAME",
"value": "Jesse Grosjean"
}
] | atom/packages/foldingtext-for-atom/lib/extensions/ui/list-input-element.coffee | prookie/dotfiles-1 | 0 | # Copyright (c) 2015 Jesse Grosjean. All rights reserved.
FoldingTextService = require '../../foldingtext-service'
{Disposable, CompositeDisposable} = require 'atom'
fuzzyFilter = null # defer until used
class ListInputElement extends HTMLElement
items: []
maxItems: Infinity
allowNewItems: true
allowMultipleItems: true
allowEmptySelection: true
scheduleTimeout: null
inputThrottle: 50
createdCallback: ->
@insetPanel = document.createElement 'div'
@insetPanel.classList.add 'inset-panel'
@panelHeading = document.createElement 'div'
@panelHeading.classList.add 'panel-heading'
@panelHeading.textContent = 'hello heading'
@insetPanel.appendChild @panelHeading
@panelBody = document.createElement 'div'
@panelBody.classList.add 'panel-body'
@insetPanel.appendChild @panelBody
@appendChild @insetPanel
@classList.add 'select-list'
@list = document.createElement 'ol'
@list.classList.add 'list-group'
@setTextInputElement document.createElement 'ft-text-input'
attachedCallback: ->
detachedCallback: ->
attributeChangedCallback: (attrName, oldVal, newVal) ->
###
Section: Text Input
###
getTextInputElement: ->
@textInputElement
setTextInputElement: (textInputElement) ->
if @textInputElement
@textInputElement.parentElement.removeChild @textInputElement
@textInputElement = textInputElement
if @textInputElement
@panelBody.insertBefore @textInputElement, @panelBody.firstChild
@textInputElement.addAccessoryElement @list
###
Section: Delegate
###
getDelegate: ->
@textInputElement.getDelegate()
setDelegate: (delegate) ->
originalDidChangeText = delegate.didChangeText?.bind(delegate)
originalCanceled = delegate.canceled?.bind(delegate)
delegate.didChangeText = (e) =>
@schedulePopulateList()
originalDidChangeText?(e)
delegate.canceled = =>
@list.innerHTML = ''
originalCanceled?()
@textInputElement.setDelegate(delegate)
@populateList()
###
Section: Text
###
getFilterKey: ->
getText: ->
@textInputElement.getText()
setText: (text) ->
@textInputElement.setText text
###
Section: Managing the list of items
###
getSelectedItem: ->
@getSelectedItemElement()?._item
setSelectedItem: (item) ->
@selectItemElement @getElementForItem item
setItems: (@items=[]) ->
@populateList()
setMaxItems: (@maxItems) ->
reloadItem: (item) ->
if itemElement = @getElementForItem item
newItemElement = @getDelegate().elementForListItem(item)
newItemElement._item = item
itemElement.parentElement.replaceChild(newItemElement, itemElement)
populateList: ->
return unless @items?
selectedItem = @getSelectedItem()
filterQuery = @getText()
if filterQuery.length
fuzzyFilter ?= require('fuzzaldrin').filter
filteredItems = fuzzyFilter(@items, filterQuery, key: @getFilterKey())
else
filteredItems = @items
@list.innerHTML = ''
if filteredItems.length
@list.style.display = null
for i in [0...Math.min(filteredItems.length, @maxItems)]
item = filteredItems[i]
itemElement = @getDelegate().elementForListItem(item)
itemElement._item = item
@list.appendChild(itemElement)
if selectedElement = @getElementForItem selectedItem
@selectItemElement(selectedElement)
else if not @allowEmptySelection
@selectItemElement(@list.firstChild)
else
@list.style.display = 'none'
###
Section: Allow Mark Active
###
getAllowMarkActive: ->
@allowMarkActive
setAllowMarkActive: (allowMarkActive) ->
unless @allowMarkActive is allowMarkActive
@allowMarkActive = allowMarkActive
if allowMarkActive
@list.classList.add 'mark-active'
else
@list.classList.remove 'mark-active'
###
Section: Messages to the user
###
getEmptyMessage: (itemCount, filteredItemCount) ->
emptyMessage = @getDelegate().getEmptyMessage?(itemCount, filteredItemCount)
emptyMessage ?= 'No matches found'
emptyMessage
###
Section: Element Actions
###
focusTextEditor: ->
@textInputElement.focusTextEditor()
###
Section: Private
###
selectFirstElement: (e) ->
@selectItemElement(@list.firstChild)
@list.scrollTop = 0
e?.stopImmediatePropagation()
selectLastElement: (e) ->
@selectItemElement(@list.lastChild)
@list.scrollTop = @list.scrollHeight
e?.stopImmediatePropagation()
selectPreviousItemElement: (e) ->
current = @getSelectedItemElement()
previous = current?.previousSibling
if not previous and not current
previous = @list.lastChild
if previous
@selectItemElement(previous)
e?.stopImmediatePropagation()
selectNextItemElement: (e) ->
current = @getSelectedItemElement()
next = current?.nextSibling
if not next and not current
next = @list.firstChild
if next
@selectItemElement(next)
e?.stopImmediatePropagation()
selectItemElement: (element) ->
oldSelected = @getSelectedItemElement()
unless element is oldSelected
delegate = @getDelegate()
delegate.willSelectListItem?(element?._item)
@getSelectedItemElement()?.classList.remove 'selected'
if element and not element.classList.contains 'selected'
element.classList.add('selected')
@scrollToItemElement(element)
delegate.didSelectListItem?(element?._item)
clearListSelection: ->
@selectItemElement(null)
clearListSelectionOnTextMovement: ->
@clearListSelection()
scrollToItemElement: (element) ->
scrollTop = @list.scrollTop
listRect = @list.getBoundingClientRect()
elementRect = element.getBoundingClientRect()
if elementRect.bottom > listRect.bottom
@list.scrollTop += (elementRect.bottom - listRect.bottom)
else if elementRect.top < listRect.top
@list.scrollTop += (elementRect.top - listRect.top)
getSelectedItemElement: ->
for each in @list.children
if each.classList.contains 'selected'
return each
getElementForItem: (item) ->
for each in @list.children
if each._item is item
return each
schedulePopulateList: ->
clearTimeout(@scheduleTimeout)
populateCallback = =>
@populateList() if document.contains(this)
@scheduleTimeout = setTimeout(populateCallback, @inputThrottle)
liForNode = (node) ->
while node and node.tagName isnt 'LI'
node = node.parentElement
node
listInputForNode = (node) ->
while node and node.tagName isnt 'FT-LIST-INPUT'
node = node.parentElement
node
FoldingTextService.eventRegistery.listen 'ft-list-input ft-text-input > atom-panel > .list-group',
# This prevents the focusout event from firing on the filter editor element
# when the list is scrolled by clicking the scrollbar and dragging.
mousedown: (e) ->
listInputForNode(this).selectItemElement(liForNode(e.target))
e.preventDefault()
e.stopPropagation()
click: (e) ->
listInput = listInputForNode(this)
li = liForNode(e.target)
if li?.classList.contains('selected')
if listInput.getDelegate().mouseClickListItem
listInput.getDelegate().mouseClickListItem(e)
e.preventDefault()
e.stopPropagation()
atom.commands.add 'ft-list-input ft-text-input > atom-text-editor[mini]',
'core:move-up': (e) -> listInputForNode(this).selectPreviousItemElement(e)
'core:move-down': (e) -> listInputForNode(this).selectNextItemElement(e)
'core:move-to-top': (e) -> listInputForNode(this).selectFirstElement(e)
'core:move-to-bottom': (e) -> listInputForNode(this).selectLastElement(e)
'editor:move-to-first-character-of-line': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'editor:move-to-beginning-of-line': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'editor:move-to-beginning-of-paragraph': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'editor:move-to-beginning-of-word': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'core:move-backward': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'core:move-left': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'editor:move-to-end-of-word': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'core:move-forward': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'core:move-right': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'editor:move-to-end-of-screen-line': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'editor:move-to-end-of-line': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'editor:move-to-end-of-paragraph': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
module.exports = document.registerElement 'ft-list-input', prototype: ListInputElement.prototype | 210122 | # Copyright (c) 2015 <NAME>. All rights reserved.
FoldingTextService = require '../../foldingtext-service'
{Disposable, CompositeDisposable} = require 'atom'
fuzzyFilter = null # defer until used
class ListInputElement extends HTMLElement
items: []
maxItems: Infinity
allowNewItems: true
allowMultipleItems: true
allowEmptySelection: true
scheduleTimeout: null
inputThrottle: 50
createdCallback: ->
@insetPanel = document.createElement 'div'
@insetPanel.classList.add 'inset-panel'
@panelHeading = document.createElement 'div'
@panelHeading.classList.add 'panel-heading'
@panelHeading.textContent = 'hello heading'
@insetPanel.appendChild @panelHeading
@panelBody = document.createElement 'div'
@panelBody.classList.add 'panel-body'
@insetPanel.appendChild @panelBody
@appendChild @insetPanel
@classList.add 'select-list'
@list = document.createElement 'ol'
@list.classList.add 'list-group'
@setTextInputElement document.createElement 'ft-text-input'
attachedCallback: ->
detachedCallback: ->
attributeChangedCallback: (attrName, oldVal, newVal) ->
###
Section: Text Input
###
getTextInputElement: ->
@textInputElement
setTextInputElement: (textInputElement) ->
if @textInputElement
@textInputElement.parentElement.removeChild @textInputElement
@textInputElement = textInputElement
if @textInputElement
@panelBody.insertBefore @textInputElement, @panelBody.firstChild
@textInputElement.addAccessoryElement @list
###
Section: Delegate
###
getDelegate: ->
@textInputElement.getDelegate()
setDelegate: (delegate) ->
originalDidChangeText = delegate.didChangeText?.bind(delegate)
originalCanceled = delegate.canceled?.bind(delegate)
delegate.didChangeText = (e) =>
@schedulePopulateList()
originalDidChangeText?(e)
delegate.canceled = =>
@list.innerHTML = ''
originalCanceled?()
@textInputElement.setDelegate(delegate)
@populateList()
###
Section: Text
###
getFilterKey: ->
getText: ->
@textInputElement.getText()
setText: (text) ->
@textInputElement.setText text
###
Section: Managing the list of items
###
getSelectedItem: ->
@getSelectedItemElement()?._item
setSelectedItem: (item) ->
@selectItemElement @getElementForItem item
setItems: (@items=[]) ->
@populateList()
setMaxItems: (@maxItems) ->
reloadItem: (item) ->
if itemElement = @getElementForItem item
newItemElement = @getDelegate().elementForListItem(item)
newItemElement._item = item
itemElement.parentElement.replaceChild(newItemElement, itemElement)
populateList: ->
return unless @items?
selectedItem = @getSelectedItem()
filterQuery = @getText()
if filterQuery.length
fuzzyFilter ?= require('fuzzaldrin').filter
filteredItems = fuzzyFilter(@items, filterQuery, key: @getFilterKey())
else
filteredItems = @items
@list.innerHTML = ''
if filteredItems.length
@list.style.display = null
for i in [0...Math.min(filteredItems.length, @maxItems)]
item = filteredItems[i]
itemElement = @getDelegate().elementForListItem(item)
itemElement._item = item
@list.appendChild(itemElement)
if selectedElement = @getElementForItem selectedItem
@selectItemElement(selectedElement)
else if not @allowEmptySelection
@selectItemElement(@list.firstChild)
else
@list.style.display = 'none'
###
Section: Allow Mark Active
###
getAllowMarkActive: ->
@allowMarkActive
setAllowMarkActive: (allowMarkActive) ->
unless @allowMarkActive is allowMarkActive
@allowMarkActive = allowMarkActive
if allowMarkActive
@list.classList.add 'mark-active'
else
@list.classList.remove 'mark-active'
###
Section: Messages to the user
###
getEmptyMessage: (itemCount, filteredItemCount) ->
emptyMessage = @getDelegate().getEmptyMessage?(itemCount, filteredItemCount)
emptyMessage ?= 'No matches found'
emptyMessage
###
Section: Element Actions
###
focusTextEditor: ->
@textInputElement.focusTextEditor()
###
Section: Private
###
selectFirstElement: (e) ->
@selectItemElement(@list.firstChild)
@list.scrollTop = 0
e?.stopImmediatePropagation()
selectLastElement: (e) ->
@selectItemElement(@list.lastChild)
@list.scrollTop = @list.scrollHeight
e?.stopImmediatePropagation()
selectPreviousItemElement: (e) ->
current = @getSelectedItemElement()
previous = current?.previousSibling
if not previous and not current
previous = @list.lastChild
if previous
@selectItemElement(previous)
e?.stopImmediatePropagation()
selectNextItemElement: (e) ->
current = @getSelectedItemElement()
next = current?.nextSibling
if not next and not current
next = @list.firstChild
if next
@selectItemElement(next)
e?.stopImmediatePropagation()
selectItemElement: (element) ->
oldSelected = @getSelectedItemElement()
unless element is oldSelected
delegate = @getDelegate()
delegate.willSelectListItem?(element?._item)
@getSelectedItemElement()?.classList.remove 'selected'
if element and not element.classList.contains 'selected'
element.classList.add('selected')
@scrollToItemElement(element)
delegate.didSelectListItem?(element?._item)
clearListSelection: ->
@selectItemElement(null)
clearListSelectionOnTextMovement: ->
@clearListSelection()
scrollToItemElement: (element) ->
scrollTop = @list.scrollTop
listRect = @list.getBoundingClientRect()
elementRect = element.getBoundingClientRect()
if elementRect.bottom > listRect.bottom
@list.scrollTop += (elementRect.bottom - listRect.bottom)
else if elementRect.top < listRect.top
@list.scrollTop += (elementRect.top - listRect.top)
getSelectedItemElement: ->
for each in @list.children
if each.classList.contains 'selected'
return each
getElementForItem: (item) ->
for each in @list.children
if each._item is item
return each
schedulePopulateList: ->
clearTimeout(@scheduleTimeout)
populateCallback = =>
@populateList() if document.contains(this)
@scheduleTimeout = setTimeout(populateCallback, @inputThrottle)
liForNode = (node) ->
while node and node.tagName isnt 'LI'
node = node.parentElement
node
listInputForNode = (node) ->
while node and node.tagName isnt 'FT-LIST-INPUT'
node = node.parentElement
node
FoldingTextService.eventRegistery.listen 'ft-list-input ft-text-input > atom-panel > .list-group',
# This prevents the focusout event from firing on the filter editor element
# when the list is scrolled by clicking the scrollbar and dragging.
mousedown: (e) ->
listInputForNode(this).selectItemElement(liForNode(e.target))
e.preventDefault()
e.stopPropagation()
click: (e) ->
listInput = listInputForNode(this)
li = liForNode(e.target)
if li?.classList.contains('selected')
if listInput.getDelegate().mouseClickListItem
listInput.getDelegate().mouseClickListItem(e)
e.preventDefault()
e.stopPropagation()
atom.commands.add 'ft-list-input ft-text-input > atom-text-editor[mini]',
'core:move-up': (e) -> listInputForNode(this).selectPreviousItemElement(e)
'core:move-down': (e) -> listInputForNode(this).selectNextItemElement(e)
'core:move-to-top': (e) -> listInputForNode(this).selectFirstElement(e)
'core:move-to-bottom': (e) -> listInputForNode(this).selectLastElement(e)
'editor:move-to-first-character-of-line': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'editor:move-to-beginning-of-line': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'editor:move-to-beginning-of-paragraph': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'editor:move-to-beginning-of-word': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'core:move-backward': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'core:move-left': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'editor:move-to-end-of-word': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'core:move-forward': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'core:move-right': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'editor:move-to-end-of-screen-line': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'editor:move-to-end-of-line': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'editor:move-to-end-of-paragraph': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
module.exports = document.registerElement 'ft-list-input', prototype: ListInputElement.prototype | true | # Copyright (c) 2015 PI:NAME:<NAME>END_PI. All rights reserved.
FoldingTextService = require '../../foldingtext-service'
{Disposable, CompositeDisposable} = require 'atom'
fuzzyFilter = null # defer until used
class ListInputElement extends HTMLElement
items: []
maxItems: Infinity
allowNewItems: true
allowMultipleItems: true
allowEmptySelection: true
scheduleTimeout: null
inputThrottle: 50
createdCallback: ->
@insetPanel = document.createElement 'div'
@insetPanel.classList.add 'inset-panel'
@panelHeading = document.createElement 'div'
@panelHeading.classList.add 'panel-heading'
@panelHeading.textContent = 'hello heading'
@insetPanel.appendChild @panelHeading
@panelBody = document.createElement 'div'
@panelBody.classList.add 'panel-body'
@insetPanel.appendChild @panelBody
@appendChild @insetPanel
@classList.add 'select-list'
@list = document.createElement 'ol'
@list.classList.add 'list-group'
@setTextInputElement document.createElement 'ft-text-input'
attachedCallback: ->
detachedCallback: ->
attributeChangedCallback: (attrName, oldVal, newVal) ->
###
Section: Text Input
###
getTextInputElement: ->
@textInputElement
setTextInputElement: (textInputElement) ->
if @textInputElement
@textInputElement.parentElement.removeChild @textInputElement
@textInputElement = textInputElement
if @textInputElement
@panelBody.insertBefore @textInputElement, @panelBody.firstChild
@textInputElement.addAccessoryElement @list
###
Section: Delegate
###
getDelegate: ->
@textInputElement.getDelegate()
setDelegate: (delegate) ->
originalDidChangeText = delegate.didChangeText?.bind(delegate)
originalCanceled = delegate.canceled?.bind(delegate)
delegate.didChangeText = (e) =>
@schedulePopulateList()
originalDidChangeText?(e)
delegate.canceled = =>
@list.innerHTML = ''
originalCanceled?()
@textInputElement.setDelegate(delegate)
@populateList()
###
Section: Text
###
getFilterKey: ->
getText: ->
@textInputElement.getText()
setText: (text) ->
@textInputElement.setText text
###
Section: Managing the list of items
###
getSelectedItem: ->
@getSelectedItemElement()?._item
setSelectedItem: (item) ->
@selectItemElement @getElementForItem item
setItems: (@items=[]) ->
@populateList()
setMaxItems: (@maxItems) ->
reloadItem: (item) ->
if itemElement = @getElementForItem item
newItemElement = @getDelegate().elementForListItem(item)
newItemElement._item = item
itemElement.parentElement.replaceChild(newItemElement, itemElement)
populateList: ->
return unless @items?
selectedItem = @getSelectedItem()
filterQuery = @getText()
if filterQuery.length
fuzzyFilter ?= require('fuzzaldrin').filter
filteredItems = fuzzyFilter(@items, filterQuery, key: @getFilterKey())
else
filteredItems = @items
@list.innerHTML = ''
if filteredItems.length
@list.style.display = null
for i in [0...Math.min(filteredItems.length, @maxItems)]
item = filteredItems[i]
itemElement = @getDelegate().elementForListItem(item)
itemElement._item = item
@list.appendChild(itemElement)
if selectedElement = @getElementForItem selectedItem
@selectItemElement(selectedElement)
else if not @allowEmptySelection
@selectItemElement(@list.firstChild)
else
@list.style.display = 'none'
###
Section: Allow Mark Active
###
getAllowMarkActive: ->
@allowMarkActive
setAllowMarkActive: (allowMarkActive) ->
unless @allowMarkActive is allowMarkActive
@allowMarkActive = allowMarkActive
if allowMarkActive
@list.classList.add 'mark-active'
else
@list.classList.remove 'mark-active'
###
Section: Messages to the user
###
getEmptyMessage: (itemCount, filteredItemCount) ->
emptyMessage = @getDelegate().getEmptyMessage?(itemCount, filteredItemCount)
emptyMessage ?= 'No matches found'
emptyMessage
###
Section: Element Actions
###
focusTextEditor: ->
@textInputElement.focusTextEditor()
###
Section: Private
###
selectFirstElement: (e) ->
@selectItemElement(@list.firstChild)
@list.scrollTop = 0
e?.stopImmediatePropagation()
selectLastElement: (e) ->
@selectItemElement(@list.lastChild)
@list.scrollTop = @list.scrollHeight
e?.stopImmediatePropagation()
selectPreviousItemElement: (e) ->
current = @getSelectedItemElement()
previous = current?.previousSibling
if not previous and not current
previous = @list.lastChild
if previous
@selectItemElement(previous)
e?.stopImmediatePropagation()
selectNextItemElement: (e) ->
current = @getSelectedItemElement()
next = current?.nextSibling
if not next and not current
next = @list.firstChild
if next
@selectItemElement(next)
e?.stopImmediatePropagation()
selectItemElement: (element) ->
oldSelected = @getSelectedItemElement()
unless element is oldSelected
delegate = @getDelegate()
delegate.willSelectListItem?(element?._item)
@getSelectedItemElement()?.classList.remove 'selected'
if element and not element.classList.contains 'selected'
element.classList.add('selected')
@scrollToItemElement(element)
delegate.didSelectListItem?(element?._item)
clearListSelection: ->
@selectItemElement(null)
clearListSelectionOnTextMovement: ->
@clearListSelection()
scrollToItemElement: (element) ->
scrollTop = @list.scrollTop
listRect = @list.getBoundingClientRect()
elementRect = element.getBoundingClientRect()
if elementRect.bottom > listRect.bottom
@list.scrollTop += (elementRect.bottom - listRect.bottom)
else if elementRect.top < listRect.top
@list.scrollTop += (elementRect.top - listRect.top)
getSelectedItemElement: ->
for each in @list.children
if each.classList.contains 'selected'
return each
getElementForItem: (item) ->
for each in @list.children
if each._item is item
return each
schedulePopulateList: ->
clearTimeout(@scheduleTimeout)
populateCallback = =>
@populateList() if document.contains(this)
@scheduleTimeout = setTimeout(populateCallback, @inputThrottle)
liForNode = (node) ->
while node and node.tagName isnt 'LI'
node = node.parentElement
node
listInputForNode = (node) ->
while node and node.tagName isnt 'FT-LIST-INPUT'
node = node.parentElement
node
FoldingTextService.eventRegistery.listen 'ft-list-input ft-text-input > atom-panel > .list-group',
# This prevents the focusout event from firing on the filter editor element
# when the list is scrolled by clicking the scrollbar and dragging.
mousedown: (e) ->
listInputForNode(this).selectItemElement(liForNode(e.target))
e.preventDefault()
e.stopPropagation()
click: (e) ->
listInput = listInputForNode(this)
li = liForNode(e.target)
if li?.classList.contains('selected')
if listInput.getDelegate().mouseClickListItem
listInput.getDelegate().mouseClickListItem(e)
e.preventDefault()
e.stopPropagation()
atom.commands.add 'ft-list-input ft-text-input > atom-text-editor[mini]',
'core:move-up': (e) -> listInputForNode(this).selectPreviousItemElement(e)
'core:move-down': (e) -> listInputForNode(this).selectNextItemElement(e)
'core:move-to-top': (e) -> listInputForNode(this).selectFirstElement(e)
'core:move-to-bottom': (e) -> listInputForNode(this).selectLastElement(e)
'editor:move-to-first-character-of-line': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'editor:move-to-beginning-of-line': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'editor:move-to-beginning-of-paragraph': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'editor:move-to-beginning-of-word': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'core:move-backward': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'core:move-left': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'editor:move-to-end-of-word': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'core:move-forward': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'core:move-right': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'editor:move-to-end-of-screen-line': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'editor:move-to-end-of-line': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
'editor:move-to-end-of-paragraph': (e) -> listInputForNode(this).clearListSelectionOnTextMovement(e)
module.exports = document.registerElement 'ft-list-input', prototype: ListInputElement.prototype |
[
{
"context": ">\n clean = (key) ->\n key = key.replace /:/g, 'IDBI'\n key.replace /\\//g, 'IIDI'\n unclean = (key) ",
"end": 181,
"score": 0.908519983291626,
"start": 177,
"tag": "KEY",
"value": "IDBI"
},
{
"context": " key.replace /:/g, 'IDBI'\n key.replace /\\//g, 'IIDI'... | src/local.coffee | ndxbxrme/ndxdb | 1 | 'use strict'
settings = require './settings'
glob = require 'glob'
fs = require 'fs'
path = require 'path'
module.exports = ->
clean = (key) ->
key = key.replace /:/g, 'IDBI'
key.replace /\//g, 'IIDI'
unclean = (key) ->
key = key.replace /IDBI/g, ':'
key = key.replace /IIDI/g, '/'
regex = new RegExp '^' + path.join(settings.LOCAL_STORAGE) + '\\\/'
key.replace regex, ''
checkDataDir: ->
if settings.LOCAL_STORAGE
exists = fs.existsSync path.join(settings.LOCAL_STORAGE)
if not exists
fs.mkdirSync path.join(settings.LOCAL_STORAGE)
keys: (from, prefix, cb) ->
ls = path.join(settings.LOCAL_STORAGE).replace(/\\/g, '/') + '/'
glob path.join(settings.LOCAL_STORAGE, clean(prefix) + '*.json'), (e, r) ->
if e
return cb e, null
i = -1
count = 0
gotFrom = not from
output =
Contents: []
IsTruncated: false
while ++i < r.length and count < 1000
r[i] = r[i].replace ls, ''
if gotFrom
output.Contents.push
Key: unclean r[i].replace('.json', '')
count++
else
if unclean(r[i]) is from + '.json'
gotFrom = true
if i < r.length
output.IsTruncated = true
cb? null, output
del: (key, cb) ->
try
fs.unlinkSync path.join(settings.LOCAL_STORAGE, clean(key) + '.json')
cb? null, null
catch e
cb? e, null
put: (key, o, cb) ->
uri = path.join(settings.LOCAL_STORAGE, clean(key) + '.json')
fs.writeFile uri, JSON.stringify(o), (e) ->
cb? e, null
get: (key, cb) ->
fs.readFile path.join(settings.LOCAL_STORAGE, clean(key) + '.json'), 'utf8', (e, r) ->
d = null
if e and e.code and e.code is 'ENOENT'
return cb? 'ENOENT', null
try
d = JSON.parse r
catch e
return cb?(e or 'error', null)
cb? e, d
getReadStream: (key) ->
fs.createReadStream path.join(settings.LOCAL_STORAGE, clean(key) + '.json')
getWriteStream: (key, errorCb) ->
uri = path.join(settings.LOCAL_STORAGE, clean(key) + '.json')
fs.createWriteStream uri | 10788 | 'use strict'
settings = require './settings'
glob = require 'glob'
fs = require 'fs'
path = require 'path'
module.exports = ->
clean = (key) ->
key = key.replace /:/g, '<KEY>'
key.replace /\//g, '<KEY>'
unclean = (key) ->
key = key.replace /IDBI/g, ':'
key = key.replace /IIDI/g, '/'
regex = new RegExp '^' + path.join(settings.LOCAL_STORAGE) + '\\\/'
key.replace regex, ''
checkDataDir: ->
if settings.LOCAL_STORAGE
exists = fs.existsSync path.join(settings.LOCAL_STORAGE)
if not exists
fs.mkdirSync path.join(settings.LOCAL_STORAGE)
keys: (from, prefix, cb) ->
ls = path.join(settings.LOCAL_STORAGE).replace(/\\/g, '/') + '/'
glob path.join(settings.LOCAL_STORAGE, clean(prefix) + '*.json'), (e, r) ->
if e
return cb e, null
i = -1
count = 0
gotFrom = not from
output =
Contents: []
IsTruncated: false
while ++i < r.length and count < 1000
r[i] = r[i].replace ls, ''
if gotFrom
output.Contents.push
Key: unclean r[i].replace('.json', '')
count++
else
if unclean(r[i]) is from + '.json'
gotFrom = true
if i < r.length
output.IsTruncated = true
cb? null, output
del: (key, cb) ->
try
fs.unlinkSync path.join(settings.LOCAL_STORAGE, clean(key) + '.json')
cb? null, null
catch e
cb? e, null
put: (key, o, cb) ->
uri = path.join(settings.LOCAL_STORAGE, clean(key) + '.json')
fs.writeFile uri, JSON.stringify(o), (e) ->
cb? e, null
get: (key, cb) ->
fs.readFile path.join(settings.LOCAL_STORAGE, clean(key) + '.json'), 'utf8', (e, r) ->
d = null
if e and e.code and e.code is 'ENOENT'
return cb? 'ENOENT', null
try
d = JSON.parse r
catch e
return cb?(e or 'error', null)
cb? e, d
getReadStream: (key) ->
fs.createReadStream path.join(settings.LOCAL_STORAGE, clean(key) + '.json')
getWriteStream: (key, errorCb) ->
uri = path.join(settings.LOCAL_STORAGE, clean(key) + '.json')
fs.createWriteStream uri | true | 'use strict'
settings = require './settings'
glob = require 'glob'
fs = require 'fs'
path = require 'path'
module.exports = ->
clean = (key) ->
key = key.replace /:/g, 'PI:KEY:<KEY>END_PI'
key.replace /\//g, 'PI:KEY:<KEY>END_PI'
unclean = (key) ->
key = key.replace /IDBI/g, ':'
key = key.replace /IIDI/g, '/'
regex = new RegExp '^' + path.join(settings.LOCAL_STORAGE) + '\\\/'
key.replace regex, ''
checkDataDir: ->
if settings.LOCAL_STORAGE
exists = fs.existsSync path.join(settings.LOCAL_STORAGE)
if not exists
fs.mkdirSync path.join(settings.LOCAL_STORAGE)
keys: (from, prefix, cb) ->
ls = path.join(settings.LOCAL_STORAGE).replace(/\\/g, '/') + '/'
glob path.join(settings.LOCAL_STORAGE, clean(prefix) + '*.json'), (e, r) ->
if e
return cb e, null
i = -1
count = 0
gotFrom = not from
output =
Contents: []
IsTruncated: false
while ++i < r.length and count < 1000
r[i] = r[i].replace ls, ''
if gotFrom
output.Contents.push
Key: unclean r[i].replace('.json', '')
count++
else
if unclean(r[i]) is from + '.json'
gotFrom = true
if i < r.length
output.IsTruncated = true
cb? null, output
del: (key, cb) ->
try
fs.unlinkSync path.join(settings.LOCAL_STORAGE, clean(key) + '.json')
cb? null, null
catch e
cb? e, null
put: (key, o, cb) ->
uri = path.join(settings.LOCAL_STORAGE, clean(key) + '.json')
fs.writeFile uri, JSON.stringify(o), (e) ->
cb? e, null
get: (key, cb) ->
fs.readFile path.join(settings.LOCAL_STORAGE, clean(key) + '.json'), 'utf8', (e, r) ->
d = null
if e and e.code and e.code is 'ENOENT'
return cb? 'ENOENT', null
try
d = JSON.parse r
catch e
return cb?(e or 'error', null)
cb? e, d
getReadStream: (key) ->
fs.createReadStream path.join(settings.LOCAL_STORAGE, clean(key) + '.json')
getWriteStream: (key, errorCb) ->
uri = path.join(settings.LOCAL_STORAGE, clean(key) + '.json')
fs.createWriteStream uri |
[
{
"context": " \"greekonstreet\" : \"359790551186407425\",\n \"BettyCrockski\" : \"463677794044502016\",\n \"Macarollin\" ",
"end": 871,
"score": 0.9992278218269348,
"start": 858,
"tag": "NAME",
"value": "BettyCrockski"
},
{
"context": " \"BettyCrockski\" : \"463... | assets/food.coffee | coworkbuffalo/coworkbuffalo.github.com | 1 | $ ->
$.supersized slides: [{image: '/images/buffalo_1900.jpg'}]
trucks = {
"whereslloyd" : "344853196625309696",
"RoamingBuffalo1" : "344857092533911552",
"theBMFT" : "344858519096397824",
"FindFrankNow" : "344858686004535296",
"amys_truck" : "344858789989728256",
"RnRBBQTruck" : "344858857568362496",
"SweetHearthNY" : "344858930691833856",
"HouseOfMunch" : "344859058567790596",
"TheKnightSlider" : "344859260351561729",
"MobileWoodFire" : "344859315275964417",
"WholeHogTruck" : "344859366903656448",
"PaniniTruckBflo" : "344859430824845313",
"buffalosbestbbq" : "463678306605232130",
"thaimeuptruck" : "344859485296271360",
"TheCheesyChick" : "344859534478675968",
"greekonstreet" : "359790551186407425",
"BettyCrockski" : "463677794044502016",
"Macarollin" : "463678250967773184",
"Gr8Foodini" : "463678118729773057",
"flaminfishtruck" : "620631633335443457",
"findthecrawdad" : "620631887778721792",
"streeteatsbflo" : "620632069501161472",
"PhillyFlattop" : "620632249898143744",
"716ClubHouse" : "620633236197109760",
"taffystruck" : "620632437719109632",
"Sassi_Cakes" : "620632908299005952",
"bigsuzies" : "620633102260416512"
}
$list = $("#trucks")
for truck, widget of trucks
$list.append "<li id='#{truck.toLowerCase()}'>
<header><a href='https://twitter.com/#{truck}'>@#{truck}</a></header>
<a class='twitter-timeline' data-tweet-limit='3' data-dnt='true' data-chrome='noheader nofooter transparent' href='https://twitter.com/#{truck}' data-widget-id='#{widget}'>
</a>
</li>"
| 167876 | $ ->
$.supersized slides: [{image: '/images/buffalo_1900.jpg'}]
trucks = {
"whereslloyd" : "344853196625309696",
"RoamingBuffalo1" : "344857092533911552",
"theBMFT" : "344858519096397824",
"FindFrankNow" : "344858686004535296",
"amys_truck" : "344858789989728256",
"RnRBBQTruck" : "344858857568362496",
"SweetHearthNY" : "344858930691833856",
"HouseOfMunch" : "344859058567790596",
"TheKnightSlider" : "344859260351561729",
"MobileWoodFire" : "344859315275964417",
"WholeHogTruck" : "344859366903656448",
"PaniniTruckBflo" : "344859430824845313",
"buffalosbestbbq" : "463678306605232130",
"thaimeuptruck" : "344859485296271360",
"TheCheesyChick" : "344859534478675968",
"greekonstreet" : "359790551186407425",
"<NAME>" : "463677794044502016",
"<NAME>" : "463678250967773184",
"Gr8<NAME>ini" : "463678118729773057",
"flaminfishtruck" : "620631633335443457",
"findthecrawdad" : "620631887778721792",
"streeteatsbflo" : "620632069501161472",
"<NAME>" : "620632249898143744",
"716ClubHouse" : "620633236197109760",
"taffystruck" : "620632437719109632",
"<NAME>_<NAME>" : "620632908299005952",
"bigsuzies" : "620633102260416512"
}
$list = $("#trucks")
for truck, widget of trucks
$list.append "<li id='#{truck.toLowerCase()}'>
<header><a href='https://twitter.com/#{truck}'>@#{truck}</a></header>
<a class='twitter-timeline' data-tweet-limit='3' data-dnt='true' data-chrome='noheader nofooter transparent' href='https://twitter.com/#{truck}' data-widget-id='#{widget}'>
</a>
</li>"
| true | $ ->
$.supersized slides: [{image: '/images/buffalo_1900.jpg'}]
trucks = {
"whereslloyd" : "344853196625309696",
"RoamingBuffalo1" : "344857092533911552",
"theBMFT" : "344858519096397824",
"FindFrankNow" : "344858686004535296",
"amys_truck" : "344858789989728256",
"RnRBBQTruck" : "344858857568362496",
"SweetHearthNY" : "344858930691833856",
"HouseOfMunch" : "344859058567790596",
"TheKnightSlider" : "344859260351561729",
"MobileWoodFire" : "344859315275964417",
"WholeHogTruck" : "344859366903656448",
"PaniniTruckBflo" : "344859430824845313",
"buffalosbestbbq" : "463678306605232130",
"thaimeuptruck" : "344859485296271360",
"TheCheesyChick" : "344859534478675968",
"greekonstreet" : "359790551186407425",
"PI:NAME:<NAME>END_PI" : "463677794044502016",
"PI:NAME:<NAME>END_PI" : "463678250967773184",
"Gr8PI:NAME:<NAME>END_PIini" : "463678118729773057",
"flaminfishtruck" : "620631633335443457",
"findthecrawdad" : "620631887778721792",
"streeteatsbflo" : "620632069501161472",
"PI:NAME:<NAME>END_PI" : "620632249898143744",
"716ClubHouse" : "620633236197109760",
"taffystruck" : "620632437719109632",
"PI:NAME:<NAME>END_PI_PI:NAME:<NAME>END_PI" : "620632908299005952",
"bigsuzies" : "620633102260416512"
}
$list = $("#trucks")
for truck, widget of trucks
$list.append "<li id='#{truck.toLowerCase()}'>
<header><a href='https://twitter.com/#{truck}'>@#{truck}</a></header>
<a class='twitter-timeline' data-tweet-limit='3' data-dnt='true' data-chrome='noheader nofooter transparent' href='https://twitter.com/#{truck}' data-widget-id='#{widget}'>
</a>
</li>"
|
[
{
"context": "###\nknockback-inspector.js 0.1.6\n(c) 2012 Kevin Malakoff.\nKnockback-Inspector.js is freely distributable u",
"end": 56,
"score": 0.9998501539230347,
"start": 42,
"tag": "NAME",
"value": "Kevin Malakoff"
},
{
"context": "ng for full license details:\n https://github.... | src/lib/kbi_core.coffee | kmalakoff/knockback-inspector | 1 | ###
knockback-inspector.js 0.1.6
(c) 2012 Kevin Malakoff.
Knockback-Inspector.js is freely distributable under the MIT license.
See the following for full license details:
https://github.com/kmalakoff/knockback-inspector/blob/master/LICENSE
Dependencies: Knockout.js, Underscore.js, Backbone.js, and Knockback.js.
###
# import Underscore (or Lo-Dash with precedence), Backbone, Knockout, and Knockback
if not @_ and (typeof(require) isnt 'undefined') then (try _ = require('lodash') catch e then _ = require('underscore')) else _ = @_
_ = if _.hasOwnProperty('_') then _._ else _ # LEGACY
Backbone = if not @Backbone and (typeof(require) isnt 'undefined') then require('backbone') else @Backbone
ko = if not @ko and (typeof(require) isnt 'undefined') then require('knockout') else @ko
kb = if not @kb and (typeof(require) isnt 'undefined') then require('knockback') else @kb
# export or create kbi namespace
kbi = @kbi = if (typeof(exports) != 'undefined') then exports else {}
@kbi.VERSION = '0.1.6'
# export Knockback so it is accessible by the views
@kb = kb | 43544 | ###
knockback-inspector.js 0.1.6
(c) 2012 <NAME>.
Knockback-Inspector.js is freely distributable under the MIT license.
See the following for full license details:
https://github.com/kmalakoff/knockback-inspector/blob/master/LICENSE
Dependencies: Knockout.js, Underscore.js, Backbone.js, and Knockback.js.
###
# import Underscore (or Lo-Dash with precedence), Backbone, Knockout, and Knockback
if not @_ and (typeof(require) isnt 'undefined') then (try _ = require('lodash') catch e then _ = require('underscore')) else _ = @_
_ = if _.hasOwnProperty('_') then _._ else _ # LEGACY
Backbone = if not @Backbone and (typeof(require) isnt 'undefined') then require('backbone') else @Backbone
ko = if not @ko and (typeof(require) isnt 'undefined') then require('knockout') else @ko
kb = if not @kb and (typeof(require) isnt 'undefined') then require('knockback') else @kb
# export or create kbi namespace
kbi = @kbi = if (typeof(exports) != 'undefined') then exports else {}
@kbi.VERSION = '0.1.6'
# export Knockback so it is accessible by the views
@kb = kb | true | ###
knockback-inspector.js 0.1.6
(c) 2012 PI:NAME:<NAME>END_PI.
Knockback-Inspector.js is freely distributable under the MIT license.
See the following for full license details:
https://github.com/kmalakoff/knockback-inspector/blob/master/LICENSE
Dependencies: Knockout.js, Underscore.js, Backbone.js, and Knockback.js.
###
# import Underscore (or Lo-Dash with precedence), Backbone, Knockout, and Knockback
if not @_ and (typeof(require) isnt 'undefined') then (try _ = require('lodash') catch e then _ = require('underscore')) else _ = @_
_ = if _.hasOwnProperty('_') then _._ else _ # LEGACY
Backbone = if not @Backbone and (typeof(require) isnt 'undefined') then require('backbone') else @Backbone
ko = if not @ko and (typeof(require) isnt 'undefined') then require('knockout') else @ko
kb = if not @kb and (typeof(require) isnt 'undefined') then require('knockback') else @kb
# export or create kbi namespace
kbi = @kbi = if (typeof(exports) != 'undefined') then exports else {}
@kbi.VERSION = '0.1.6'
# export Knockback so it is accessible by the views
@kb = kb |
[
{
"context": "atic __dirname \n\t\tapp.use express.session {secret: '$#$wt00ne%%', store: new express.session.MemoryStore}\n\t\tapp.s",
"end": 261,
"score": 0.9542864561080933,
"start": 249,
"tag": "KEY",
"value": "'$#$wt00ne%%"
}
] | example/express-core.coffee | codeboost/Skull.io | 1 | express = require 'express'
app = express()
path = require 'path'
exports.init = (viewsDir) ->
app.configure ->
app.use express.bodyParser()
app.use express.cookieParser()
app.use express.static __dirname
app.use express.session {secret: '$#$wt00ne%%', store: new express.session.MemoryStore}
app.set 'views', viewsDir
app.set 'view engine', 'jade'
app.set 'view options', layout: false
app.get '/', (req, res) ->
console.log 'Connect.sid ', req.cookies['connect.sid']
res.render 'index'
app.get '/skull.io/skull.io.js', (req, res) ->
res.sendfile path.join(__dirname, '../lib/skull-client.js')
return app
| 3473 | express = require 'express'
app = express()
path = require 'path'
exports.init = (viewsDir) ->
app.configure ->
app.use express.bodyParser()
app.use express.cookieParser()
app.use express.static __dirname
app.use express.session {secret: <KEY>', store: new express.session.MemoryStore}
app.set 'views', viewsDir
app.set 'view engine', 'jade'
app.set 'view options', layout: false
app.get '/', (req, res) ->
console.log 'Connect.sid ', req.cookies['connect.sid']
res.render 'index'
app.get '/skull.io/skull.io.js', (req, res) ->
res.sendfile path.join(__dirname, '../lib/skull-client.js')
return app
| true | express = require 'express'
app = express()
path = require 'path'
exports.init = (viewsDir) ->
app.configure ->
app.use express.bodyParser()
app.use express.cookieParser()
app.use express.static __dirname
app.use express.session {secret: PI:KEY:<KEY>END_PI', store: new express.session.MemoryStore}
app.set 'views', viewsDir
app.set 'view engine', 'jade'
app.set 'view options', layout: false
app.get '/', (req, res) ->
console.log 'Connect.sid ', req.cookies['connect.sid']
res.render 'index'
app.get '/skull.io/skull.io.js', (req, res) ->
res.sendfile path.join(__dirname, '../lib/skull-client.js')
return app
|
[
{
"context": "date number exists', (done) ->\n vdoc = {name: 'meow', array: []}\n wongo.save 'MockValidation', vdo",
"end": 961,
"score": 0.9761753082275391,
"start": 957,
"tag": "NAME",
"value": "meow"
},
{
"context": "ate boolean exists', (done) ->\n vdoc = {name: 'meow', ... | test/validation.test.coffee | wookets/wongo | 0 | assert = require 'assert'
wongo = require '../lib/wongo'
wongo.schema 'MockValidation',
fields:
name: {type: String, required: true, min: 3, max: 12} # simplest property
number: {type: Number, required: true, min: -1, max: 10} # a number property
boolean: {type: Boolean, required: true} # a boolean property
array: [{type: String, required: true}]
date: {type: Date, required: true}
enum: {type: String, required: true, enum: ['woof', 'bark', 'meow']}
default: {type: String, required: true, enum: ['cave', 'man'], default: 'cave'}
defaultBoolean: {type: Boolean, required: true, default: false}
describe 'Wongo validation', ->
it 'should validate name exists', (done) ->
vdoc = {array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'name is required.')
done()
it 'should validate number exists', (done) ->
vdoc = {name: 'meow', array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'number is required.')
done()
it 'should validate boolean exists', (done) ->
vdoc = {name: 'meow', number: 0, array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'boolean is required.')
done()
it 'should validate array exists', (done) ->
vdoc = {name: 'meow', number: 0, boolean: true}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'array is required.')
done()
it 'should validate date exists', (done) ->
vdoc = {name: 'meow', number: 0, boolean: true, array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'date is required.')
done()
it 'should validate name is a string', (done) ->
vdoc = {name: 45, number: 0, boolean: true, array: [], date: new Date, enum: 'woof'}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'name needs to be a string.')
done()
it 'should validate date is a date', (done) ->
vdoc = {name: 'meow', number: 0, boolean: true, array: [], date: 'notdate'}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'date needs to be a date.')
done()
it 'should validate name is at least 3 characters long', (done) ->
vdoc = {name: 'bo', number: 0, boolean: true, array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'name needs to be at least 3 characters in length.')
done()
it 'should validate name is no longer than 12 characters long', (done) ->
vdoc = {name: 'boromoineinidjsd', number: 0, boolean: true, array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'name needs to be at most 12 characters in length.')
done()
it 'should validate number is greater than -1', (done) ->
vdoc = {name: 'boe', number: -3, boolean: true, array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'number needs to be greater than -1.')
done()
it 'should validate number can be equal to max', (done) ->
vdoc = {name: 'boe', number: 10, boolean: true, array: [], date: new Date, enum: 'woof'}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ifError(err)
done()
it 'should validate number is less than or equal to 10', (done) ->
vdoc = {name: 'brood', number: 13, boolean: true, array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'number needs to be less than or equal to 10.')
done()
it 'should validate enum is of type enum value', (done) ->
vdoc = {name: 'boo', number: 2, boolean: false, array: [], date: new Date, enum: 'moocow'}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'enum must be valid.')
done()
it 'should validate defaults are being set', (done) ->
vdoc = {name: 'boo', number: 2, boolean: false, array: [], date: new Date, enum: 'woof'}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(not err)
assert.equal(result.default, 'cave')
done()
it 'should validate default booleans are being set to false', (done) ->
vdoc = {name: 'boo', number: 2, boolean: false, array: [], date: new Date, enum: 'woof'}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(not err)
assert.equal(result.defaultBoolean, false)
done()
| 89231 | assert = require 'assert'
wongo = require '../lib/wongo'
wongo.schema 'MockValidation',
fields:
name: {type: String, required: true, min: 3, max: 12} # simplest property
number: {type: Number, required: true, min: -1, max: 10} # a number property
boolean: {type: Boolean, required: true} # a boolean property
array: [{type: String, required: true}]
date: {type: Date, required: true}
enum: {type: String, required: true, enum: ['woof', 'bark', 'meow']}
default: {type: String, required: true, enum: ['cave', 'man'], default: 'cave'}
defaultBoolean: {type: Boolean, required: true, default: false}
describe 'Wongo validation', ->
it 'should validate name exists', (done) ->
vdoc = {array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'name is required.')
done()
it 'should validate number exists', (done) ->
vdoc = {name: '<NAME>', array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'number is required.')
done()
it 'should validate boolean exists', (done) ->
vdoc = {name: '<NAME>', number: 0, array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'boolean is required.')
done()
it 'should validate array exists', (done) ->
vdoc = {name: '<NAME>', number: 0, boolean: true}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'array is required.')
done()
it 'should validate date exists', (done) ->
vdoc = {name: '<NAME>', number: 0, boolean: true, array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'date is required.')
done()
it 'should validate name is a string', (done) ->
vdoc = {name: 45, number: 0, boolean: true, array: [], date: new Date, enum: 'woof'}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'name needs to be a string.')
done()
it 'should validate date is a date', (done) ->
vdoc = {name: '<NAME>', number: 0, boolean: true, array: [], date: 'notdate'}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'date needs to be a date.')
done()
it 'should validate name is at least 3 characters long', (done) ->
vdoc = {name: '<NAME>', number: 0, boolean: true, array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'name needs to be at least 3 characters in length.')
done()
it 'should validate name is no longer than 12 characters long', (done) ->
vdoc = {name: 'boromoineinidjsd', number: 0, boolean: true, array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'name needs to be at most 12 characters in length.')
done()
it 'should validate number is greater than -1', (done) ->
vdoc = {name: '<NAME>', number: -3, boolean: true, array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'number needs to be greater than -1.')
done()
it 'should validate number can be equal to max', (done) ->
vdoc = {name: '<NAME>', number: 10, boolean: true, array: [], date: new Date, enum: 'woof'}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ifError(err)
done()
it 'should validate number is less than or equal to 10', (done) ->
vdoc = {name: '<NAME>', number: 13, boolean: true, array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'number needs to be less than or equal to 10.')
done()
it 'should validate enum is of type enum value', (done) ->
vdoc = {name: '<NAME>', number: 2, boolean: false, array: [], date: new Date, enum: 'moocow'}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'enum must be valid.')
done()
it 'should validate defaults are being set', (done) ->
vdoc = {name: '<NAME>', number: 2, boolean: false, array: [], date: new Date, enum: 'woof'}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(not err)
assert.equal(result.default, 'cave')
done()
it 'should validate default booleans are being set to false', (done) ->
vdoc = {name: '<NAME>', number: 2, boolean: false, array: [], date: new Date, enum: 'woof'}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(not err)
assert.equal(result.defaultBoolean, false)
done()
| true | assert = require 'assert'
wongo = require '../lib/wongo'
wongo.schema 'MockValidation',
fields:
name: {type: String, required: true, min: 3, max: 12} # simplest property
number: {type: Number, required: true, min: -1, max: 10} # a number property
boolean: {type: Boolean, required: true} # a boolean property
array: [{type: String, required: true}]
date: {type: Date, required: true}
enum: {type: String, required: true, enum: ['woof', 'bark', 'meow']}
default: {type: String, required: true, enum: ['cave', 'man'], default: 'cave'}
defaultBoolean: {type: Boolean, required: true, default: false}
describe 'Wongo validation', ->
it 'should validate name exists', (done) ->
vdoc = {array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'name is required.')
done()
it 'should validate number exists', (done) ->
vdoc = {name: 'PI:NAME:<NAME>END_PI', array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'number is required.')
done()
it 'should validate boolean exists', (done) ->
vdoc = {name: 'PI:NAME:<NAME>END_PI', number: 0, array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'boolean is required.')
done()
it 'should validate array exists', (done) ->
vdoc = {name: 'PI:NAME:<NAME>END_PI', number: 0, boolean: true}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'array is required.')
done()
it 'should validate date exists', (done) ->
vdoc = {name: 'PI:NAME:<NAME>END_PI', number: 0, boolean: true, array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'date is required.')
done()
it 'should validate name is a string', (done) ->
vdoc = {name: 45, number: 0, boolean: true, array: [], date: new Date, enum: 'woof'}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'name needs to be a string.')
done()
it 'should validate date is a date', (done) ->
vdoc = {name: 'PI:NAME:<NAME>END_PI', number: 0, boolean: true, array: [], date: 'notdate'}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'date needs to be a date.')
done()
it 'should validate name is at least 3 characters long', (done) ->
vdoc = {name: 'PI:NAME:<NAME>END_PI', number: 0, boolean: true, array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'name needs to be at least 3 characters in length.')
done()
it 'should validate name is no longer than 12 characters long', (done) ->
vdoc = {name: 'boromoineinidjsd', number: 0, boolean: true, array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'name needs to be at most 12 characters in length.')
done()
it 'should validate number is greater than -1', (done) ->
vdoc = {name: 'PI:NAME:<NAME>END_PI', number: -3, boolean: true, array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'number needs to be greater than -1.')
done()
it 'should validate number can be equal to max', (done) ->
vdoc = {name: 'PI:NAME:<NAME>END_PI', number: 10, boolean: true, array: [], date: new Date, enum: 'woof'}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ifError(err)
done()
it 'should validate number is less than or equal to 10', (done) ->
vdoc = {name: 'PI:NAME:<NAME>END_PI', number: 13, boolean: true, array: []}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'number needs to be less than or equal to 10.')
done()
it 'should validate enum is of type enum value', (done) ->
vdoc = {name: 'PI:NAME:<NAME>END_PI', number: 2, boolean: false, array: [], date: new Date, enum: 'moocow'}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(err)
assert.equal(err.message, 'enum must be valid.')
done()
it 'should validate defaults are being set', (done) ->
vdoc = {name: 'PI:NAME:<NAME>END_PI', number: 2, boolean: false, array: [], date: new Date, enum: 'woof'}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(not err)
assert.equal(result.default, 'cave')
done()
it 'should validate default booleans are being set to false', (done) ->
vdoc = {name: 'PI:NAME:<NAME>END_PI', number: 2, boolean: false, array: [], date: new Date, enum: 'woof'}
wongo.save 'MockValidation', vdoc, (err, result) ->
assert.ok(not err)
assert.equal(result.defaultBoolean, false)
done()
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9989904761314392,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-vm-new-script-this-context.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
Script = require("vm").Script
common.globalCheck = false
console.error "run a string"
script = new Script("'passed';")
result = script.runInThisContext(script)
assert.equal "passed", result
console.error "thrown error"
script = new Script("throw new Error('test');")
assert.throws ->
script.runInThisContext script
return
hello = 5
script = new Script("hello = 2")
script.runInThisContext script
assert.equal 2, hello
console.error "pass values"
code = "foo = 1;" + "bar = 2;" + "if (typeof baz !== 'undefined') throw new Error('test fail');"
foo = 2
obj =
foo: 0
baz: 3
script = new Script(code)
script.runInThisContext script
assert.equal 0, obj.foo
assert.equal 2, bar
assert.equal 1, foo
console.error "call a function"
f = ->
foo = 100
return
script = new Script("f()")
script.runInThisContext script
assert.equal 100, foo
| 178583 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
Script = require("vm").Script
common.globalCheck = false
console.error "run a string"
script = new Script("'passed';")
result = script.runInThisContext(script)
assert.equal "passed", result
console.error "thrown error"
script = new Script("throw new Error('test');")
assert.throws ->
script.runInThisContext script
return
hello = 5
script = new Script("hello = 2")
script.runInThisContext script
assert.equal 2, hello
console.error "pass values"
code = "foo = 1;" + "bar = 2;" + "if (typeof baz !== 'undefined') throw new Error('test fail');"
foo = 2
obj =
foo: 0
baz: 3
script = new Script(code)
script.runInThisContext script
assert.equal 0, obj.foo
assert.equal 2, bar
assert.equal 1, foo
console.error "call a function"
f = ->
foo = 100
return
script = new Script("f()")
script.runInThisContext script
assert.equal 100, foo
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
Script = require("vm").Script
common.globalCheck = false
console.error "run a string"
script = new Script("'passed';")
result = script.runInThisContext(script)
assert.equal "passed", result
console.error "thrown error"
script = new Script("throw new Error('test');")
assert.throws ->
script.runInThisContext script
return
hello = 5
script = new Script("hello = 2")
script.runInThisContext script
assert.equal 2, hello
console.error "pass values"
code = "foo = 1;" + "bar = 2;" + "if (typeof baz !== 'undefined') throw new Error('test fail');"
foo = 2
obj =
foo: 0
baz: 3
script = new Script(code)
script.runInThisContext script
assert.equal 0, obj.foo
assert.equal 2, bar
assert.equal 1, foo
console.error "call a function"
f = ->
foo = 100
return
script = new Script("f()")
script.runInThisContext script
assert.equal 100, foo
|
[
{
"context": "# @file tests.coffee\n# @Copyright (c) 2016 Taylor Siviter\n# This source code is licensed under the MIT Lice",
"end": 57,
"score": 0.9998048543930054,
"start": 43,
"tag": "NAME",
"value": "Taylor Siviter"
}
] | test/tests.coffee | siviter-t/lampyridae.coffee | 4 | # @file tests.coffee
# @Copyright (c) 2016 Taylor Siviter
# This source code is licensed under the MIT License.
# For full information, see the LICENSE file in the project root.
# Helpful dom element creation and removal functions
window.createElementById = (id = '', tag = 'div', parent = 'body') ->
element = document.createElement tag
if id != '' then element.id = id
if parent == 'body' then document.body.appendChild element
else document.getElementById(parent).appendChild element
return element
window.removeElementById = (id) ->
element = document.getElementById id
element.parentNode.removeChild element
# Helpful delay function
window.delay = (time, f) -> setTimeout f, time
# Introductory tests to check that a dom of some kind exists and that a version of the
# Lampyridae development library is loaded etc...
describe 'Lampyridae introductory tests', ->
it 'A DOM should exist', -> should.exist(document)
it 'and it should have a body tag', -> should.exist(document.body)
it 'Lampyridae should be loaded and accessible', -> should.exist(window.Lampyridae)
it 'jQuery shouldn\'t be loaded or needed', -> should.not.exist(window.jQuery)
# Determining the current test runner
testFile = location.pathname.split("/").slice(-1).toString()
console.info "Test runner: #{testFile}"
# Once the document is ready, start the relevant tests...
document.addEventListener 'DOMContentLoaded', () ->
if testFile == "_testUnits.html"
require './testCanvas'
else if testFile == "_testExample.html"
require './testExample'
else
throw new Error "No tests have been configured for the current test runner" | 67213 | # @file tests.coffee
# @Copyright (c) 2016 <NAME>
# This source code is licensed under the MIT License.
# For full information, see the LICENSE file in the project root.
# Helpful dom element creation and removal functions
window.createElementById = (id = '', tag = 'div', parent = 'body') ->
element = document.createElement tag
if id != '' then element.id = id
if parent == 'body' then document.body.appendChild element
else document.getElementById(parent).appendChild element
return element
window.removeElementById = (id) ->
element = document.getElementById id
element.parentNode.removeChild element
# Helpful delay function
window.delay = (time, f) -> setTimeout f, time
# Introductory tests to check that a dom of some kind exists and that a version of the
# Lampyridae development library is loaded etc...
describe 'Lampyridae introductory tests', ->
it 'A DOM should exist', -> should.exist(document)
it 'and it should have a body tag', -> should.exist(document.body)
it 'Lampyridae should be loaded and accessible', -> should.exist(window.Lampyridae)
it 'jQuery shouldn\'t be loaded or needed', -> should.not.exist(window.jQuery)
# Determining the current test runner
testFile = location.pathname.split("/").slice(-1).toString()
console.info "Test runner: #{testFile}"
# Once the document is ready, start the relevant tests...
document.addEventListener 'DOMContentLoaded', () ->
if testFile == "_testUnits.html"
require './testCanvas'
else if testFile == "_testExample.html"
require './testExample'
else
throw new Error "No tests have been configured for the current test runner" | true | # @file tests.coffee
# @Copyright (c) 2016 PI:NAME:<NAME>END_PI
# This source code is licensed under the MIT License.
# For full information, see the LICENSE file in the project root.
# Helpful dom element creation and removal functions
window.createElementById = (id = '', tag = 'div', parent = 'body') ->
element = document.createElement tag
if id != '' then element.id = id
if parent == 'body' then document.body.appendChild element
else document.getElementById(parent).appendChild element
return element
window.removeElementById = (id) ->
element = document.getElementById id
element.parentNode.removeChild element
# Helpful delay function
window.delay = (time, f) -> setTimeout f, time
# Introductory tests to check that a dom of some kind exists and that a version of the
# Lampyridae development library is loaded etc...
describe 'Lampyridae introductory tests', ->
it 'A DOM should exist', -> should.exist(document)
it 'and it should have a body tag', -> should.exist(document.body)
it 'Lampyridae should be loaded and accessible', -> should.exist(window.Lampyridae)
it 'jQuery shouldn\'t be loaded or needed', -> should.not.exist(window.jQuery)
# Determining the current test runner
testFile = location.pathname.split("/").slice(-1).toString()
console.info "Test runner: #{testFile}"
# Once the document is ready, start the relevant tests...
document.addEventListener 'DOMContentLoaded', () ->
if testFile == "_testUnits.html"
require './testCanvas'
else if testFile == "_testExample.html"
require './testExample'
else
throw new Error "No tests have been configured for the current test runner" |
[
{
"context": "le_Data(filename)\n .user.name.assert_Is 'Joe'\n\n it 'set_File_Data', ->\n target_File = 'tea",
"end": 1552,
"score": 0.8039158582687378,
"start": 1549,
"tag": "NAME",
"value": "Joe"
}
] | test/backend/Data-Files.test.coffee | nhannan/BSIMM | 0 | Data_Files = require '../../src/backend/Data-Files'
describe 'controllers | Api-Controller', ->
data_Files = null
beforeEach ->
data_Files = new Data_Files()
it 'constructor',->
using data_Files, ->
@.constructor.name.assert_Is 'Data_Files'
@.data_Path.assert_Contains 'data'
.assert_Folder_Exists()
it 'all_Data', ->
using data_Files, ->
using @.all_Data().assert_Not_Empty(), ->
for item in @
item.assert_Is_Object()
it 'files', ->
using data_Files, ->
@.files().assert_Not_Empty()
@.files().first().assert_File_Not_Exists()
@.files().first().assert_File_Not_Exists()
@.data_Path.path_Combine(@.files().first()).assert_File_Exists()
it 'files_Names', ->
using data_Files, ->
@.files_Names().assert_Not_Empty()
@.files_Names().first().assert_Is @.files().first().file_Name_Without_Extension()
it 'files_Paths', ->
using data_Files, ->
@.files().size().assert_Is_Not @.files_Paths().size()
@.files_Paths().assert_Not_Empty()
@.files_Paths().first().assert_File_Exists()
it 'find_File', ->
using data_Files, ->
team_A = @.find_File 'team-A'
team_A.assert_File_Exists()
assert_Is_Null @.find_File 'Team-A' # search is case sensitive
assert_Is_Null @.find_File 'aaaaaa'
assert_Is_Null @.find_File null
it 'get_File_Data', ()->
filename = 'json-data'
using data_Files, ->
@.get_File_Data(filename)
.user.name.assert_Is 'Joe'
it 'set_File_Data', ->
target_File = 'team-C'
good_Value = 'Team C'
temp_Value = 'BBBBB'
using data_Files.get_File_Data(target_File), -> # get data
@.metadata.team.assert_Is good_Value
@.metadata.team = temp_Value # change value
data_Files.set_File_Data_Json target_File, @.json_Str() # save it
.assert_Is_True() # confirm save was ok
using data_Files.get_File_Data(target_File), -> # get new copy of data
@.metadata.team.assert_Is temp_Value # check value has been changed
@.metadata.team = good_Value # restore original value
data_Files.set_File_Data_Json target_File, @.json_Pretty() # save it again
using data_Files.get_File_Data(target_File), -> # get another copy of data
@.metadata.team.assert_Is good_Value # confirm original value is there
it 'set_File_Data (bad json)', ()->
target_File = 'team-C'
bad_Json = '{ not-good : json } '
using data_Files, ->
assert_Is_Null data_Files.set_File_Data_Json target_File, bad_Json
it 'set_File_Data (non json files)', ()->
target_File = 'team-A' # team-A is an json5 file
good_Json = '{ "is-good" : "json" } '
using data_Files, ->
assert_Is_Null data_Files.set_File_Data_Json target_File, good_Json
it 'set_File_Data (not able to create new file)', ()->
filename = 'temp_file.json'
contents = '{ "aaa" : 123 }'
using data_Files, ->
@.set_File_Data_Json filename, contents
assert_Is_Null @.get_File_Data filename, contents
it 'set_File_Data (bad data)', ()->
using data_Files, ->
assert_Is_Null @.set_File_Data_Json()
assert_Is_Null @.set_File_Data_Json 'aaa'
assert_Is_Null @.set_File_Data_Json null, 'bbbb'
assert_Is_Null @.set_File_Data_Json 'aaa', {}
| 148586 | Data_Files = require '../../src/backend/Data-Files'
describe 'controllers | Api-Controller', ->
data_Files = null
beforeEach ->
data_Files = new Data_Files()
it 'constructor',->
using data_Files, ->
@.constructor.name.assert_Is 'Data_Files'
@.data_Path.assert_Contains 'data'
.assert_Folder_Exists()
it 'all_Data', ->
using data_Files, ->
using @.all_Data().assert_Not_Empty(), ->
for item in @
item.assert_Is_Object()
it 'files', ->
using data_Files, ->
@.files().assert_Not_Empty()
@.files().first().assert_File_Not_Exists()
@.files().first().assert_File_Not_Exists()
@.data_Path.path_Combine(@.files().first()).assert_File_Exists()
it 'files_Names', ->
using data_Files, ->
@.files_Names().assert_Not_Empty()
@.files_Names().first().assert_Is @.files().first().file_Name_Without_Extension()
it 'files_Paths', ->
using data_Files, ->
@.files().size().assert_Is_Not @.files_Paths().size()
@.files_Paths().assert_Not_Empty()
@.files_Paths().first().assert_File_Exists()
it 'find_File', ->
using data_Files, ->
team_A = @.find_File 'team-A'
team_A.assert_File_Exists()
assert_Is_Null @.find_File 'Team-A' # search is case sensitive
assert_Is_Null @.find_File 'aaaaaa'
assert_Is_Null @.find_File null
it 'get_File_Data', ()->
filename = 'json-data'
using data_Files, ->
@.get_File_Data(filename)
.user.name.assert_Is '<NAME>'
it 'set_File_Data', ->
target_File = 'team-C'
good_Value = 'Team C'
temp_Value = 'BBBBB'
using data_Files.get_File_Data(target_File), -> # get data
@.metadata.team.assert_Is good_Value
@.metadata.team = temp_Value # change value
data_Files.set_File_Data_Json target_File, @.json_Str() # save it
.assert_Is_True() # confirm save was ok
using data_Files.get_File_Data(target_File), -> # get new copy of data
@.metadata.team.assert_Is temp_Value # check value has been changed
@.metadata.team = good_Value # restore original value
data_Files.set_File_Data_Json target_File, @.json_Pretty() # save it again
using data_Files.get_File_Data(target_File), -> # get another copy of data
@.metadata.team.assert_Is good_Value # confirm original value is there
it 'set_File_Data (bad json)', ()->
target_File = 'team-C'
bad_Json = '{ not-good : json } '
using data_Files, ->
assert_Is_Null data_Files.set_File_Data_Json target_File, bad_Json
it 'set_File_Data (non json files)', ()->
target_File = 'team-A' # team-A is an json5 file
good_Json = '{ "is-good" : "json" } '
using data_Files, ->
assert_Is_Null data_Files.set_File_Data_Json target_File, good_Json
it 'set_File_Data (not able to create new file)', ()->
filename = 'temp_file.json'
contents = '{ "aaa" : 123 }'
using data_Files, ->
@.set_File_Data_Json filename, contents
assert_Is_Null @.get_File_Data filename, contents
it 'set_File_Data (bad data)', ()->
using data_Files, ->
assert_Is_Null @.set_File_Data_Json()
assert_Is_Null @.set_File_Data_Json 'aaa'
assert_Is_Null @.set_File_Data_Json null, 'bbbb'
assert_Is_Null @.set_File_Data_Json 'aaa', {}
| true | Data_Files = require '../../src/backend/Data-Files'
describe 'controllers | Api-Controller', ->
data_Files = null
beforeEach ->
data_Files = new Data_Files()
it 'constructor',->
using data_Files, ->
@.constructor.name.assert_Is 'Data_Files'
@.data_Path.assert_Contains 'data'
.assert_Folder_Exists()
it 'all_Data', ->
using data_Files, ->
using @.all_Data().assert_Not_Empty(), ->
for item in @
item.assert_Is_Object()
it 'files', ->
using data_Files, ->
@.files().assert_Not_Empty()
@.files().first().assert_File_Not_Exists()
@.files().first().assert_File_Not_Exists()
@.data_Path.path_Combine(@.files().first()).assert_File_Exists()
it 'files_Names', ->
using data_Files, ->
@.files_Names().assert_Not_Empty()
@.files_Names().first().assert_Is @.files().first().file_Name_Without_Extension()
it 'files_Paths', ->
using data_Files, ->
@.files().size().assert_Is_Not @.files_Paths().size()
@.files_Paths().assert_Not_Empty()
@.files_Paths().first().assert_File_Exists()
it 'find_File', ->
using data_Files, ->
team_A = @.find_File 'team-A'
team_A.assert_File_Exists()
assert_Is_Null @.find_File 'Team-A' # search is case sensitive
assert_Is_Null @.find_File 'aaaaaa'
assert_Is_Null @.find_File null
it 'get_File_Data', ()->
filename = 'json-data'
using data_Files, ->
@.get_File_Data(filename)
.user.name.assert_Is 'PI:NAME:<NAME>END_PI'
it 'set_File_Data', ->
target_File = 'team-C'
good_Value = 'Team C'
temp_Value = 'BBBBB'
using data_Files.get_File_Data(target_File), -> # get data
@.metadata.team.assert_Is good_Value
@.metadata.team = temp_Value # change value
data_Files.set_File_Data_Json target_File, @.json_Str() # save it
.assert_Is_True() # confirm save was ok
using data_Files.get_File_Data(target_File), -> # get new copy of data
@.metadata.team.assert_Is temp_Value # check value has been changed
@.metadata.team = good_Value # restore original value
data_Files.set_File_Data_Json target_File, @.json_Pretty() # save it again
using data_Files.get_File_Data(target_File), -> # get another copy of data
@.metadata.team.assert_Is good_Value # confirm original value is there
it 'set_File_Data (bad json)', ()->
target_File = 'team-C'
bad_Json = '{ not-good : json } '
using data_Files, ->
assert_Is_Null data_Files.set_File_Data_Json target_File, bad_Json
it 'set_File_Data (non json files)', ()->
target_File = 'team-A' # team-A is an json5 file
good_Json = '{ "is-good" : "json" } '
using data_Files, ->
assert_Is_Null data_Files.set_File_Data_Json target_File, good_Json
it 'set_File_Data (not able to create new file)', ()->
filename = 'temp_file.json'
contents = '{ "aaa" : 123 }'
using data_Files, ->
@.set_File_Data_Json filename, contents
assert_Is_Null @.get_File_Data filename, contents
it 'set_File_Data (bad data)', ()->
using data_Files, ->
assert_Is_Null @.set_File_Data_Json()
assert_Is_Null @.set_File_Data_Json 'aaa'
assert_Is_Null @.set_File_Data_Json null, 'bbbb'
assert_Is_Null @.set_File_Data_Json 'aaa', {}
|
[
{
"context": "/?'\nlfm = 'http://www.last.fm/api/auth/?'\n\nkey = 'f85dd881f328badc6505a31ae9cc8626'\nsecret = 'a0c50bb8ceda91e115c44b725680d904'\n\nbui",
"end": 130,
"score": 0.9997540712356567,
"start": 98,
"tag": "KEY",
"value": "f85dd881f328badc6505a31ae9cc8626"
},
{
"context": "... | app/scripts/contentscript.coffee | tlvince/focusatwill-scrobbler | 1 | 'use strict'
api = '//ws.audioscrobbler.com/2.0/?'
lfm = 'http://www.last.fm/api/auth/?'
key = 'f85dd881f328badc6505a31ae9cc8626'
secret = 'a0c50bb8ceda91e115c44b725680d904'
buildQuery = (query) ->
str = []
for key, value of query
str.push "#{key}=#{encodeURIComponent(value)}"
str.join '&'
sign = (query) ->
keys = Object.keys(query).sort()
for key, i in keys
keys[i] = key + query[key]
signed = keys.join('') + secret
SparkMD5.hash(signed)
getTrack = ->
artist = document.querySelector('.artist')
track = document.querySelector('.track')
artist = artist.textContent.replace(/^By: /, '').trim()
track = track.textContent.trim()
nowSeconds = parseInt(new Date().getTime() / 1000)
track =
artist: artist
track: track
timestamp: nowSeconds
getToken = (cb) ->
query =
method: 'auth.gettoken'
api_key: key
format: 'json'
request = new XMLHttpRequest()
request.onreadystatechange = ->
if request.readyState is 4 and request.status is 200
response = JSON.parse(request.responseText)
if response.token
_query =
api_key: key
token: response.token
window.open lfm + buildQuery(_query)
cb(response.token)
request.open 'GET', api + buildQuery(query)
request.send()
getSession = (token, cb) ->
query =
method: 'auth.getsession'
api_key: key
token: token
query.api_sig = sign(query)
query.format = 'json'
request = new XMLHttpRequest()
request.onreadystatechange = ->
if request.readyState is 4 and request.status is 200
response = JSON.parse(request.responseText)
cb(response.sesson) if response.session
request.open 'GET', api + buildQuery(query)
request.send()
main = ->
track = document.querySelector('.track')
prevTrack = {}
observer = new window.WebKitMutationObserver (mutations) ->
mutations.forEach (mutation) ->
if mutation.target.innerText isnt ''
track = getTrack()
nowPlaying(track)
scrobble(track) if prevTrack.track
prevTrack = track
observer.observe track,
childList: true
characterData: true
subtree: true
nowPlaying = (track) ->
query =
method: 'track.updateNowPlaying'
artist: track.artist
track: track.track
api_key: key
token: localStorage.lfmToken
sk: localStorage.lfmSession
query.api_sig = sign(query)
query.format = 'json'
request = new XMLHttpRequest()
request.open 'POST', api + buildQuery(query)
request.send()
scrobble = (track) ->
query =
method: 'track.scrobble'
artist: track.artist
track: track.track
timestamp: track.timestamp
api_key: key
token: localStorage.lfmToken
sk: localStorage.lfmSession
query.api_sig = sign(query)
query.format = 'json'
request = new XMLHttpRequest()
request.open 'POST', api + buildQuery(query)
request.send()
unless localStorage.lfmToken
getToken (token) ->
localStorage.lfmToken = token
unless localStorage.lfmSession
getSession localStorage.lfmToken, (session) ->
localStorage.lfmSession = session.key
main()
| 107093 | 'use strict'
api = '//ws.audioscrobbler.com/2.0/?'
lfm = 'http://www.last.fm/api/auth/?'
key = '<KEY>'
secret = '<KEY>'
buildQuery = (query) ->
str = []
for key, value of query
str.push "#{key}=#{encodeURIComponent(value)}"
str.join '&'
sign = (query) ->
keys = Object.keys(query).sort()
for key, i in keys
keys[i] = key + query[key]
signed = keys.join('') + secret
SparkMD5.hash(signed)
getTrack = ->
artist = document.querySelector('.artist')
track = document.querySelector('.track')
artist = artist.textContent.replace(/^By: /, '').trim()
track = track.textContent.trim()
nowSeconds = parseInt(new Date().getTime() / 1000)
track =
artist: artist
track: track
timestamp: nowSeconds
getToken = (cb) ->
query =
method: 'auth.gettoken'
api_key: key
format: 'json'
request = new XMLHttpRequest()
request.onreadystatechange = ->
if request.readyState is 4 and request.status is 200
response = JSON.parse(request.responseText)
if response.token
_query =
api_key: key
token: response.token
window.open lfm + buildQuery(_query)
cb(response.token)
request.open 'GET', api + buildQuery(query)
request.send()
getSession = (token, cb) ->
query =
method: 'auth.getsession'
api_key: key
token: token
query.api_sig = sign(query)
query.format = 'json'
request = new XMLHttpRequest()
request.onreadystatechange = ->
if request.readyState is 4 and request.status is 200
response = JSON.parse(request.responseText)
cb(response.sesson) if response.session
request.open 'GET', api + buildQuery(query)
request.send()
main = ->
track = document.querySelector('.track')
prevTrack = {}
observer = new window.WebKitMutationObserver (mutations) ->
mutations.forEach (mutation) ->
if mutation.target.innerText isnt ''
track = getTrack()
nowPlaying(track)
scrobble(track) if prevTrack.track
prevTrack = track
observer.observe track,
childList: true
characterData: true
subtree: true
nowPlaying = (track) ->
query =
method: 'track.updateNowPlaying'
artist: track.artist
track: track.track
api_key: key
token: localStorage.lfmToken
sk: localStorage.lfmSession
query.api_sig = sign(query)
query.format = 'json'
request = new XMLHttpRequest()
request.open 'POST', api + buildQuery(query)
request.send()
scrobble = (track) ->
query =
method: 'track.scrobble'
artist: track.artist
track: track.track
timestamp: track.timestamp
api_key: key
token: localStorage.lfmToken
sk: localStorage.lfmSession
query.api_sig = sign(query)
query.format = 'json'
request = new XMLHttpRequest()
request.open 'POST', api + buildQuery(query)
request.send()
unless localStorage.lfmToken
getToken (token) ->
localStorage.lfmToken = token
unless localStorage.lfmSession
getSession localStorage.lfmToken, (session) ->
localStorage.lfmSession = session.key
main()
| true | 'use strict'
api = '//ws.audioscrobbler.com/2.0/?'
lfm = 'http://www.last.fm/api/auth/?'
key = 'PI:KEY:<KEY>END_PI'
secret = 'PI:KEY:<KEY>END_PI'
buildQuery = (query) ->
str = []
for key, value of query
str.push "#{key}=#{encodeURIComponent(value)}"
str.join '&'
sign = (query) ->
keys = Object.keys(query).sort()
for key, i in keys
keys[i] = key + query[key]
signed = keys.join('') + secret
SparkMD5.hash(signed)
getTrack = ->
artist = document.querySelector('.artist')
track = document.querySelector('.track')
artist = artist.textContent.replace(/^By: /, '').trim()
track = track.textContent.trim()
nowSeconds = parseInt(new Date().getTime() / 1000)
track =
artist: artist
track: track
timestamp: nowSeconds
getToken = (cb) ->
query =
method: 'auth.gettoken'
api_key: key
format: 'json'
request = new XMLHttpRequest()
request.onreadystatechange = ->
if request.readyState is 4 and request.status is 200
response = JSON.parse(request.responseText)
if response.token
_query =
api_key: key
token: response.token
window.open lfm + buildQuery(_query)
cb(response.token)
request.open 'GET', api + buildQuery(query)
request.send()
getSession = (token, cb) ->
query =
method: 'auth.getsession'
api_key: key
token: token
query.api_sig = sign(query)
query.format = 'json'
request = new XMLHttpRequest()
request.onreadystatechange = ->
if request.readyState is 4 and request.status is 200
response = JSON.parse(request.responseText)
cb(response.sesson) if response.session
request.open 'GET', api + buildQuery(query)
request.send()
main = ->
track = document.querySelector('.track')
prevTrack = {}
observer = new window.WebKitMutationObserver (mutations) ->
mutations.forEach (mutation) ->
if mutation.target.innerText isnt ''
track = getTrack()
nowPlaying(track)
scrobble(track) if prevTrack.track
prevTrack = track
observer.observe track,
childList: true
characterData: true
subtree: true
nowPlaying = (track) ->
query =
method: 'track.updateNowPlaying'
artist: track.artist
track: track.track
api_key: key
token: localStorage.lfmToken
sk: localStorage.lfmSession
query.api_sig = sign(query)
query.format = 'json'
request = new XMLHttpRequest()
request.open 'POST', api + buildQuery(query)
request.send()
scrobble = (track) ->
query =
method: 'track.scrobble'
artist: track.artist
track: track.track
timestamp: track.timestamp
api_key: key
token: localStorage.lfmToken
sk: localStorage.lfmSession
query.api_sig = sign(query)
query.format = 'json'
request = new XMLHttpRequest()
request.open 'POST', api + buildQuery(query)
request.send()
unless localStorage.lfmToken
getToken (token) ->
localStorage.lfmToken = token
unless localStorage.lfmSession
getSession localStorage.lfmToken, (session) ->
localStorage.lfmSession = session.key
main()
|
[
{
"context": "###\n\n vat-calc.coffee\n\n Copyright (c) 2014-2016, Daniel Ellermann\n\n Permission is hereby granted, free of charge, ",
"end": 67,
"score": 0.9997744560241699,
"start": 51,
"tag": "NAME",
"value": "Daniel Ellermann"
},
{
"context": "ts the client-side V.A.T calcul... | coffee/vat-calc.coffee | dellermann/vat-calc | 0 | ###
vat-calc.coffee
Copyright (c) 2014-2016, Daniel Ellermann
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
###
# @nodoc
$ = jQuery
# Class `VatCalculator` represents the client-side V.A.T calculator.
#
# @author Daniel Ellermann
# @version 0.9
# @since 0.9
#
class VatCalculator
#-- Private variables -------------------------
$ = jQuery
#-- Fields ------------------------------------
DEFAULT_OPTIONS =
accessKeys:
calculate: 'c'
gross: 'g'
input: 'i'
net: 'n'
vatRate: 'r'
currency: '€'
gross: false
labels:
calculate: 'Calculate'
gross: 'Gross'
net: 'Net'
vat: 'V.A.T'
vatRate: 'Rate'
point: '.'
precision: 2
taxRates: [7, 19]
#-- Constructor -------------------------------
# Creates a new calculator within the given element.
#
# @param [Element] element the given container element
# @param [Object] options any options that overwrite the default options
#
constructor: (element, options = {}) ->
@$element = $(element)
@options = $.extend {}, DEFAULT_OPTIONS, options
@_renderTemplate()
#-- Non-public methods ------------------------
# Calculates either the net or gross value depending on the input values.
#
# @private
#
_calculate: ->
options = @options
point = options.point
precision = options.precision
input = @$input.val().replace(point, '.')
input = '0' unless input
input = parseFloat input
rate = parseFloat @$rates.val()
if @$netGrossSwitch.is ':checked'
v = 100.0 + rate
vat = rate * input / v
res = 100.0 * input / v
else
vat = input * rate / 100.0
res = input + vat
@$vat.val @_format vat
@$result.val @_format res
return
# Formats the given numeric value respecting the decimal point and precision
# from the options.
#
# @param [Number] value the given numeric value
# @return [String] the formatted number
# @private
#
_format: (value) ->
options = @options
precision = options.precision
value = value.toFixed precision if precision >= 0
value.replace /\./, options.point
# Called if the toggle switch for net/gross values has been changed. The
# method changes the label of the output field and calculates the result.
#
# @param [Event] event any event data
# @private
#
_onChangeNetGross: (event) ->
$this = $(event.currentTarget)
$label = $this.parent()
if $this.is ':checked'
@$netGrossLabel.text $label.prev().text()
else
@$netGrossLabel.text $label.next().text()
@_calculate()
# Renders the Handlebars template that displays the calculator.
#
# @private
#
_renderTemplate: ->
html = Handlebars.templates['vat-calc']
options: @options
$el = @$element
.empty()
.html(html)
.on('click', 'button', =>
@_calculate()
false
)
.on('click', (event) -> event.stopPropagation())
.on('change', '.vatcalc-net-gross-switch', (event) =>
@_onChangeNetGross event
)
.on('change', '.vatcalc-vat-rates', => @_calculate())
.on('change', '.vatcalc-input', => @_calculate())
.on('change keyup', '.vatcalc-input', => @_calculate())
@$input = $el.find '.vatcalc-input'
@$netGrossSwitch = $el.find '.vatcalc-net-gross-switch'
@$rates = $el.find '.vatcalc-vat-rates'
@$vat = $el.find '.vatcalc-vat'
@$netGrossLabel = $el.find '.vatcalc-net-gross-label'
@$result = $el.find '.vatcalc-result'
Plugin = (option) ->
args = arguments
@each ->
$this = $(this)
data = $this.data 'bs.vatcalc'
unless data
$this.data 'bs.vatcalc', (data = new VatCalculator(this, args[0]))
# @nodoc
old = $.fn.vatcalc
# @nodoc
$.fn.vatcalc = Plugin
# @nodoc
$.fn.vatcalc.Constructor = VatCalculator
# @nodoc
$.fn.vatcalc.noConflict = ->
$.fn.vatcalc = old
this
| 113121 | ###
vat-calc.coffee
Copyright (c) 2014-2016, <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
###
# @nodoc
$ = jQuery
# Class `VatCalculator` represents the client-side V.A.T calculator.
#
# @author <NAME>
# @version 0.9
# @since 0.9
#
class VatCalculator
#-- Private variables -------------------------
$ = jQuery
#-- Fields ------------------------------------
DEFAULT_OPTIONS =
accessKeys:
calculate: 'c'
gross: 'g'
input: 'i'
net: 'n'
vatRate: 'r'
currency: '€'
gross: false
labels:
calculate: 'Calculate'
gross: 'Gross'
net: 'Net'
vat: 'V.A.T'
vatRate: 'Rate'
point: '.'
precision: 2
taxRates: [7, 19]
#-- Constructor -------------------------------
# Creates a new calculator within the given element.
#
# @param [Element] element the given container element
# @param [Object] options any options that overwrite the default options
#
constructor: (element, options = {}) ->
@$element = $(element)
@options = $.extend {}, DEFAULT_OPTIONS, options
@_renderTemplate()
#-- Non-public methods ------------------------
# Calculates either the net or gross value depending on the input values.
#
# @private
#
_calculate: ->
options = @options
point = options.point
precision = options.precision
input = @$input.val().replace(point, '.')
input = '0' unless input
input = parseFloat input
rate = parseFloat @$rates.val()
if @$netGrossSwitch.is ':checked'
v = 100.0 + rate
vat = rate * input / v
res = 100.0 * input / v
else
vat = input * rate / 100.0
res = input + vat
@$vat.val @_format vat
@$result.val @_format res
return
# Formats the given numeric value respecting the decimal point and precision
# from the options.
#
# @param [Number] value the given numeric value
# @return [String] the formatted number
# @private
#
_format: (value) ->
options = @options
precision = options.precision
value = value.toFixed precision if precision >= 0
value.replace /\./, options.point
# Called if the toggle switch for net/gross values has been changed. The
# method changes the label of the output field and calculates the result.
#
# @param [Event] event any event data
# @private
#
_onChangeNetGross: (event) ->
$this = $(event.currentTarget)
$label = $this.parent()
if $this.is ':checked'
@$netGrossLabel.text $label.prev().text()
else
@$netGrossLabel.text $label.next().text()
@_calculate()
# Renders the Handlebars template that displays the calculator.
#
# @private
#
_renderTemplate: ->
html = Handlebars.templates['vat-calc']
options: @options
$el = @$element
.empty()
.html(html)
.on('click', 'button', =>
@_calculate()
false
)
.on('click', (event) -> event.stopPropagation())
.on('change', '.vatcalc-net-gross-switch', (event) =>
@_onChangeNetGross event
)
.on('change', '.vatcalc-vat-rates', => @_calculate())
.on('change', '.vatcalc-input', => @_calculate())
.on('change keyup', '.vatcalc-input', => @_calculate())
@$input = $el.find '.vatcalc-input'
@$netGrossSwitch = $el.find '.vatcalc-net-gross-switch'
@$rates = $el.find '.vatcalc-vat-rates'
@$vat = $el.find '.vatcalc-vat'
@$netGrossLabel = $el.find '.vatcalc-net-gross-label'
@$result = $el.find '.vatcalc-result'
Plugin = (option) ->
args = arguments
@each ->
$this = $(this)
data = $this.data 'bs.vatcalc'
unless data
$this.data 'bs.vatcalc', (data = new VatCalculator(this, args[0]))
# @nodoc
old = $.fn.vatcalc
# @nodoc
$.fn.vatcalc = Plugin
# @nodoc
$.fn.vatcalc.Constructor = VatCalculator
# @nodoc
$.fn.vatcalc.noConflict = ->
$.fn.vatcalc = old
this
| true | ###
vat-calc.coffee
Copyright (c) 2014-2016, PI:NAME:<NAME>END_PI
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
###
# @nodoc
$ = jQuery
# Class `VatCalculator` represents the client-side V.A.T calculator.
#
# @author PI:NAME:<NAME>END_PI
# @version 0.9
# @since 0.9
#
class VatCalculator
#-- Private variables -------------------------
$ = jQuery
#-- Fields ------------------------------------
DEFAULT_OPTIONS =
accessKeys:
calculate: 'c'
gross: 'g'
input: 'i'
net: 'n'
vatRate: 'r'
currency: '€'
gross: false
labels:
calculate: 'Calculate'
gross: 'Gross'
net: 'Net'
vat: 'V.A.T'
vatRate: 'Rate'
point: '.'
precision: 2
taxRates: [7, 19]
#-- Constructor -------------------------------
# Creates a new calculator within the given element.
#
# @param [Element] element the given container element
# @param [Object] options any options that overwrite the default options
#
constructor: (element, options = {}) ->
@$element = $(element)
@options = $.extend {}, DEFAULT_OPTIONS, options
@_renderTemplate()
#-- Non-public methods ------------------------
# Calculates either the net or gross value depending on the input values.
#
# @private
#
_calculate: ->
options = @options
point = options.point
precision = options.precision
input = @$input.val().replace(point, '.')
input = '0' unless input
input = parseFloat input
rate = parseFloat @$rates.val()
if @$netGrossSwitch.is ':checked'
v = 100.0 + rate
vat = rate * input / v
res = 100.0 * input / v
else
vat = input * rate / 100.0
res = input + vat
@$vat.val @_format vat
@$result.val @_format res
return
# Formats the given numeric value respecting the decimal point and precision
# from the options.
#
# @param [Number] value the given numeric value
# @return [String] the formatted number
# @private
#
_format: (value) ->
options = @options
precision = options.precision
value = value.toFixed precision if precision >= 0
value.replace /\./, options.point
# Called if the toggle switch for net/gross values has been changed. The
# method changes the label of the output field and calculates the result.
#
# @param [Event] event any event data
# @private
#
_onChangeNetGross: (event) ->
$this = $(event.currentTarget)
$label = $this.parent()
if $this.is ':checked'
@$netGrossLabel.text $label.prev().text()
else
@$netGrossLabel.text $label.next().text()
@_calculate()
# Renders the Handlebars template that displays the calculator.
#
# @private
#
_renderTemplate: ->
html = Handlebars.templates['vat-calc']
options: @options
$el = @$element
.empty()
.html(html)
.on('click', 'button', =>
@_calculate()
false
)
.on('click', (event) -> event.stopPropagation())
.on('change', '.vatcalc-net-gross-switch', (event) =>
@_onChangeNetGross event
)
.on('change', '.vatcalc-vat-rates', => @_calculate())
.on('change', '.vatcalc-input', => @_calculate())
.on('change keyup', '.vatcalc-input', => @_calculate())
@$input = $el.find '.vatcalc-input'
@$netGrossSwitch = $el.find '.vatcalc-net-gross-switch'
@$rates = $el.find '.vatcalc-vat-rates'
@$vat = $el.find '.vatcalc-vat'
@$netGrossLabel = $el.find '.vatcalc-net-gross-label'
@$result = $el.find '.vatcalc-result'
Plugin = (option) ->
args = arguments
@each ->
$this = $(this)
data = $this.data 'bs.vatcalc'
unless data
$this.data 'bs.vatcalc', (data = new VatCalculator(this, args[0]))
# @nodoc
old = $.fn.vatcalc
# @nodoc
$.fn.vatcalc = Plugin
# @nodoc
$.fn.vatcalc.Constructor = VatCalculator
# @nodoc
$.fn.vatcalc.noConflict = ->
$.fn.vatcalc = old
this
|
[
{
"context": "を鸚鵡返しする\n# hubot 何時 - 今の時刻を教えてくれる\n#\n# Author:\n# yagizombie <yanagihara+zombie@brainpad.co.jp>\n\nmodule.export",
"end": 170,
"score": 0.9996978044509888,
"start": 160,
"tag": "USERNAME",
"value": "yagizombie"
},
{
"context": "ubot 何時 - 今の時刻を教えてくれる\n#\n# Author:\n... | src/conomi-utils.coffee | yagizombie/hubot-conomi-utils | 0 | # Description
# ちょっとしたツールを収録
#
# Commands:
# hubot アダプター - 今使っているアダプターを返す
# hubot ご一緒に <text> - <text> を鸚鵡返しする
# hubot 何時 - 今の時刻を教えてくれる
#
# Author:
# yagizombie <yanagihara+zombie@brainpad.co.jp>
module.exports = (robot) ->
robot.respond /(アダプター|ADAPTER)$/i, (msg) ->
msg.send robot.adapterName
robot.respond /(ECHO|ご一緒に|せーの)(.*)$/i, (msg) ->
msg.send msg.match[2]
robot.respond /TIME|何時$/i, (msg) ->
days = ["日", "月", "火", "水", "木", "金", "土"]
d = new Date
m = d.getMonth() + 1
msg.send "えーっと、" + d.getFullYear() + "年" + m + "月" + d.getDate() + "日(" + days[d.getDay()] + ")" + d.getHours() + "時" + d.getMinutes() + "分" + d.getSeconds() + "秒 かな。。"
| 122913 | # Description
# ちょっとしたツールを収録
#
# Commands:
# hubot アダプター - 今使っているアダプターを返す
# hubot ご一緒に <text> - <text> を鸚鵡返しする
# hubot 何時 - 今の時刻を教えてくれる
#
# Author:
# yagizombie <<EMAIL>>
module.exports = (robot) ->
robot.respond /(アダプター|ADAPTER)$/i, (msg) ->
msg.send robot.adapterName
robot.respond /(ECHO|ご一緒に|せーの)(.*)$/i, (msg) ->
msg.send msg.match[2]
robot.respond /TIME|何時$/i, (msg) ->
days = ["日", "月", "火", "水", "木", "金", "土"]
d = new Date
m = d.getMonth() + 1
msg.send "えーっと、" + d.getFullYear() + "年" + m + "月" + d.getDate() + "日(" + days[d.getDay()] + ")" + d.getHours() + "時" + d.getMinutes() + "分" + d.getSeconds() + "秒 かな。。"
| true | # Description
# ちょっとしたツールを収録
#
# Commands:
# hubot アダプター - 今使っているアダプターを返す
# hubot ご一緒に <text> - <text> を鸚鵡返しする
# hubot 何時 - 今の時刻を教えてくれる
#
# Author:
# yagizombie <PI:EMAIL:<EMAIL>END_PI>
module.exports = (robot) ->
robot.respond /(アダプター|ADAPTER)$/i, (msg) ->
msg.send robot.adapterName
robot.respond /(ECHO|ご一緒に|せーの)(.*)$/i, (msg) ->
msg.send msg.match[2]
robot.respond /TIME|何時$/i, (msg) ->
days = ["日", "月", "火", "水", "木", "金", "土"]
d = new Date
m = d.getMonth() + 1
msg.send "えーっと、" + d.getFullYear() + "年" + m + "月" + d.getDate() + "日(" + days[d.getDay()] + ")" + d.getHours() + "時" + d.getMinutes() + "分" + d.getSeconds() + "秒 かな。。"
|
[
{
"context": "ails](/install/rails)\n- [Roda](https://github.com/adam12/roda-unpoly)\n- [Rack](https://github.com/adam12/r",
"end": 992,
"score": 0.9994257092475891,
"start": 986,
"tag": "USERNAME",
"value": "adam12"
},
{
"context": "m/adam12/roda-unpoly)\n- [Rack](https://github.com... | lib/assets/javascripts/unpoly/protocol.coffee | pfw/unpoly | 0 | ###**
Server protocol
===============
You rarely need to change server-side code to use Unpoly. You don't need
to provide a JSON API, or add extra routes for AJAX requests. The server simply renders
a series of full HTML pages, like it would without Unpoly.
There is an **optional** protocol your server may use to exchange additional information
when Unpoly is [updating fragments](/up.link). The protocol mostly works by adding
additional HTTP headers (like `X-Up-Target`) to requests and responses.
While the protocol can help you optimize performance and handle some edge cases,
implementing it is **entirely optional**. For instance, `unpoly.com` itself is a static site
that uses Unpoly on the frontend and doesn't even have an active server component.
## Existing implementations
You should be able to implement the protocol in a very short time.
There are existing implementations for various web frameworks:
- [Ruby on Rails](/install/rails)
- [Roda](https://github.com/adam12/roda-unpoly)
- [Rack](https://github.com/adam12/rack-unpoly) (Sinatra, Padrino, Hanami, Cuba, ...)
- [Phoenix](https://elixirforum.com/t/unpoly-a-framework-like-turbolinks/3614/15) (Elixir)
- [PHP](https://github.com/webstronauts/php-unpoly) (Symfony, Laravel, Stack)
@module up.protocol
###
up.protocol = do ->
u = up.util
e = up.element
headerize = (camel) ->
header = camel.replace /(^.|[A-Z])/g, (char) -> '-' + char.toUpperCase()
return 'X-Up' + header
extractHeader = (xhr, shortHeader, parseFn = u.identity) ->
if value = xhr.getResponseHeader(headerize(shortHeader))
return parseFn(value)
###**
This request header contains the current Unpoly version to mark this request as a fragment update.
Server-side code may check for the presence of an `X-Up-Version` header to
distinguish [fragment updates](/up.link) from full page loads.
The `X-Up-Version` header is guaranteed to be set for all [requests made through Unpoly](/up.request).
\#\#\# Example
```http
X-Up-Version: 1.0.0
```
@header X-Up-Version
@stable
###
###**
This request header contains the CSS selector targeted for a successful fragment update.
Server-side code is free to optimize its response by only rendering HTML
that matches the selector. For example, you might prefer to not render an
expensive sidebar if the sidebar is not targeted.
Unpoly will usually update a different selector in case the request fails.
This selector is sent as a second header, `X-Up-Fail-Target`.
The user may choose to not send this header by configuring
`up.network.config.requestMetaKeys`.
\#\#\# Example
```http
X-Up-Target: .menu
X-Up-Fail-Target: body
```
\#\#\# Changing the render target from the server
The server may change the render target context by including a CSS selector as an `X-Up-Target` header
in its response.
```http
Content-Type: text/html
X-Up-Target: .selector-from-server
<div class="selector-from-server">
...
</div>
```
The frontend will use the server-provided target for both successful (HTTP status `200 OK`)
and failed (status `4xx` or `5xx`) responses.
The server may also set a target of `:none` to have the frontend render nothing.
In this case no response body is required:
```http
Content-Type: text/html
X-Up-Target: :none
```
@header X-Up-Target
@stable
###
###**
This request header contains the CSS selector targeted for a failed fragment update.
A fragment update is considered *failed* if the server responds with a status code other than 2xx,
but still renders HTML.
Server-side code is free to optimize its response to a failed request by only rendering HTML
that matches the provided selector. For example, you might prefer to not render an
expensive sidebar if the sidebar is not targeted.
The user may choose to not send this header by configuring
`up.network.config.requestMetaKeys`.
\#\#\# Example
```http
X-Up-Target: .menu
X-Up-Fail-Target: body
```
\#\#\# Signaling failed form submissions
When [submitting a form via AJAX](/form-up-submit)
Unpoly needs to know whether the form submission has failed (to update the form with
validation errors) or succeeded (to update the `[up-target]` selector).
For Unpoly to be able to detect a failed form submission, the response must be
return a non-2xx HTTP status code. We recommend to use either
400 (bad request) or 422 (unprocessable entity).
To do so in [Ruby on Rails](http://rubyonrails.org/), pass a [`:status` option to `render`](http://guides.rubyonrails.org/layouts_and_rendering.html#the-status-option):
```ruby
class UsersController < ApplicationController
def create
user_params = params[:user].permit(:email, :password)
@user = User.new(user_params)
if @user.save?
sign_in @user
else
render 'form', status: :bad_request
end
end
end
```
@header X-Up-Fail-Target
@stable
###
###**
This request header contains the targeted layer's [mode](/up.layer.mode).
Server-side code is free to render different HTML for different modes.
For example, you might prefer to not render a site navigation for overlays.
The user may choose to not send this header by configuring
`up.network.config.requestMetaKeys`.
\#\#\# Example
```http
X-Up-Mode: drawer
```
@header X-Up-Mode
@stable
###
###**
This request header contains the [mode](/up.layer.mode) of the layer
targeted for a failed fragment update.
A fragment update is considered *failed* if the server responds with a
status code other than 2xx, but still renders HTML.
Server-side code is free to render different HTML for different modes.
For example, you might prefer to not render a site navigation for overlays.
The user may choose to not send this header by configuring
`up.network.config.requestMetaKeys`.
\#\#\# Example
```http
X-Up-Mode: drawer
X-Up-Fail-Mode: root
```
@header X-Up-Fail-Mode
@stable
###
clearCacheFromXHR = (xhr) ->
parseValue = (value) ->
switch value
when 'true'
true
when 'false'
false
else
value
extractHeader(xhr, 'clearCache', parseValue)
###**
The server may send this optional response header with the value `clear` to [clear the cache](/up.cache.clear).
\#\#\# Example
```http
X-Up-Cache: clear
```
@header X-Up-Cache
@param value
The string `"clear"`.
###
###**
This request header contains a timestamp of an existing fragment that is being [reloaded](/up.reload).
The timestamp must be explicitely set by the user as an `[up-time]` attribute on the fragment.
It should indicate the time when the fragment's underlying data was last changed.
See `[up-time]` for a detailed example.
\#\#\# Format
The time is encoded is the number of seconds elapsed since the [Unix epoch](https://en.wikipedia.org/wiki/Unix_time).
For instance, a modification date of December 23th, 1:40:18 PM UTC would produce the following header:
```http
X-Up-Target: .unread-count
X-Up-Reload-From-Time: 1608730818
```
If no timestamp is known, Unpoly will send a value of zero (`X-Up-Reload-From-Time: 0`).
@header X-Up-Reload-From-Time
@stable
###
contextFromXHR = (xhr) ->
extractHeader(xhr, 'context', JSON.parse)
###**
This request header contains the targeted layer's [context](/context), serialized as JSON.
The user may choose to not send this header by configuring
`up.network.config.requestMetaKeys`.
\#\#\# Example
```http
X-Up-Context: { "lives": 3 }
```
\#\#\# Updating context from the server
The server may update the layer context by sending a `X-Up-Context` response header with
changed key/value pairs:
```http
Content-Type: text/html
X-Up-Context: { "lives": 2 }
<html>
...
</html>
```
Upon seeing the response header, Unpoly will assign the server-provided context object to
the layer's context object, adding or replacing keys as needed.
Client-side context keys not mentioned in the response will remain unchanged.
There is no explicit protocol to *remove* keys from the context, but the server may send a key
with a `null` value to effectively remove a key.
The frontend will use the server-provided context upates for both successful (HTTP status `200 OK`)
and failed (status `4xx` or `5xx`) responses. If no `X-Up-Context` response header is set,
the updating layer's context will not be changed.
It is recommended that the server only places changed key/value pairs into the `X-Up-Context`
response header, and not echo the entire context object. Otherwise any client-side changes made while
the request was in flight will get overridden by the server-provided context.
@header X-Up-Context
@experimental
###
###**
This request header contains the [context](/context) of the layer
targeted for a failed fragment update, serialized as JSON.
A fragment update is considered *failed* if the server responds with a
status code other than 2xx, but still renders HTML.
Server-side code is free to render different HTML for different contexts.
For example, you might prefer to not render a site navigation for overlays.
The user may choose to not send this header by configuring
`up.network.config.requestMetaKeys`.
\#\#\# Example
```http
X-Up-Fail-Context: { "context": "Choose a company contact" }
```
@header X-Up-Fail-Context
@experimental
###
###**
@function up.protocol.methodFromXHR
@internal
###
methodFromXHR = (xhr) ->
extractHeader(xhr, 'method', u.normalizeMethod)
###**
The server may set this optional response header to change the browser location after a fragment update.
Without this header Unpoly will set the browser location to the response URL, which is usually sufficient.
When setting `X-Up-Location` it is recommended to also set `X-Up-Method`. If no `X-Up-Method` header is given
and the response's URL changed from the request's URL, Unpoly will assume a redirect and set the
method to `GET`.
\#\#\# Internet Explorer 11
There is an edge case on Internet Explorer 11, where Unpoly cannot detect the final URL after a redirect.
You can fix this edge case by delivering `X-Up-Location` and `X-Up-Method` headers with the *last* response
in a series of redirects.
The **simplest implementation** is to set these headers for every request.
\#\#\# Example
```http
X-Up-Location: /current-url
X-Up-Method: GET
```
@header X-Up-Location
@stable
###
###**
The server may set this optional response header to change the HTTP method after a fragment update.
Without this header Unpoly will assume a `GET` method if the response's URL changed from the request's URL,
\#\#\# Example
```http
X-Up-Location: /current-url
X-Up-Method: GET
```
@header X-Up-Method
@stable
###
###**
The server may set this optional response header to change the document title after a fragment update.
Without this header Unpoly will extract the `<title>` from the server response.
This header is useful when you [optimize your response](X-Up-Target) to not render
the application layout unless targeted. Since your optimized response
no longer includes a `<title>`, you can instead use this HTTP header to pass the document title.
\#\#\# Example
```http
X-Up-Title: Playlist browser
```
@header X-Up-Title
@stable
###
###**
This request header contains the `[name]` of a [form field being validated](/input-up-validate).
When seeing this header, the server is expected to validate (but not save)
the form submission and render a new copy of the form with validation errors.
See the documentation for [`input[up-validate]`](/input-up-validate) for more information
on how server-side validation works in Unpoly.
\#\#\# Example
Assume we have an auto-validating form field:
```html
<fieldset>
<input name="email" up-validate>
</fieldset>
```
When the input is changed, Unpoly will submit the form with an additional header:
```html
X-Up-Validate: email
```
@header X-Up-Validate
@stable
###
eventPlansFromXHR = (xhr) ->
extractHeader(xhr, 'events', JSON.parse)
###**
The server may set this response header to [emit events](/up.emit) with the
requested [fragment update](a-up-target).
The header value is a [JSON](https://en.wikipedia.org/wiki/JSON) array.
Each element in the array is a JSON object representing an event to be emitted
on the `document`.
The object property `{ "type" }` defines the event's [type](https://developer.mozilla.org/en-US/docs/Web/API/Event/type). Other properties become properties of the emitted
event object.
\#\#\# Example
```http
Content-Type: text/html
X-Up-Events: [{ "type": "user:created", "id": 5012 }, { "type": "signup:completed" }]
...
<html>
...
</html>
```
\#\#\# Emitting an event on a layer
Instead of emitting an event on the `document`, the server may also choose to
[emit the event on the layer being updated](/up.layer.emit). To do so, add a property
`{ "layer": "current" }` to the JSON object of an event:
```http
Content-Type: text/html
X-Up-Events: [{ "type": "user:created", "name:" "foobar", "layer": "current" }]
...
<html>
...
</html>
```
@header X-Up-Events
@stable
###
acceptLayerFromXHR = (xhr) ->
# Even if acceptance has no value, the server will send
# X-Up-Accept-Layer: null
extractHeader(xhr, 'acceptLayer', JSON.parse)
###**
The server may set this response header to [accept](/up.layer.accept) the targeted overlay
in response to a fragment update.
Upon seeing the header, Unpoly will cancel the fragment update and accept the layer instead.
If the root layer is targeted, the header is ignored and the fragment is updated with
the response's HTML content.
The header value is the acceptance value serialized as a JSON object.
To accept an overlay without value, set the header value to the string `null`.
\#\#\# Example
The response below will accept the targeted overlay with the value `{user_id: 1012 }`:
```http
Content-Type: text/html
X-Up-Accept-Layer: {"user_id": 1012}
<html>
...
</html>
```
\#\#\# Rendering content
The response may contain `text/html` content. If the root layer is targeted,
the `X-Up-Accept-Layer` header is ignored and the fragment is updated with
the response's HTML content.
If you know that an overlay will be closed don't want to render HTML,
have the server change the render target to `:none`:
```http
Content-Type: text/html
X-Up-Accept-Layer: {"user_id": 1012}
X-Up-Target: :none
```
@header X-Up-Accept-Layer
@stable
###
dismissLayerFromXHR = (xhr) ->
# Even if dismissal has no value, the server will send
# X-Up-Dismiss-Layer: null
extractHeader(xhr, 'dismissLayer', JSON.parse)
###**
The server may set this response header to [dismiss](/up.layer.dismiss) the targeted overlay
in response to a fragment update.
Upon seeing the header, Unpoly will cancel the fragment update and dismiss the layer instead.
If the root layer is targeted, the header is ignored and the fragment is updated with
the response's HTML content.
The header value is the dismissal value serialized as a JSON object.
To accept an overlay without value, set the header value to the string `null`.
\#\#\# Example
The response below will dismiss the targeted overlay without a dismissal value:
```http
HTTP/1.1 200 OK
Content-Type: text/html
X-Up-Dismiss-Layer: null
<html>
...
</html>
```
\#\#\# Rendering content
The response may contain `text/html` content. If the root layer is targeted,
the `X-Up-Accept-Layer` header is ignored and the fragment is updated with
the response's HTML content.
If you know that an overlay will be closed don't want to render HTML,
have the server change the render target to `:none`:
```http
HTTP/1.1 200 OK
Content-Type: text/html
X-Up-Accept-Layer: {"user_id": 1012}
X-Up-Target: :none
```
@header X-Up-Dismiss-Layer
@stable
###
###**
Server-side companion libraries like unpoly-rails set this cookie so we
have a way to detect the request method of the initial page load.
There is no JavaScript API for this.
@function up.protocol.initialRequestMethod
@internal
###
initialRequestMethod = u.memoize ->
return u.normalizeMethod(up.browser.popCookie('_up_method'))
###**
The server may set this optional cookie to echo the HTTP method of the initial request.
If the initial page was loaded with a non-`GET` HTTP method, Unpoly prefers to make a full
page load when you try to update a fragment. Once the next page was loaded with a `GET` method,
Unpoly will again update fragments.
This fixes two edge cases you might or might not care about:
1. Unpoly replaces the initial page state so it can later restore it when the user
goes back to that initial URL. However, if the initial request was a POST,
Unpoly will wrongly assume that it can restore the state by reloading with GET.
2. Some browsers have a bug where the initial request method is used for all
subsequently pushed states. That means if the user reloads the page on a later
GET state, the browser will wrongly attempt a POST request.
This issue affects Safari 9-12 (last tested in 2019-03).
Modern Firefoxes, Chromes and IE10+ don't have this behavior.
In order to allow Unpoly to detect the HTTP method of the initial page load,
the server must set a cookie:
```http
Set-Cookie: _up_method=POST
```
When Unpoly boots it will look for this cookie and configure itself accordingly.
The cookie is then deleted in order to not affect following requests.
The **simplest implementation** is to set this cookie for every request that is neither
`GET` nor an [Unpoly request](/X-Up-Version). For all other requests
an existing `_up_method` cookie should be deleted.
@cookie _up_method
@stable
###
###**
@function up.protocol.locationFromXHR
@internal
###
# Returns the effective response URL for the given XHR.
locationFromXHR = (xhr) ->
  # We prefer the X-Up-Location header to xhr.responseURL.
  # If the server redirected to a new location, Unpoly-related headers
  # will be encoded in the request's query params like this:
  #
  #     /redirect-target?_up[target]=.foo
  #
  # To prevent these `_up` params from showing up in the browser URL,
  # the X-Up-Location header will omit these params while `xhr.responseURL`
  # will still contain them.
  extractHeader(xhr, 'location') || xhr.responseURL
###**
@function up.protocol.titleFromXHR
@internal
###
# Returns the document title sent via the X-Up-Title response header,
# or undefined if the header is missing.
titleFromXHR = (xhr) -> extractHeader xhr, 'title'
###**
@function up.protocol.targetFromXHR
@internal
###
targetFromXHR = (xhr) ->
  # Returns the CSS selector from the X-Up-Target response header, if any.
  extractHeader(xhr, 'target')
###**
Configures strings used in the optional [server protocol](/up.protocol).
@property up.protocol.config
@param {string} [config.csrfHeader='X-CSRF-Token']
The name of the HTTP header that will include the
[CSRF token](https://en.wikipedia.org/wiki/Cross-site_request_forgery#Synchronizer_token_pattern)
for AJAX requests.
@param {string|Function(): string} [config.csrfParam]
The `name` of the hidden `<input>` used for sending a
[CSRF token](https://en.wikipedia.org/wiki/Cross-site_request_forgery#Synchronizer_token_pattern) when
submitting a default, non-AJAX form. For AJAX requests the token is sent as an
[HTTP header](/up.protocol.config#config.csrfHeader) instead.
The parameter name can be configured as a string or as function that returns the parameter name.
If no name is set, no token will be sent.
Defaults to the `content` attribute of a `<meta>` tag named `csrf-param`:
```html
<meta name="csrf-param" content="authenticity_token" />
```
@param {string|Function(): string} [config.csrfToken]
The [CSRF token](https://en.wikipedia.org/wiki/Cross-site_request_forgery#Synchronizer_token_pattern)
to send for unsafe requests. The token will be sent as either a HTTP header (for AJAX requests)
or hidden form `<input>` (for default, non-AJAX form submissions).
The token can either be configured as a string or as function that returns the token.
If no token is set, no token will be sent.
Defaults to the `content` attribute of a `<meta>` tag named `csrf-token`:
```
<meta name='csrf-token' content='secret12345'>
```
@param {string} [config.methodParam='_method']
The name of request parameter containing the original request method when Unpoly needs to wrap
the method.
Methods must be wrapped when making a [full page request](/up.browser.loadPage) with a methods other
than GET or POST. In this case Unpoly will make a POST request with the original request method
in a form parameter named `_method`:
```http
POST /test HTTP/1.1
Host: example.com
Content-Type: application/x-www-form-urlencoded
Content-Length: 11
_method=PUT
```
@stable
###
# Default configuration for the server protocol (see docstring above).
config = new up.Config ->
  methodParam: '_method' # also see up.network.config.methodParam
  csrfParam: -> e.metaContent('csrf-param') # this really must stay configurable; possibly move to up.network.config.csrfParam
  csrfToken: -> e.metaContent('csrf-token') # this really must stay configurable; possibly move to up.network.config.csrfToken
  csrfHeader: 'X-CSRF-Token' # MUST remain configurable; other frameworks use X-XSRF-Token
# Returns the configured CSRF header name.
# The option may be a string or a function returning a string,
# hence the u.evalOption() call.
csrfHeader = ->
  u.evalOption(config.csrfHeader)
# Returns the configured CSRF param name (string or function result).
csrfParam = ->
  u.evalOption(config.csrfParam)
# Returns the configured CSRF token value (string or function result).
csrfToken = ->
  u.evalOption(config.csrfToken)
###**
@internal
###
wrapMethod = (method, params) ->
  # Remember the original HTTP method in a form param
  # (config.methodParam, defaults to '_method').
  params.add(config.methodParam, method)
  # The wrapped request itself is always sent as POST.
  return 'POST'
# Resets the protocol config to its defaults.
# Wired below to the `up:framework:reset` event.
reset = ->
  config.reset()
up.on 'up:framework:reset', reset
# Public API object returned from the enclosing `do ->`,
# i.e. what becomes `up.protocol`.
config: config
reset: reset
locationFromXHR: locationFromXHR
titleFromXHR: titleFromXHR
targetFromXHR: targetFromXHR
methodFromXHR: methodFromXHR
acceptLayerFromXHR: acceptLayerFromXHR
contextFromXHR: contextFromXHR
dismissLayerFromXHR: dismissLayerFromXHR
eventPlansFromXHR: eventPlansFromXHR
clearCacheFromXHR: clearCacheFromXHR
csrfHeader: csrfHeader
csrfParam: csrfParam
csrfToken: csrfToken
initialRequestMethod: initialRequestMethod
headerize: headerize
wrapMethod: wrapMethod
| 63321 | ###**
Server protocol
===============
You rarely need to change server-side code to use Unpoly. You don't need
to provide a JSON API, or add extra routes for AJAX requests. The server simply renders
a series of full HTML pages, like it would without Unpoly.
There is an **optional** protocol your server may use to exchange additional information
when Unpoly is [updating fragments](/up.link). The protocol mostly works by adding
additional HTTP headers (like `X-Up-Target`) to requests and responses.
While the protocol can help you optimize performance and handle some edge cases,
implementing it is **entirely optional**. For instance, `unpoly.com` itself is a static site
that uses Unpoly on the frontend and doesn't even have an active server component.
## Existing implementations
You should be able to implement the protocol in a very short time.
There are existing implementations for various web frameworks:
- [Ruby on Rails](/install/rails)
- [Roda](https://github.com/adam12/roda-unpoly)
- [Rack](https://github.com/adam12/rack-unpoly) (Sinatra, Padrino, Hanami, Cuba, ...)
- [Phoenix](https://elixirforum.com/t/unpoly-a-framework-like-turbolinks/3614/15) (Elixir)
- [PHP](https://github.com/webstronauts/php-unpoly) (Symfony, Laravel, Stack)
@module up.protocol
###
# Module pattern: the object literal at the end of this `do ->`
# becomes the public `up.protocol` API.
up.protocol = do ->
  # Short aliases for helper namespaces used throughout this module.
  u = up.util
  e = up.element
# Converts a camelCase short name into its full X-Up-* HTTP header name,
# e.g. 'acceptLayer' => 'X-Up-Accept-Layer'.
headerize = (camel) ->
  'X-Up' + camel.replace(/(^.|[A-Z])/g, (char) -> "-#{char.toUpperCase()}")
# Reads the X-Up-* response header for the given camelCase short name
# (e.g. 'acceptLayer' => 'X-Up-Accept-Layer') from the given XHR.
# Returns the header value passed through parseFn, or undefined if the
# header is missing or empty.
extractHeader = (xhr, shortHeader, parseFn = u.identity) ->
  # Conditional assignment: `value` is set and tested in one step.
  if value = xhr.getResponseHeader(headerize(shortHeader))
    return parseFn(value)
###**
This request header contains the current Unpoly version to mark this request as a fragment update.
Server-side code may check for the presence of an `X-Up-Version` header to
distinguish [fragment updates](/up.link) from full page loads.
The `X-Up-Version` header is guaranteed to be set for all [requests made through Unpoly](/up.request).
\#\#\# Example
```http
X-Up-Version: 1.0.0
```
@header X-Up-Version
@stable
###
###**
This request header contains the CSS selector targeted for a successful fragment update.
Server-side code is free to optimize its response by only rendering HTML
that matches the selector. For example, you might prefer to not render an
expensive sidebar if the sidebar is not targeted.
Unpoly will usually update a different selector in case the request fails.
This selector is sent as a second header, `X-Up-Fail-Target`.
The user may choose to not send this header by configuring
`up.network.config.requestMetaKeys`.
\#\#\# Example
```http
X-Up-Target: .menu
X-Up-Fail-Target: body
```
\#\#\# Changing the render target from the server
The server may change the render target context by including a CSS selector as an `X-Up-Target` header
in its response.
```http
Content-Type: text/html
X-Up-Target: .selector-from-server
<div class="selector-from-server">
...
</div>
```
The frontend will use the server-provided target for both successful (HTTP status `200 OK`)
and failed (status `4xx` or `5xx`) responses.
The server may also set a target of `:none` to have the frontend render nothing.
In this case no response body is required:
```http
Content-Type: text/html
X-Up-Target: :none
```
@header X-Up-Target
@stable
###
###**
This request header contains the CSS selector targeted for a failed fragment update.
A fragment update is considered *failed* if the server responds with a status code other than 2xx,
but still renders HTML.
Server-side code is free to optimize its response to a failed request by only rendering HTML
that matches the provided selector. For example, you might prefer to not render an
expensive sidebar if the sidebar is not targeted.
The user may choose to not send this header by configuring
`up.network.config.requestMetaKeys`.
\#\#\# Example
```http
X-Up-Target: .menu
X-Up-Fail-Target: body
```
\#\#\# Signaling failed form submissions
When [submitting a form via AJAX](/form-up-submit)
Unpoly needs to know whether the form submission has failed (to update the form with
validation errors) or succeeded (to update the `[up-target]` selector).
For Unpoly to be able to detect a failed form submission, the response must
return a non-2xx HTTP status code. We recommend using either
400 (bad request) or 422 (unprocessable entity).
To do so in [Ruby on Rails](http://rubyonrails.org/), pass a [`:status` option to `render`](http://guides.rubyonrails.org/layouts_and_rendering.html#the-status-option):
```ruby
class UsersController < ApplicationController
def create
user_params = params[:user].permit(:email, :password)
@user = User.new(user_params)
if @user.save?
sign_in @user
else
render 'form', status: :bad_request
end
end
end
```
@header X-Up-Fail-Target
@stable
###
###**
This request header contains the targeted layer's [mode](/up.layer.mode).
Server-side code is free to render different HTML for different modes.
For example, you might prefer to not render a site navigation for overlays.
The user may choose to not send this header by configuring
`up.network.config.requestMetaKeys`.
\#\#\# Example
```http
X-Up-Mode: drawer
```
@header X-Up-Mode
@stable
###
###**
This request header contains the [mode](/up.layer.mode) of the layer
targeted for a failed fragment update.
A fragment update is considered *failed* if the server responds with a
status code other than 2xx, but still renders HTML.
Server-side code is free to render different HTML for different modes.
For example, you might prefer to not render a site navigation for overlays.
The user may choose to not send this header by configuring
`up.network.config.requestMetaKeys`.
\#\#\# Example
```http
X-Up-Mode: drawer
X-Up-Fail-Mode: root
```
@header X-Up-Fail-Mode
@stable
###
# Parses the X-Up-Clear-Cache response header.
# The literal strings 'true'/'false' are converted to booleans;
# any other header value is passed through unchanged.
clearCacheFromXHR = (xhr) ->
  parseValue = (value) ->
    if value is 'true'
      true
    else if value is 'false'
      false
    else
      value
  extractHeader(xhr, 'clearCache', parseValue)
###**
The server may send this optional response header with the value `clear` to [clear the cache](/up.cache.clear).
\#\#\# Example
```http
X-Up-Cache: clear
```
@header X-Up-Cache
@param value
The string `"clear"`.
###
###**
This request header contains a timestamp of an existing fragment that is being [reloaded](/up.reload).
The timestamp must be explicitly set by the user as an `[up-time]` attribute on the fragment.
It should indicate the time when the fragment's underlying data was last changed.
See `[up-time]` for a detailed example.
\#\#\# Format
The time is encoded as the number of seconds elapsed since the [Unix epoch](https://en.wikipedia.org/wiki/Unix_time).
For instance, a modification date of December 23rd, 1:40:18 PM UTC would produce the following header:
```http
X-Up-Target: .unread-count
X-Up-Reload-From-Time: 1608730818
```
If no timestamp is known, Unpoly will send a value of zero (`X-Up-Reload-From-Time: 0`).
@header X-Up-Reload-From-Time
@stable
###
contextFromXHR = (xhr) ->
  # Parses the X-Up-Context response header (JSON) into an object.
  # Returns undefined if the header is not set.
  extractHeader(xhr, 'context', JSON.parse)
###**
This request header contains the targeted layer's [context](/context), serialized as JSON.
The user may choose to not send this header by configuring
`up.network.config.requestMetaKeys`.
\#\#\# Example
```http
X-Up-Context: { "lives": 3 }
```
\#\#\# Updating context from the server
The server may update the layer context by sending a `X-Up-Context` response header with
changed key/value pairs:
```http
Content-Type: text/html
X-Up-Context: { "lives": 2 }
<html>
...
</html>
```
Upon seeing the response header, Unpoly will assign the server-provided context object to
the layer's context object, adding or replacing keys as needed.
Client-side context keys not mentioned in the response will remain unchanged.
There is no explicit protocol to *remove* keys from the context, but the server may send a key
with a `null` value to effectively remove a key.
The frontend will use the server-provided context updates for both successful (HTTP status `200 OK`)
and failed (status `4xx` or `5xx`) responses. If no `X-Up-Context` response header is set,
the updating layer's context will not be changed.
It is recommended that the server only places changed key/value pairs into the `X-Up-Context`
response header, and not echo the entire context object. Otherwise any client-side changes made while
the request was in flight will get overridden by the server-provided context.
@header X-Up-Context
@experimental
###
###**
This request header contains the [context](/context) of the layer
targeted for a failed fragment update, serialized as JSON.
A fragment update is considered *failed* if the server responds with a
status code other than 2xx, but still renders HTML.
Server-side code is free to render different HTML for different contexts.
For example, you might prefer to not render a site navigation for overlays.
The user may choose to not send this header by configuring
`up.network.config.requestMetaKeys`.
\#\#\# Example
```http
X-Up-Fail-Context: { "context": "Choose a company contact" }
```
@header X-Up-Fail-Context
@experimental
###
###**
@function up.protocol.methodFromXHR
@internal
###
methodFromXHR = (xhr) ->
  # Pass the X-Up-Method header value through u.normalizeMethod so it
  # matches Unpoly's canonical method format.
  extractHeader(xhr, 'method', u.normalizeMethod)
###**
The server may set this optional response header to change the browser location after a fragment update.
Without this header Unpoly will set the browser location to the response URL, which is usually sufficient.
When setting `X-Up-Location` it is recommended to also set `X-Up-Method`. If no `X-Up-Method` header is given
and the response's URL changed from the request's URL, Unpoly will assume a redirect and set the
method to `GET`.
\#\#\# Internet Explorer 11
There is an edge case on Internet Explorer 11, where Unpoly cannot detect the final URL after a redirect.
You can fix this edge case by delivering `X-Up-Location` and `X-Up-Method` headers with the *last* response
in a series of redirects.
The **simplest implementation** is to set these headers for every request.
\#\#\# Example
```http
X-Up-Location: /current-url
X-Up-Method: GET
```
@header X-Up-Location
@stable
###
###**
The server may set this optional response header to change the HTTP method after a fragment update.
Without this header Unpoly will assume a `GET` method if the response's URL changed from the request's URL,
\#\#\# Example
```http
X-Up-Location: /current-url
X-Up-Method: GET
```
@header X-Up-Method
@stable
###
###**
The server may set this optional response header to change the document title after a fragment update.
Without this header Unpoly will extract the `<title>` from the server response.
This header is useful when you [optimize your response](X-Up-Target) to not render
the application layout unless targeted. Since your optimized response
no longer includes a `<title>`, you can instead use this HTTP header to pass the document title.
\#\#\# Example
```http
X-Up-Title: Playlist browser
```
@header X-Up-Title
@stable
###
###**
This request header contains the `[name]` of a [form field being validated](/input-up-validate).
When seeing this header, the server is expected to validate (but not save)
the form submission and render a new copy of the form with validation errors.
See the documentation for [`input[up-validate]`](/input-up-validate) for more information
on how server-side validation works in Unpoly.
\#\#\# Example
Assume we have an auto-validating form field:
```html
<fieldset>
<input name="email" up-validate>
</fieldset>
```
When the input is changed, Unpoly will submit the form with an additional header:
```html
X-Up-Validate: email
```
@header X-Up-Validate
@stable
###
eventPlansFromXHR = (xhr) ->
  # Parses the X-Up-Events response header (a JSON array of event plans).
  # Returns undefined if the header is not set.
  extractHeader(xhr, 'events', JSON.parse)
###**
The server may set this response header to [emit events](/up.emit) with the
requested [fragment update](a-up-target).
The header value is a [JSON](https://en.wikipedia.org/wiki/JSON) array.
Each element in the array is a JSON object representing an event to be emitted
on the `document`.
The object property `{ "type" }` defines the event's [type](https://developer.mozilla.org/en-US/docs/Web/API/Event/type). Other properties become properties of the emitted
event object.
\#\#\# Example
```http
Content-Type: text/html
X-Up-Events: [{ "type": "user:created", "id": 5012 }, { "type": "signup:completed" }]
...
<html>
...
</html>
```
\#\#\# Emitting an event on a layer
Instead of emitting an event on the `document`, the server may also choose to
[emit the event on the layer being updated](/up.layer.emit). To do so, add a property
`{ "layer": "current" }` to the JSON object of an event:
```http
Content-Type: text/html
X-Up-Events: [{ "type": "user:created", "name:" "foobar", "layer": "current" }]
...
<html>
...
</html>
```
@header X-Up-Events
@stable
###
# Parses the X-Up-Accept-Layer response header (JSON acceptance value).
acceptLayerFromXHR = (xhr) ->
  # Even if acceptance has no value, the server will send
  # X-Up-Accept-Layer: null, which JSON.parse turns into `null`.
  # A missing header yields undefined instead.
  extractHeader(xhr, 'acceptLayer', JSON.parse)
###**
The server may set this response header to [accept](/up.layer.accept) the targeted overlay
in response to a fragment update.
Upon seeing the header, Unpoly will cancel the fragment update and accept the layer instead.
If the root layer is targeted, the header is ignored and the fragment is updated with
the response's HTML content.
The header value is the acceptance value serialized as a JSON object.
To accept an overlay without value, set the header value to the string `null`.
\#\#\# Example
The response below will accept the targeted overlay with the value `{user_id: 1012 }`:
```http
Content-Type: text/html
X-Up-Accept-Layer: {"user_id": 1012}
<html>
...
</html>
```
\#\#\# Rendering content
The response may contain `text/html` content. If the root layer is targeted,
the `X-Up-Accept-Layer` header is ignored and the fragment is updated with
the response's HTML content.
If you know that an overlay will be closed and don't want to render HTML,
have the server change the render target to `:none`:
```http
Content-Type: text/html
X-Up-Accept-Layer: {"user_id": 1012}
X-Up-Target: :none
```
@header X-Up-Accept-Layer
@stable
###
# Parses the X-Up-Dismiss-Layer response header (JSON dismissal value).
dismissLayerFromXHR = (xhr) ->
  # Even if dismissal has no value, the server will send
  # X-Up-Dismiss-Layer: null, which JSON.parse turns into `null`.
  # A missing header yields undefined instead.
  extractHeader(xhr, 'dismissLayer', JSON.parse)
###**
The server may set this response header to [dismiss](/up.layer.dismiss) the targeted overlay
in response to a fragment update.
Upon seeing the header, Unpoly will cancel the fragment update and dismiss the layer instead.
If the root layer is targeted, the header is ignored and the fragment is updated with
the response's HTML content.
The header value is the dismissal value serialized as a JSON object.
To accept an overlay without value, set the header value to the string `null`.
\#\#\# Example
The response below will dismiss the targeted overlay without a dismissal value:
```http
HTTP/1.1 200 OK
Content-Type: text/html
X-Up-Dismiss-Layer: null
<html>
...
</html>
```
\#\#\# Rendering content
The response may contain `text/html` content. If the root layer is targeted,
the `X-Up-Accept-Layer` header is ignored and the fragment is updated with
the response's HTML content.
If you know that an overlay will be closed and don't want to render HTML,
have the server change the render target to `:none`:
```http
HTTP/1.1 200 OK
Content-Type: text/html
X-Up-Accept-Layer: {"user_id": 1012}
X-Up-Target: :none
```
@header X-Up-Dismiss-Layer
@stable
###
###**
Server-side companion libraries like unpoly-rails set this cookie so we
have a way to detect the request method of the initial page load.
There is no JavaScript API for this.
@function up.protocol.initialRequestMethod
@internal
###
initialRequestMethod = u.memoize ->
  # popCookie presumably deletes the `_up_method` cookie after reading it,
  # hence the memoization — TODO confirm in up.browser.popCookie().
  return u.normalizeMethod(up.browser.popCookie('_up_method'))
###**
The server may set this optional cookie to echo the HTTP method of the initial request.
If the initial page was loaded with a non-`GET` HTTP method, Unpoly prefers to make a full
page load when you try to update a fragment. Once the next page was loaded with a `GET` method,
Unpoly will again update fragments.
This fixes two edge cases you might or might not care about:
1. Unpoly replaces the initial page state so it can later restore it when the user
goes back to that initial URL. However, if the initial request was a POST,
Unpoly will wrongly assume that it can restore the state by reloading with GET.
2. Some browsers have a bug where the initial request method is used for all
subsequently pushed states. That means if the user reloads the page on a later
GET state, the browser will wrongly attempt a POST request.
This issue affects Safari 9-12 (last tested in 2019-03).
Modern Firefoxes, Chromes and IE10+ don't have this behavior.
In order to allow Unpoly to detect the HTTP method of the initial page load,
the server must set a cookie:
```http
Set-Cookie: _up_method=POST
```
When Unpoly boots it will look for this cookie and configure itself accordingly.
The cookie is then deleted in order to not affect following requests.
The **simplest implementation** is to set this cookie for every request that is neither
`GET` nor an [Unpoly request](/X-Up-Version). For all other requests
an existing `_up_method` cookie should be deleted.
@cookie _up_method
@stable
###
###**
@function up.protocol.locationFromXHR
@internal
###
# Returns the effective response URL for the given XHR.
locationFromXHR = (xhr) ->
  # We prefer the X-Up-Location header to xhr.responseURL.
  # If the server redirected to a new location, Unpoly-related headers
  # will be encoded in the request's query params like this:
  #
  #     /redirect-target?_up[target]=.foo
  #
  # To prevent these `_up` params from showing up in the browser URL,
  # the X-Up-Location header will omit these params while `xhr.responseURL`
  # will still contain them.
  extractHeader(xhr, 'location') || xhr.responseURL
###**
@function up.protocol.titleFromXHR
@internal
###
titleFromXHR = (xhr) ->
  # Returns the document title from the X-Up-Title response header, if any.
  extractHeader(xhr, 'title')
###**
@function up.protocol.targetFromXHR
@internal
###
targetFromXHR = (xhr) ->
  # Returns the CSS selector from the X-Up-Target response header, if any.
  extractHeader(xhr, 'target')
###**
Configures strings used in the optional [server protocol](/up.protocol).
@property up.protocol.config
@param {string} [config.csrfHeader='X-CSRF-Token']
The name of the HTTP header that will include the
[CSRF token](https://en.wikipedia.org/wiki/Cross-site_request_forgery#Synchronizer_token_pattern)
for AJAX requests.
@param {string|Function(): string} [config.csrfParam]
The `name` of the hidden `<input>` used for sending a
[CSRF token](https://en.wikipedia.org/wiki/Cross-site_request_forgery#Synchronizer_token_pattern) when
submitting a default, non-AJAX form. For AJAX requests the token is sent as an
[HTTP header](/up.protocol.config#config.csrfHeader) instead.
The parameter name can be configured as a string or as function that returns the parameter name.
If no name is set, no token will be sent.
Defaults to the `content` attribute of a `<meta>` tag named `csrf-param`:
```html
<meta name="csrf-param" content="authenticity_token" />
```
@param {string|Function(): string} [config.csrfToken]
The [CSRF token](https://en.wikipedia.org/wiki/Cross-site_request_forgery#Synchronizer_token_pattern)
to send for unsafe requests. The token will be sent as either a HTTP header (for AJAX requests)
or hidden form `<input>` (for default, non-AJAX form submissions).
The token can either be configured as a string or as function that returns the token.
If no token is set, no token will be sent.
Defaults to the `content` attribute of a `<meta>` tag named `csrf-token`:
```
<meta name='csrf-token' content='secret12345'>
```
@param {string} [config.methodParam='_method']
The name of request parameter containing the original request method when Unpoly needs to wrap
the method.
Methods must be wrapped when making a [full page request](/up.browser.loadPage) with a methods other
than GET or POST. In this case Unpoly will make a POST request with the original request method
in a form parameter named `_method`:
```http
POST /test HTTP/1.1
Host: example.com
Content-Type: application/x-www-form-urlencoded
Content-Length: 11
_method=PUT
```
@stable
###
# Default configuration for the server protocol (see docstring above).
config = new up.Config ->
  methodParam: '_method' # also see up.network.config.methodParam
  csrfParam: -> e.metaContent('csrf-param') # this really must stay configurable; possibly move to up.network.config.csrfParam
  csrfToken: -> e.metaContent('csrf-token') # this really must stay configurable; possibly move to up.network.config.csrfToken
  csrfHeader: 'X-CSRF-Token' # MUST remain configurable; other frameworks use X-XSRF-Token
# Returns the configured CSRF header name.
# The option may be a string or a function returning a string,
# hence the u.evalOption() call.
csrfHeader = ->
  u.evalOption(config.csrfHeader)
# Returns the configured CSRF param name (string or function result).
csrfParam = ->
  u.evalOption(config.csrfParam)
# Returns the configured CSRF token value (string or function result).
csrfToken = ->
  u.evalOption(config.csrfToken)
###**
@internal
###
wrapMethod = (method, params) ->
  # Remember the original HTTP method in a form param
  # (config.methodParam, defaults to '_method').
  params.add(config.methodParam, method)
  # The wrapped request itself is always sent as POST.
  return 'POST'
# Resets the protocol config to its defaults.
# Wired below to the `up:framework:reset` event.
reset = ->
  config.reset()
up.on 'up:framework:reset', reset
# Public API object returned from the enclosing `do ->`,
# i.e. what becomes `up.protocol`.
config: config
reset: reset
locationFromXHR: locationFromXHR
titleFromXHR: titleFromXHR
targetFromXHR: targetFromXHR
methodFromXHR: methodFromXHR
acceptLayerFromXHR: acceptLayerFromXHR
contextFromXHR: contextFromXHR
dismissLayerFromXHR: dismissLayerFromXHR
eventPlansFromXHR: eventPlansFromXHR
clearCacheFromXHR: clearCacheFromXHR
csrfHeader: csrfHeader
csrfParam: csrfParam
csrfToken: csrfToken
initialRequestMethod: initialRequestMethod
headerize: headerize
wrapMethod: wrapMethod
| true | ###**
Server protocol
===============
You rarely need to change server-side code to use Unpoly. You don't need
to provide a JSON API, or add extra routes for AJAX requests. The server simply renders
a series of full HTML pages, like it would without Unpoly.
There is an **optional** protocol your server may use to exchange additional information
when Unpoly is [updating fragments](/up.link). The protocol mostly works by adding
additional HTTP headers (like `X-Up-Target`) to requests and responses.
While the protocol can help you optimize performance and handle some edge cases,
implementing it is **entirely optional**. For instance, `unpoly.com` itself is a static site
that uses Unpoly on the frontend and doesn't even have an active server component.
## Existing implementations
You should be able to implement the protocol in a very short time.
There are existing implementations for various web frameworks:
- [Ruby on Rails](/install/rails)
- [Roda](https://github.com/adam12/roda-unpoly)
- [Rack](https://github.com/adam12/rack-unpoly) (Sinatra, Padrino, Hanami, Cuba, ...)
- [Phoenix](https://elixirforum.com/t/unpoly-a-framework-like-turbolinks/3614/15) (Elixir)
- [PHP](https://github.com/webstronauts/php-unpoly) (Symfony, Laravel, Stack)
@module up.protocol
###
# Module pattern: the object literal at the end of this `do ->`
# becomes the public `up.protocol` API.
up.protocol = do ->
  # Short aliases for helper namespaces used throughout this module.
  u = up.util
  e = up.element
# Converts a camelCase short name to its full HTTP header name:
# the first character and every uppercase character get a '-' prefix
# and are upcased, then 'X-Up' is prepended.
# E.g. 'acceptLayer' => 'X-Up-Accept-Layer'.
headerize = (camel) ->
  header = camel.replace /(^.|[A-Z])/g, (char) -> '-' + char.toUpperCase()
  return 'X-Up' + header
# Reads the X-Up-* response header for the given camelCase short name.
# Returns the value passed through parseFn, or undefined when the header
# is absent or empty.
extractHeader = (xhr, shortHeader, parseFn = u.identity) ->
  value = xhr.getResponseHeader(headerize(shortHeader))
  parseFn(value) if value
###**
This request header contains the current Unpoly version to mark this request as a fragment update.
Server-side code may check for the presence of an `X-Up-Version` header to
distinguish [fragment updates](/up.link) from full page loads.
The `X-Up-Version` header is guaranteed to be set for all [requests made through Unpoly](/up.request).
\#\#\# Example
```http
X-Up-Version: 1.0.0
```
@header X-Up-Version
@stable
###
###**
This request header contains the CSS selector targeted for a successful fragment update.
Server-side code is free to optimize its response by only rendering HTML
that matches the selector. For example, you might prefer to not render an
expensive sidebar if the sidebar is not targeted.
Unpoly will usually update a different selector in case the request fails.
This selector is sent as a second header, `X-Up-Fail-Target`.
The user may choose to not send this header by configuring
`up.network.config.requestMetaKeys`.
\#\#\# Example
```http
X-Up-Target: .menu
X-Up-Fail-Target: body
```
\#\#\# Changing the render target from the server
The server may change the render target context by including a CSS selector as an `X-Up-Target` header
in its response.
```http
Content-Type: text/html
X-Up-Target: .selector-from-server
<div class="selector-from-server">
...
</div>
```
The frontend will use the server-provided target for both successful (HTTP status `200 OK`)
and failed (status `4xx` or `5xx`) responses.
The server may also set a target of `:none` to have the frontend render nothing.
In this case no response body is required:
```http
Content-Type: text/html
X-Up-Target: :none
```
@header X-Up-Target
@stable
###
###**
This request header contains the CSS selector targeted for a failed fragment update.
A fragment update is considered *failed* if the server responds with a status code other than 2xx,
but still renders HTML.
Server-side code is free to optimize its response to a failed request by only rendering HTML
that matches the provided selector. For example, you might prefer to not render an
expensive sidebar if the sidebar is not targeted.
The user may choose to not send this header by configuring
`up.network.config.requestMetaKeys`.
\#\#\# Example
```http
X-Up-Target: .menu
X-Up-Fail-Target: body
```
\#\#\# Signaling failed form submissions
When [submitting a form via AJAX](/form-up-submit)
Unpoly needs to know whether the form submission has failed (to update the form with
validation errors) or succeeded (to update the `[up-target]` selector).
For Unpoly to be able to detect a failed form submission, the response must
return a non-2xx HTTP status code. We recommend using either
400 (bad request) or 422 (unprocessable entity).
To do so in [Ruby on Rails](http://rubyonrails.org/), pass a [`:status` option to `render`](http://guides.rubyonrails.org/layouts_and_rendering.html#the-status-option):
```ruby
class UsersController < ApplicationController
def create
user_params = params[:user].permit(:email, :password)
@user = User.new(user_params)
if @user.save?
sign_in @user
else
render 'form', status: :bad_request
end
end
end
```
@header X-Up-Fail-Target
@stable
###
###**
This request header contains the targeted layer's [mode](/up.layer.mode).
Server-side code is free to render different HTML for different modes.
For example, you might prefer to not render a site navigation for overlays.
The user may choose to not send this header by configuring
`up.network.config.requestMetaKeys`.
\#\#\# Example
```http
X-Up-Mode: drawer
```
@header X-Up-Mode
@stable
###
###**
This request header contains the [mode](/up.layer.mode) of the layer
targeted for a failed fragment update.
A fragment update is considered *failed* if the server responds with a
status code other than 2xx, but still renders HTML.
Server-side code is free to render different HTML for different modes.
For example, you might prefer to not render a site navigation for overlays.
The user may choose to not send this header by configuring
`up.network.config.requestMetaKeys`.
\#\#\# Example
```http
X-Up-Mode: drawer
X-Up-Fail-Mode: root
```
@header X-Up-Fail-Mode
@stable
###
# Parses the X-Up-Clear-Cache response header.
clearCacheFromXHR = (xhr) ->
  # The literal strings 'true'/'false' become booleans;
  # any other header value is passed through unchanged.
  parseValue = (value) ->
    switch value
      when 'true'
        true
      when 'false'
        false
      else
        value
  extractHeader(xhr, 'clearCache', parseValue)
###**
The server may send this optional response header with the value `clear` to [clear the cache](/up.cache.clear).
\#\#\# Example
```http
X-Up-Cache: clear
```
@header X-Up-Cache
@param value
The string `"clear"`.
###
###**
This request header contains a timestamp of an existing fragment that is being [reloaded](/up.reload).
The timestamp must be explicitly set by the user as an `[up-time]` attribute on the fragment.
It should indicate the time when the fragment's underlying data was last changed.
See `[up-time]` for a detailed example.
\#\#\# Format
The time is encoded as the number of seconds elapsed since the [Unix epoch](https://en.wikipedia.org/wiki/Unix_time).
For instance, a modification date of December 23rd, 1:40:18 PM UTC would produce the following header:
```http
X-Up-Target: .unread-count
X-Up-Reload-From-Time: 1608730818
```
If no timestamp is known, Unpoly will send a value of zero (`X-Up-Reload-From-Time: 0`).
@header X-Up-Reload-From-Time
@stable
###
contextFromXHR = (xhr) ->
extractHeader(xhr, 'context', JSON.parse)
###**
This request header contains the targeted layer's [context](/context), serialized as JSON.
The user may choose to not send this header by configuring
`up.network.config.requestMetaKeys`.
\#\#\# Example
```http
X-Up-Context: { "lives": 3 }
```
\#\#\# Updating context from the server
The server may update the layer context by sending a `X-Up-Context` response header with
changed key/value pairs:
```http
Content-Type: text/html
X-Up-Context: { "lives": 2 }
<html>
...
</html>
```
Upon seeing the response header, Unpoly will assign the server-provided context object to
the layer's context object, adding or replacing keys as needed.
Client-side context keys not mentioned in the response will remain unchanged.
There is no explicit protocol to *remove* keys from the context, but the server may send a key
with a `null` value to effectively remove a key.
The frontend will use the server-provided context upates for both successful (HTTP status `200 OK`)
and failed (status `4xx` or `5xx`) responses. If no `X-Up-Context` response header is set,
the updating layer's context will not be changed.
It is recommended that the server only places changed key/value pairs into the `X-Up-Context`
response header, and not echo the entire context object. Otherwise any client-side changes made while
the request was in flight will get overridden by the server-provided context.
@header X-Up-Context
@experimental
###
###**
This request header contains the [context](/context) of the layer
targeted for a failed fragment update, serialized as JSON.
A fragment update is considered *failed* if the server responds with a
status code other than 2xx, but still renders HTML.
Server-side code is free to render different HTML for different contexts.
For example, you might prefer to not render a site navigation for overlays.
The user may choose to not send this header by configuring
`up.network.config.requestMetaKeys`.
\#\#\# Example
```http
X-Up-Fail-Context: { "context": "Choose a company contact" }
```
@header X-Up-Fail-Context
@experimental
###
###**
@function up.protocol.methodFromXHR
@internal
###
methodFromXHR = (xhr) ->
extractHeader(xhr, 'method', u.normalizeMethod)
###**
The server may set this optional response header to change the browser location after a fragment update.
Without this header Unpoly will set the browser location to the response URL, which is usually sufficient.
When setting `X-Up-Location` it is recommended to also set `X-Up-Method`. If no `X-Up-Method` header is given
and the response's URL changed from the request's URL, Unpoly will assume a redirect and set the
method to `GET`.
\#\#\# Internet Explorer 11
There is an edge case on Internet Explorer 11, where Unpoly cannot detect the final URL after a redirect.
You can fix this edge case by delivering `X-Up-Location` and `X-Up-Method` headers with the *last* response
in a series of redirects.
The **simplest implementation** is to set these headers for every request.
\#\#\# Example
```http
X-Up-Location: /current-url
X-Up-Method: GET
```
@header X-Up-Location
@stable
###
###**
The server may set this optional response header to change the HTTP method after a fragment update.
Without this header Unpoly will assume a `GET` method if the response's URL changed from the request's URL,
\#\#\# Example
```http
X-Up-Location: /current-url
X-Up-Method: GET
```
@header X-Up-Method
@stable
###
###**
The server may set this optional response header to change the document title after a fragment update.
Without this header Unpoly will extract the `<title>` from the server response.
This header is useful when you [optimize your response](X-Up-Target) to not render
the application layout unless targeted. Since your optimized response
no longer includes a `<title>`, you can instead use this HTTP header to pass the document title.
\#\#\# Example
```http
X-Up-Title: Playlist browser
```
@header X-Up-Title
@stable
###
###**
This request header contains the `[name]` of a [form field being validated](/input-up-validate).
When seeing this header, the server is expected to validate (but not save)
the form submission and render a new copy of the form with validation errors.
See the documentation for [`input[up-validate]`](/input-up-validate) for more information
on how server-side validation works in Unpoly.
\#\#\# Example
Assume we have an auto-validating form field:
```html
<fieldset>
<input name="email" up-validate>
</fieldset>
```
When the input is changed, Unpoly will submit the form with an additional header:
```html
X-Up-Validate: email
```
@header X-Up-Validate
@stable
###
eventPlansFromXHR = (xhr) ->
extractHeader(xhr, 'events', JSON.parse)
###**
The server may set this response header to [emit events](/up.emit) with the
requested [fragment update](a-up-target).
The header value is a [JSON](https://en.wikipedia.org/wiki/JSON) array.
Each element in the array is a JSON object representing an event to be emitted
on the `document`.
The object property `{ "type" }` defines the event's [type](https://developer.mozilla.org/en-US/docs/Web/API/Event/type). Other properties become properties of the emitted
event object.
\#\#\# Example
```http
Content-Type: text/html
X-Up-Events: [{ "type": "user:created", "id": 5012 }, { "type": "signup:completed" }]
...
<html>
...
</html>
```
\#\#\# Emitting an event on a layer
Instead of emitting an event on the `document`, the server may also choose to
[emit the event on the layer being updated](/up.layer.emit). To do so, add a property
`{ "layer": "current" }` to the JSON object of an event:
```http
Content-Type: text/html
X-Up-Events: [{ "type": "user:created", "name:" "foobar", "layer": "current" }]
...
<html>
...
</html>
```
@header X-Up-Events
@stable
###
acceptLayerFromXHR = (xhr) ->
# Even if acceptance has no value, the server will send
# X-Up-Accept-Layer: null
extractHeader(xhr, 'acceptLayer', JSON.parse)
###**
The server may set this response header to [accept](/up.layer.accept) the targeted overlay
in response to a fragment update.
Upon seeing the header, Unpoly will cancel the fragment update and accept the layer instead.
If the root layer is targeted, the header is ignored and the fragment is updated with
the response's HTML content.
The header value is the acceptance value serialized as a JSON object.
To accept an overlay without value, set the header value to the string `null`.
\#\#\# Example
The response below will accept the targeted overlay with the value `{user_id: 1012 }`:
```http
Content-Type: text/html
X-Up-Accept-Layer: {"user_id": 1012}
<html>
...
</html>
```
\#\#\# Rendering content
The response may contain `text/html` content. If the root layer is targeted,
the `X-Up-Accept-Layer` header is ignored and the fragment is updated with
the response's HTML content.
If you know that an overlay will be closed don't want to render HTML,
have the server change the render target to `:none`:
```http
Content-Type: text/html
X-Up-Accept-Layer: {"user_id": 1012}
X-Up-Target: :none
```
@header X-Up-Accept-Layer
@stable
###
dismissLayerFromXHR = (xhr) ->
# Even if dismissal has no value, the server will send
# X-Up-Dismiss-Layer: null
extractHeader(xhr, 'dismissLayer', JSON.parse)
###**
The server may set this response header to [dismiss](/up.layer.dismiss) the targeted overlay
in response to a fragment update.
Upon seeing the header, Unpoly will cancel the fragment update and dismiss the layer instead.
If the root layer is targeted, the header is ignored and the fragment is updated with
the response's HTML content.
The header value is the dismissal value serialized as a JSON object.
To accept an overlay without value, set the header value to the string `null`.
\#\#\# Example
The response below will dismiss the targeted overlay without a dismissal value:
```http
HTTP/1.1 200 OK
Content-Type: text/html
X-Up-Dismiss-Layer: null
<html>
...
</html>
```
\#\#\# Rendering content
The response may contain `text/html` content. If the root layer is targeted,
the `X-Up-Accept-Layer` header is ignored and the fragment is updated with
the response's HTML content.
If you know that an overlay will be closed don't want to render HTML,
have the server change the render target to `:none`:
```http
HTTP/1.1 200 OK
Content-Type: text/html
X-Up-Accept-Layer: {"user_id": 1012}
X-Up-Target: :none
```
@header X-Up-Dismiss-Layer
@stable
###
###**
Server-side companion libraries like unpoly-rails set this cookie so we
have a way to detect the request method of the initial page load.
There is no JavaScript API for this.
@function up.protocol.initialRequestMethod
@internal
###
initialRequestMethod = u.memoize ->
return u.normalizeMethod(up.browser.popCookie('_up_method'))
###**
The server may set this optional cookie to echo the HTTP method of the initial request.
If the initial page was loaded with a non-`GET` HTTP method, Unpoly prefers to make a full
page load when you try to update a fragment. Once the next page was loaded with a `GET` method,
Unpoly will again update fragments.
This fixes two edge cases you might or might not care about:
1. Unpoly replaces the initial page state so it can later restore it when the user
goes back to that initial URL. However, if the initial request was a POST,
Unpoly will wrongly assume that it can restore the state by reloading with GET.
2. Some browsers have a bug where the initial request method is used for all
subsequently pushed states. That means if the user reloads the page on a later
GET state, the browser will wrongly attempt a POST request.
This issue affects Safari 9-12 (last tested in 2019-03).
Modern Firefoxes, Chromes and IE10+ don't have this behavior.
In order to allow Unpoly to detect the HTTP method of the initial page load,
the server must set a cookie:
```http
Set-Cookie: _up_method=POST
```
When Unpoly boots it will look for this cookie and configure itself accordingly.
The cookie is then deleted in order to not affect following requests.
The **simplest implementation** is to set this cookie for every request that is neither
`GET` nor an [Unpoly request](/X-Up-Version). For all other requests
an existing `_up_method` cookie should be deleted.
@cookie _up_method
@stable
###
###**
@function up.protocol.locationFromXHR
@internal
###
locationFromXHR = (xhr) ->
# We prefer the X-Up-Location header to xhr.responseURL.
# If the server redirected to a new location, Unpoly-related headers
# will be encoded in the request's query params like this:
#
# /redirect-target?_up[target]=.foo
#
# To prevent these these `_up` params from showing up in the browser URL,
# the X-Up-Location header will omit these params while `xhr.responseURL`
# will still contain them.
extractHeader(xhr, 'location') || xhr.responseURL
###**
@function up.protocol.titleFromXHR
@internal
###
titleFromXHR = (xhr) ->
extractHeader(xhr, 'title')
###**
@function up.protocol.targetFromXHR
@internal
###
targetFromXHR = (xhr) ->
extractHeader(xhr, 'target')
###**
Configures strings used in the optional [server protocol](/up.protocol).
@property up.protocol.config
@param {string} [config.csrfHeader='X-CSRF-Token']
The name of the HTTP header that will include the
[CSRF token](https://en.wikipedia.org/wiki/Cross-site_request_forgery#Synchronizer_token_pattern)
for AJAX requests.
@param {string|Function(): string} [config.csrfParam]
The `name` of the hidden `<input>` used for sending a
[CSRF token](https://en.wikipedia.org/wiki/Cross-site_request_forgery#Synchronizer_token_pattern) when
submitting a default, non-AJAX form. For AJAX request the token is sent as an
[HTTP header](/up.protocol.config#config.csrfHeader instead.
The parameter name can be configured as a string or as function that returns the parameter name.
If no name is set, no token will be sent.
Defaults to the `content` attribute of a `<meta>` tag named `csrf-param`:
```html
<meta name="csrf-param" content="authenticity_token" />
```
@param {string|Function(): string} [config.csrfToken]
The [CSRF token](https://en.wikipedia.org/wiki/Cross-site_request_forgery#Synchronizer_token_pattern)
to send for unsafe requests. The token will be sent as either a HTTP header (for AJAX requests)
or hidden form `<input>` (for default, non-AJAX form submissions).
The token can either be configured as a string or as function that returns the token.
If no token is set, no token will be sent.
Defaults to the `content` attribute of a `<meta>` tag named `csrf-token`:
```
<meta name='csrf-token' content='PI:PASSWORD:<PASSWORD>END_PI'>
```
@param {string} [config.methodParam='_method']
The name of request parameter containing the original request method when Unpoly needs to wrap
the method.
Methods must be wrapped when making a [full page request](/up.browser.loadPage) with a methods other
than GET or POST. In this case Unpoly will make a POST request with the original request method
in a form parameter named `_method`:
```http
POST /test HTTP/1.1
Host: example.com
Content-Type: application/x-www-form-urlencoded
Content-Length: 11
_method=PUT
```
@stable
###
config = new up.Config ->
methodParam: '_method' # up.network.config.methodParam
csrfParam: -> e.metaContent('csrf-param') # das muss echt configurierbar sein, evtl. up.network.config.csrfParam
csrfToken: -> e.metaContent('csrf-token') # das muss echt configurierbar sein, evtl. up.network.config.csrfToken
csrfHeader: 'X-CSRF-Token' # MUSS KONFIGURIERBAR BLEIBEN, andere frameworks nutzen X-XSRF-Token
csrfHeader = ->
u.evalOption(config.csrfHeader)
csrfParam = ->
u.evalOption(config.csrfParam)
csrfToken = ->
u.evalOption(config.csrfToken)
###**
@internal
###
wrapMethod = (method, params) ->
params.add(config.methodParam, method)
return 'POST'
reset = ->
config.reset()
up.on 'up:framework:reset', reset
config: config
reset: reset
locationFromXHR: locationFromXHR
titleFromXHR: titleFromXHR
targetFromXHR: targetFromXHR
methodFromXHR: methodFromXHR
acceptLayerFromXHR: acceptLayerFromXHR
contextFromXHR: contextFromXHR
dismissLayerFromXHR: dismissLayerFromXHR
eventPlansFromXHR: eventPlansFromXHR
clearCacheFromXHR: clearCacheFromXHR
csrfHeader: csrfHeader
csrfParam: csrfParam
csrfToken: csrfToken
initialRequestMethod: initialRequestMethod
headerize: headerize
wrapMethod: wrapMethod
|
[
{
"context": "ts = ()->\n method = {}\n\n method.webhookToken = \"P8GnA43jssAXRfnYvSSmK3TC\"\n method.token = \"4KY9DDVfTc5YKfc54KlKtgSf\"\n me",
"end": 93,
"score": 0.8936864137649536,
"start": 69,
"tag": "KEY",
"value": "P8GnA43jssAXRfnYvSSmK3TC"
},
{
"context": "en = \"P8GnA43... | config/th.coffee | pcruise/peoply-slackbot | 0 | #태국 슬랙
module.exports = ()->
method = {}
method.webhookToken = "P8GnA43jssAXRfnYvSSmK3TC"
method.token = "4KY9DDVfTc5YKfc54KlKtgSf"
method.parse_app_id = 'h8yTR0fZZvHmLsQTw5W9vaCHEZTqo4ixFMIR8WCm'
method.parse_app_key = 'FtLLRlTm7WsL1h81fhtFA2rw90ZTb3yiOJzkmW0s'
method.domain = "aropayello"
method.bot_name = "YelloBot"
method.auto_msg =
wait: 'Welcome to Concierge! Please allow us a few seconds to connect you to our concierge master.'
timeout: 'Currently, we are operating from 8AM to 2AM. But don\'t worry! Your message was delivered to concierge masters. We will contact you tomorrow again. Thank you!'
return method | 144616 | #태국 슬랙
module.exports = ()->
method = {}
method.webhookToken = "<KEY>"
method.token = "<KEY>"
method.parse_app_id = 'h8yTR0fZZvHmLsQTw5W9vaCHEZTqo4ixFMIR8WCm'
method.parse_app_key = '<KEY>'
method.domain = "aropayello"
method.bot_name = "YelloBot"
method.auto_msg =
wait: 'Welcome to Concierge! Please allow us a few seconds to connect you to our concierge master.'
timeout: 'Currently, we are operating from 8AM to 2AM. But don\'t worry! Your message was delivered to concierge masters. We will contact you tomorrow again. Thank you!'
return method | true | #태국 슬랙
module.exports = ()->
method = {}
method.webhookToken = "PI:KEY:<KEY>END_PI"
method.token = "PI:KEY:<KEY>END_PI"
method.parse_app_id = 'h8yTR0fZZvHmLsQTw5W9vaCHEZTqo4ixFMIR8WCm'
method.parse_app_key = 'PI:KEY:<KEY>END_PI'
method.domain = "aropayello"
method.bot_name = "YelloBot"
method.auto_msg =
wait: 'Welcome to Concierge! Please allow us a few seconds to connect you to our concierge master.'
timeout: 'Currently, we are operating from 8AM to 2AM. But don\'t worry! Your message was delivered to concierge masters. We will contact you tomorrow again. Thank you!'
return method |
[
{
"context": " email = generateRandomEmail()\n username = generateRandomUsername()\n\n registerRequestParams = generateRegisterRe",
"end": 2152,
"score": 0.9969294667243958,
"start": 2130,
"tag": "USERNAME",
"value": "generateRandomUsername"
},
{
"context": " = generateRa... | servers/lib/server/handlers/validateemail.test.coffee | ezgikaysi/koding | 1 | { async
expect
request
generateRandomEmail
generateRandomString
generateRandomUsername
checkBongoConnectivity } = require '../../../testhelper'
{ generateRegisterRequestParams } = require '../../../testhelper/handler/registerhelper'
{ generateValidateEmailRequestParams } = require '../../../testhelper/handler/validateemailhelper'
JUser = require '../../../models/user'
beforeTests = -> before (done) ->
checkBongoConnectivity done
# here we have actual tests
runTests = -> describe 'server.handlers.validateemail', ->
it 'should send HTTP 404 if request method is not POST', (done) ->
validateEmailRequestParams = generateValidateEmailRequestParams
body :
email : generateRandomEmail()
queue = []
methods = ['put', 'patch', 'delete']
addRequestToQueue = (queue, method) -> queue.push (next) ->
validateEmailRequestParams.method = method
request validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 404
next()
for method in methods
addRequestToQueue queue, method
async.series queue, done
it 'should send HTTP 400 if email is not set', (done) ->
validateEmailRequestParams = generateValidateEmailRequestParams()
validateEmailRequestParams.body = null
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 400
expect(body).to.be.equal 'Bad request'
done()
it 'should send HTTP 400 if email is not valid', (done) ->
validateEmailRequestParams = generateValidateEmailRequestParams
body :
email : 'someInvalidEmail'
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 400
expect(body).to.be.equal 'Bad request'
done()
# TODO: returning 'Bad request' error message instead of 'email is in use'
it 'should send HTTP 400 if email is in use', (done) ->
email = generateRandomEmail()
username = generateRandomUsername()
registerRequestParams = generateRegisterRequestParams
body :
email : email
validateEmailRequestParams = generateValidateEmailRequestParams
body :
email : email
queue = [
(next) ->
# registering a new user
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting email validation to fail using already registered email
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 400
expect(body).to.be.equal 'Bad request'
next()
]
async.series queue, done
it 'should send HTTP 400 if dotted gmail address is in use', (done) ->
email = generateRandomEmail 'gmail.com'
username = generateRandomUsername()
registerRequestParams = generateRegisterRequestParams
body :
email : email
[username, host] = email.split '@'
username = username.replace /(.)/g, '$1.'
candidate = "#{username}@#{host}"
validateEmailRequestParams = generateValidateEmailRequestParams
body :
email : candidate
# expecting email validation to fail using already registered email
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 400
expect(body).to.be.equal 'Bad request'
done()
it 'should send HTTP 400 if email is in use and password is invalid', (done) ->
email = generateRandomEmail()
username = generateRandomUsername()
password = 'testpass'
registerRequestParams = generateRegisterRequestParams
body :
email : email
username : username
password : password
passwordConfirm : password
validateEmailRequestParams = generateValidateEmailRequestParams
body :
email : email
password : 'someInvalidPassword'
queue = [
(next) ->
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 400
expect(body).to.be.equal 'Bad request'
next()
]
async.series queue, done
it 'should send HTTP 400 if 2FA was activated for the account', (done) ->
email = generateRandomEmail()
username = generateRandomUsername()
password = 'testpass'
registerRequestParams = generateRegisterRequestParams
body :
email : email
username : username
password : password
passwordConfirm : password
validateEmailRequestParams = generateValidateEmailRequestParams
body :
email : email
password : password
queue = [
(next) ->
# registering a new user
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
(next) ->
# setting two factor authentication on by adding twofactorkey field
JUser.update { username }, { $set: { twofactorkey: 'somekey' } }, (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting 400 for the 2fa enabled account
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 400
expect(body).to.be.equal 'TwoFactor auth Enabled'
next()
]
async.series queue, done
it 'should send HTTP 200 if email is in use and password is valid', (done) ->
email = generateRandomEmail()
username = generateRandomUsername()
password = 'testpass'
registerRequestParams = generateRegisterRequestParams
body :
email : email
username : username
password : password
passwordConfirm : password
validateEmailRequestParams = generateValidateEmailRequestParams
body :
email : email
password : password
queue = [
(next) ->
# registering new user
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting email with invalid password to fail
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal 'User is logged in!'
next()
]
async.series queue, done
it 'should send HTTP 200 if email is valid and not in use', (done) ->
cookieJar = request.jar()
validateEmailRequestParams = generateValidateEmailRequestParams
jar : cookieJar
body :
email : generateRandomEmail()
url = validateEmailRequestParams.url
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
# expecting clientId cookie to be set
expect(cookieJar.getCookieString url).to.contain 'clientId'
expect(body).to.be.equal 'true'
done()
beforeTests()
runTests()
| 112363 | { async
expect
request
generateRandomEmail
generateRandomString
generateRandomUsername
checkBongoConnectivity } = require '../../../testhelper'
{ generateRegisterRequestParams } = require '../../../testhelper/handler/registerhelper'
{ generateValidateEmailRequestParams } = require '../../../testhelper/handler/validateemailhelper'
JUser = require '../../../models/user'
beforeTests = -> before (done) ->
checkBongoConnectivity done
# here we have actual tests
runTests = -> describe 'server.handlers.validateemail', ->
it 'should send HTTP 404 if request method is not POST', (done) ->
validateEmailRequestParams = generateValidateEmailRequestParams
body :
email : generateRandomEmail()
queue = []
methods = ['put', 'patch', 'delete']
addRequestToQueue = (queue, method) -> queue.push (next) ->
validateEmailRequestParams.method = method
request validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 404
next()
for method in methods
addRequestToQueue queue, method
async.series queue, done
it 'should send HTTP 400 if email is not set', (done) ->
validateEmailRequestParams = generateValidateEmailRequestParams()
validateEmailRequestParams.body = null
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 400
expect(body).to.be.equal 'Bad request'
done()
it 'should send HTTP 400 if email is not valid', (done) ->
validateEmailRequestParams = generateValidateEmailRequestParams
body :
email : 'someInvalidEmail'
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 400
expect(body).to.be.equal 'Bad request'
done()
# TODO: returning 'Bad request' error message instead of 'email is in use'
it 'should send HTTP 400 if email is in use', (done) ->
email = generateRandomEmail()
username = generateRandomUsername()
registerRequestParams = generateRegisterRequestParams
body :
email : email
validateEmailRequestParams = generateValidateEmailRequestParams
body :
email : email
queue = [
(next) ->
# registering a new user
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting email validation to fail using already registered email
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 400
expect(body).to.be.equal 'Bad request'
next()
]
async.series queue, done
it 'should send HTTP 400 if dotted gmail address is in use', (done) ->
email = generateRandomEmail 'gmail.com'
username = generateRandomUsername()
registerRequestParams = generateRegisterRequestParams
body :
email : email
[username, host] = email.split '@'
username = username.replace /(.)/g, '$1.'
candidate = "#{username}@#{host}"
validateEmailRequestParams = generateValidateEmailRequestParams
body :
email : candidate
# expecting email validation to fail using already registered email
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 400
expect(body).to.be.equal 'Bad request'
done()
it 'should send HTTP 400 if email is in use and password is invalid', (done) ->
email = generateRandomEmail()
username = generateRandomUsername()
password = '<PASSWORD>'
registerRequestParams = generateRegisterRequestParams
body :
email : email
username : username
password : <PASSWORD>
passwordConfirm : <PASSWORD>
validateEmailRequestParams = generateValidateEmailRequestParams
body :
email : email
password : '<PASSWORD>'
queue = [
(next) ->
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 400
expect(body).to.be.equal 'Bad request'
next()
]
async.series queue, done
it 'should send HTTP 400 if 2FA was activated for the account', (done) ->
email = generateRandomEmail()
username = generateRandomUsername()
password = '<PASSWORD>'
registerRequestParams = generateRegisterRequestParams
body :
email : email
username : username
password : <PASSWORD>
passwordConfirm : <PASSWORD>
validateEmailRequestParams = generateValidateEmailRequestParams
body :
email : email
password : <PASSWORD>
queue = [
(next) ->
# registering a new user
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
(next) ->
# setting two factor authentication on by adding twofactorkey field
JUser.update { username }, { $set: { twofactorkey: 'somekey' } }, (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting 400 for the 2fa enabled account
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 400
expect(body).to.be.equal 'TwoFactor auth Enabled'
next()
]
async.series queue, done
it 'should send HTTP 200 if email is in use and password is valid', (done) ->
email = generateRandomEmail()
username = generateRandomUsername()
password = '<PASSWORD>'
registerRequestParams = generateRegisterRequestParams
body :
email : email
username : username
password : <PASSWORD>
passwordConfirm : <PASSWORD>
validateEmailRequestParams = generateValidateEmailRequestParams
body :
email : email
password : <PASSWORD>
queue = [
(next) ->
# registering new user
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting email with invalid password to fail
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal 'User is logged in!'
next()
]
async.series queue, done
it 'should send HTTP 200 if email is valid and not in use', (done) ->
cookieJar = request.jar()
validateEmailRequestParams = generateValidateEmailRequestParams
jar : cookieJar
body :
email : generateRandomEmail()
url = validateEmailRequestParams.url
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
# expecting clientId cookie to be set
expect(cookieJar.getCookieString url).to.contain 'clientId'
expect(body).to.be.equal 'true'
done()
beforeTests()
runTests()
| true | { async
expect
request
generateRandomEmail
generateRandomString
generateRandomUsername
checkBongoConnectivity } = require '../../../testhelper'
{ generateRegisterRequestParams } = require '../../../testhelper/handler/registerhelper'
{ generateValidateEmailRequestParams } = require '../../../testhelper/handler/validateemailhelper'
JUser = require '../../../models/user'
beforeTests = -> before (done) ->
checkBongoConnectivity done
# here we have actual tests
runTests = -> describe 'server.handlers.validateemail', ->
it 'should send HTTP 404 if request method is not POST', (done) ->
validateEmailRequestParams = generateValidateEmailRequestParams
body :
email : generateRandomEmail()
queue = []
methods = ['put', 'patch', 'delete']
addRequestToQueue = (queue, method) -> queue.push (next) ->
validateEmailRequestParams.method = method
request validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 404
next()
for method in methods
addRequestToQueue queue, method
async.series queue, done
it 'should send HTTP 400 if email is not set', (done) ->
validateEmailRequestParams = generateValidateEmailRequestParams()
validateEmailRequestParams.body = null
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 400
expect(body).to.be.equal 'Bad request'
done()
it 'should send HTTP 400 if email is not valid', (done) ->
validateEmailRequestParams = generateValidateEmailRequestParams
body :
email : 'someInvalidEmail'
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 400
expect(body).to.be.equal 'Bad request'
done()
# TODO: returning 'Bad request' error message instead of 'email is in use'
it 'should send HTTP 400 if email is in use', (done) ->
email = generateRandomEmail()
username = generateRandomUsername()
registerRequestParams = generateRegisterRequestParams
body :
email : email
validateEmailRequestParams = generateValidateEmailRequestParams
body :
email : email
queue = [
(next) ->
# registering a new user
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting email validation to fail using already registered email
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 400
expect(body).to.be.equal 'Bad request'
next()
]
async.series queue, done
it 'should send HTTP 400 if dotted gmail address is in use', (done) ->
email = generateRandomEmail 'gmail.com'
username = generateRandomUsername()
registerRequestParams = generateRegisterRequestParams
body :
email : email
[username, host] = email.split '@'
username = username.replace /(.)/g, '$1.'
candidate = "#{username}@#{host}"
validateEmailRequestParams = generateValidateEmailRequestParams
body :
email : candidate
# expecting email validation to fail using already registered email
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 400
expect(body).to.be.equal 'Bad request'
done()
it 'should send HTTP 400 if email is in use and password is invalid', (done) ->
email = generateRandomEmail()
username = generateRandomUsername()
password = 'PI:PASSWORD:<PASSWORD>END_PI'
registerRequestParams = generateRegisterRequestParams
body :
email : email
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
passwordConfirm : PI:PASSWORD:<PASSWORD>END_PI
validateEmailRequestParams = generateValidateEmailRequestParams
body :
email : email
password : 'PI:PASSWORD:<PASSWORD>END_PI'
queue = [
(next) ->
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 400
expect(body).to.be.equal 'Bad request'
next()
]
async.series queue, done
it 'should send HTTP 400 if 2FA was activated for the account', (done) ->
email = generateRandomEmail()
username = generateRandomUsername()
password = 'PI:PASSWORD:<PASSWORD>END_PI'
registerRequestParams = generateRegisterRequestParams
body :
email : email
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
passwordConfirm : PI:PASSWORD:<PASSWORD>END_PI
validateEmailRequestParams = generateValidateEmailRequestParams
body :
email : email
password : PI:PASSWORD:<PASSWORD>END_PI
queue = [
(next) ->
# registering a new user
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
(next) ->
# setting two factor authentication on by adding twofactorkey field
JUser.update { username }, { $set: { twofactorkey: 'somekey' } }, (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting 400 for the 2fa enabled account
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 400
expect(body).to.be.equal 'TwoFactor auth Enabled'
next()
]
async.series queue, done
it 'should send HTTP 200 if email is in use and password is valid', (done) ->
email = generateRandomEmail()
username = generateRandomUsername()
password = 'PI:PASSWORD:<PASSWORD>END_PI'
registerRequestParams = generateRegisterRequestParams
body :
email : email
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
passwordConfirm : PI:PASSWORD:<PASSWORD>END_PI
validateEmailRequestParams = generateValidateEmailRequestParams
body :
email : email
password : PI:PASSWORD:<PASSWORD>END_PI
queue = [
(next) ->
# registering new user
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting email with invalid password to fail
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal 'User is logged in!'
next()
]
async.series queue, done
it 'should send HTTP 200 if email is valid and not in use', (done) ->
cookieJar = request.jar()
validateEmailRequestParams = generateValidateEmailRequestParams
jar : cookieJar
body :
email : generateRandomEmail()
url = validateEmailRequestParams.url
request.post validateEmailRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
# expecting clientId cookie to be set
expect(cookieJar.getCookieString url).to.contain 'clientId'
expect(body).to.be.equal 'true'
done()
beforeTests()
runTests()
|
[
{
"context": "###\n ChangeOnScroll\n https:#github.com/ignlg/ChangeOnScroll\n\n Copyright (c) 2015 Ignacio Lago",
"end": 46,
"score": 0.9990832209587097,
"start": 41,
"tag": "USERNAME",
"value": "ignlg"
},
{
"context": "hub.com/ignlg/ChangeOnScroll\n\n Copyright (c) 2015 Ignacio ... | jquery-changeonscroll.coffee | ignlg/ChangeOnScroll | 0 | ###
ChangeOnScroll
https:#github.com/ignlg/ChangeOnScroll
Copyright (c) 2015 Ignacio Lago
MIT license
###
(($) ->
$.isChangeOnScroll = (el) ->
not not $(el).data 'ChangeOnScroll'
$.ChangeOnScroll = (el, options) ->
base = this
base.$el = $ el
base.el = el
# Add a reverse reference to the DOM object.
base.$el.data 'ChangeOnScroll', base
# A flag so we know if the scroll has been reset.
isReset = false
# Who should change.
target = base.$el
# Who fires the change when scrolled above the top of the page.
reference = target
# The offset top of the element when resetScroll was called.
offsetTop = 0
# The offset left of the element when resetScroll was called.
offsetLeft = 0
resetScroll = ->
# Capture the offset top of the reference element.
offsetTop = reference.offset().top + ( base.options.topOffset or 0 )
# Capture the offset left of the reference element.
offsetLeft = reference.offset().left + ( base.options.leftOffset or 0 )
# Set that this has been called at least once.
isReset = true
# Checks to see if we need to do something based on new scroll position
# of the page.
checkScroll = ->
if not $.isChangeOnScroll target then return
wasReset = isReset
# If resetScroll has not yet been called, call it. This only
# happens once.
if not isReset
resetScroll()
else
# Capture the offset top of the reference element.
offsetTop = reference.offset().top + ( base.options.topOffset or 0 )
# Capture the offset left of the reference element.
offsetLeft = reference.offset().left + ( base.options.leftOffset or 0 )
# Grab the current horizontal scroll position.
x = $(window).scrollLeft()
# Grab the current vertical scroll position.
y = $(window).scrollTop()
# If the vertical or horizontall scroll position, plus the
# optional offsets, would put the reference element above the top
# of the page, set the class to the target element.
if (base.options.verticalScroll and y > offsetTop) or
( base.options.horizontalScroll and x > offsetLeft)
target.addClass base.options.className
else
target.removeClass base.options.className
windowResize = (event) ->
isReset = false
checkScroll()
windowScroll = (event) ->
if not not window.requestAnimationFrame
window.requestAnimationFrame(checkScroll)
else checkScroll()
preventDefault = (e) ->
e = e or window.event
if e.preventDefault
e.preventDefault()
e.returnValue = false
return
# Initializes this plugin. Captures the options passed in and binds
# to the window scroll and resize events.
base.init = ->
# Capture the options for this plugin.
base.options = $.extend {}, $.ChangeOnScroll.defaultOptions, options
if base.options.reference
reference = $ base.options.reference
# Reset the reference element offsets when the window is resized, then
# check to see if we need to toggle the target element class.
$(window).bind 'resize.ChangeOnScroll', windowResize
# Same for scroll.
$(window).bind 'scroll.ChangeOnScroll', windowScroll
# For touch devices.
if 'ontouchmove' of window
$(window).bind 'touchmove.ChangeOnScroll', checkScroll
reference.bind 'scroll.ChangeOnScroll', -> checkScroll()
reference.bind 'detach.ChangeOnScroll', (ev) ->
preventDefault ev
$(window).unbind 'resize.ChangeOnScroll', windowResize
$(window).unbind 'scroll.ChangeOnScroll', windowScroll
reference.unbind '.ChangeOnScroll'
base.$el.removeData 'ChangeOnScroll'
# Reset everything.
windowResize()
# Initialize the plugin.
base.init()
# Sets the option defaults.
$.ChangeOnScroll.defaultOptions =
topOffset: 0
leftOffset: 0
verticalScroll: true
horizontalScroll: true
className: 'changed-on-scroll'
$.fn.changeOnScroll = (options) ->
@each -> new ($.ChangeOnScroll)(this, options)
return
) jQuery
| 160123 | ###
ChangeOnScroll
https:#github.com/ignlg/ChangeOnScroll
Copyright (c) 2015 <NAME>
MIT license
###
(($) ->
$.isChangeOnScroll = (el) ->
not not $(el).data 'ChangeOnScroll'
$.ChangeOnScroll = (el, options) ->
base = this
base.$el = $ el
base.el = el
# Add a reverse reference to the DOM object.
base.$el.data 'ChangeOnScroll', base
# A flag so we know if the scroll has been reset.
isReset = false
# Who should change.
target = base.$el
# Who fires the change when scrolled above the top of the page.
reference = target
# The offset top of the element when resetScroll was called.
offsetTop = 0
# The offset left of the element when resetScroll was called.
offsetLeft = 0
resetScroll = ->
# Capture the offset top of the reference element.
offsetTop = reference.offset().top + ( base.options.topOffset or 0 )
# Capture the offset left of the reference element.
offsetLeft = reference.offset().left + ( base.options.leftOffset or 0 )
# Set that this has been called at least once.
isReset = true
# Checks to see if we need to do something based on new scroll position
# of the page.
checkScroll = ->
if not $.isChangeOnScroll target then return
wasReset = isReset
# If resetScroll has not yet been called, call it. This only
# happens once.
if not isReset
resetScroll()
else
# Capture the offset top of the reference element.
offsetTop = reference.offset().top + ( base.options.topOffset or 0 )
# Capture the offset left of the reference element.
offsetLeft = reference.offset().left + ( base.options.leftOffset or 0 )
# Grab the current horizontal scroll position.
x = $(window).scrollLeft()
# Grab the current vertical scroll position.
y = $(window).scrollTop()
# If the vertical or horizontall scroll position, plus the
# optional offsets, would put the reference element above the top
# of the page, set the class to the target element.
if (base.options.verticalScroll and y > offsetTop) or
( base.options.horizontalScroll and x > offsetLeft)
target.addClass base.options.className
else
target.removeClass base.options.className
windowResize = (event) ->
isReset = false
checkScroll()
windowScroll = (event) ->
if not not window.requestAnimationFrame
window.requestAnimationFrame(checkScroll)
else checkScroll()
preventDefault = (e) ->
e = e or window.event
if e.preventDefault
e.preventDefault()
e.returnValue = false
return
# Initializes this plugin. Captures the options passed in and binds
# to the window scroll and resize events.
base.init = ->
# Capture the options for this plugin.
base.options = $.extend {}, $.ChangeOnScroll.defaultOptions, options
if base.options.reference
reference = $ base.options.reference
# Reset the reference element offsets when the window is resized, then
# check to see if we need to toggle the target element class.
$(window).bind 'resize.ChangeOnScroll', windowResize
# Same for scroll.
$(window).bind 'scroll.ChangeOnScroll', windowScroll
# For touch devices.
if 'ontouchmove' of window
$(window).bind 'touchmove.ChangeOnScroll', checkScroll
reference.bind 'scroll.ChangeOnScroll', -> checkScroll()
reference.bind 'detach.ChangeOnScroll', (ev) ->
preventDefault ev
$(window).unbind 'resize.ChangeOnScroll', windowResize
$(window).unbind 'scroll.ChangeOnScroll', windowScroll
reference.unbind '.ChangeOnScroll'
base.$el.removeData 'ChangeOnScroll'
# Reset everything.
windowResize()
# Initialize the plugin.
base.init()
# Sets the option defaults.
$.ChangeOnScroll.defaultOptions =
topOffset: 0
leftOffset: 0
verticalScroll: true
horizontalScroll: true
className: 'changed-on-scroll'
$.fn.changeOnScroll = (options) ->
@each -> new ($.ChangeOnScroll)(this, options)
return
) jQuery
| true | ###
ChangeOnScroll
https:#github.com/ignlg/ChangeOnScroll
Copyright (c) 2015 PI:NAME:<NAME>END_PI
MIT license
###
(($) ->
$.isChangeOnScroll = (el) ->
not not $(el).data 'ChangeOnScroll'
$.ChangeOnScroll = (el, options) ->
base = this
base.$el = $ el
base.el = el
# Add a reverse reference to the DOM object.
base.$el.data 'ChangeOnScroll', base
# A flag so we know if the scroll has been reset.
isReset = false
# Who should change.
target = base.$el
# Who fires the change when scrolled above the top of the page.
reference = target
# The offset top of the element when resetScroll was called.
offsetTop = 0
# The offset left of the element when resetScroll was called.
offsetLeft = 0
resetScroll = ->
# Capture the offset top of the reference element.
offsetTop = reference.offset().top + ( base.options.topOffset or 0 )
# Capture the offset left of the reference element.
offsetLeft = reference.offset().left + ( base.options.leftOffset or 0 )
# Set that this has been called at least once.
isReset = true
# Checks to see if we need to do something based on new scroll position
# of the page.
checkScroll = ->
if not $.isChangeOnScroll target then return
wasReset = isReset
# If resetScroll has not yet been called, call it. This only
# happens once.
if not isReset
resetScroll()
else
# Capture the offset top of the reference element.
offsetTop = reference.offset().top + ( base.options.topOffset or 0 )
# Capture the offset left of the reference element.
offsetLeft = reference.offset().left + ( base.options.leftOffset or 0 )
# Grab the current horizontal scroll position.
x = $(window).scrollLeft()
# Grab the current vertical scroll position.
y = $(window).scrollTop()
# If the vertical or horizontall scroll position, plus the
# optional offsets, would put the reference element above the top
# of the page, set the class to the target element.
if (base.options.verticalScroll and y > offsetTop) or
( base.options.horizontalScroll and x > offsetLeft)
target.addClass base.options.className
else
target.removeClass base.options.className
windowResize = (event) ->
isReset = false
checkScroll()
windowScroll = (event) ->
if not not window.requestAnimationFrame
window.requestAnimationFrame(checkScroll)
else checkScroll()
preventDefault = (e) ->
e = e or window.event
if e.preventDefault
e.preventDefault()
e.returnValue = false
return
# Initializes this plugin. Captures the options passed in and binds
# to the window scroll and resize events.
base.init = ->
# Capture the options for this plugin.
base.options = $.extend {}, $.ChangeOnScroll.defaultOptions, options
if base.options.reference
reference = $ base.options.reference
# Reset the reference element offsets when the window is resized, then
# check to see if we need to toggle the target element class.
$(window).bind 'resize.ChangeOnScroll', windowResize
# Same for scroll.
$(window).bind 'scroll.ChangeOnScroll', windowScroll
# For touch devices.
if 'ontouchmove' of window
$(window).bind 'touchmove.ChangeOnScroll', checkScroll
reference.bind 'scroll.ChangeOnScroll', -> checkScroll()
reference.bind 'detach.ChangeOnScroll', (ev) ->
preventDefault ev
$(window).unbind 'resize.ChangeOnScroll', windowResize
$(window).unbind 'scroll.ChangeOnScroll', windowScroll
reference.unbind '.ChangeOnScroll'
base.$el.removeData 'ChangeOnScroll'
# Reset everything.
windowResize()
# Initialize the plugin.
base.init()
# Sets the option defaults.
$.ChangeOnScroll.defaultOptions =
topOffset: 0
leftOffset: 0
verticalScroll: true
horizontalScroll: true
className: 'changed-on-scroll'
$.fn.changeOnScroll = (options) ->
@each -> new ($.ChangeOnScroll)(this, options)
return
) jQuery
|
[
{
"context": " a person on Pipl by email address\n#\n# Author:\n# Scott J Roberts - @sroberts\n\nPIPL_API_KEY = process.env.PIPL_API_",
"end": 265,
"score": 0.9998388290405273,
"start": 250,
"tag": "NAME",
"value": "Scott J Roberts"
},
{
"context": "by email address\n#\n# Author:\n... | src/scripts/pipl.coffee | 3ch01c/hubot-vtr-scripts | 47 | # Description:
# Lookup a user on Pipl
#
# Dependencies:
# None
#
# Configuration:
# PIPL_API_KEY - Sign up at http://dev.pipl.com/
#
# Commands:
# hubot pipl email <email_address> - Looks up a person on Pipl by email address
#
# Author:
# Scott J Roberts - @sroberts
PIPL_API_KEY = process.env.PIPL_API_KEY
api_url = "http://api.pipl.com"
module.exports = (robot) ->
robot.respond /pipl email (.*)/i, (msg) ->
if PIPL_API_KEY?
target_email = msg.match[1].toLowerCase()
request_url = api_url + "/search/v3/json/?email=#{encodeURIComponent target_email}&exact_name=0&query_params_mode=and&key=#{PIPL_API_KEY}"
request_response = robot.http(request_url)
.get() (err, res, body) ->
if res.statusCode is 200
pipl_json = JSON.parse(body)
person_sources = "Person:\n"
records_source = "Records:\n"
if pipl_json.error?
msg.send "Yeah... that didn't work: #{pipl_json.error}"
else
## person
if pipl_json.person.sources?
person_sources += """ - #{person_source.name}: #{person_source.url}\n""" for person_source in pipl_json.person.sources
else
person_sources += """ - No information found.\n"""
## related_urls (really noisy)
# for related_url in pipl_json.related_urls
# msg.send related_url
## records
if pipl_json.records?
records_source += """ - #{record.source.name}: #{record.source.url}\n""" for record in pipl_json.records
else
records_source += """ - No information found.\n"""
pipl_summary = """
Ok, here's what I found about #{target_email}
Total Records: #{pipl_json["@records_count"]}
#{person_sources}
#{records_source}
"""
msg.send pipl_summary
else
msg.send "I couldn't access #{api_url}. Maybe this helps you figure it out? Error Message: #{err}. Status Code: #{res.statusCode}"
else
msg.send "Pipl API key not configured. Get one at http://dev.pipl.com/"
| 147400 | # Description:
# Lookup a user on Pipl
#
# Dependencies:
# None
#
# Configuration:
# PIPL_API_KEY - Sign up at http://dev.pipl.com/
#
# Commands:
# hubot pipl email <email_address> - Looks up a person on Pipl by email address
#
# Author:
# <NAME> - @sroberts
PIPL_API_KEY = process.env.PIPL_API_KEY
api_url = "http://api.pipl.com"
module.exports = (robot) ->
robot.respond /pipl email (.*)/i, (msg) ->
if PIPL_API_KEY?
target_email = msg.match[1].toLowerCase()
request_url = api_url + "/search/v3/json/?email=#{encodeURIComponent target_email}&exact_name=0&query_params_mode=and&key=#{PIPL_API_KEY}"
request_response = robot.http(request_url)
.get() (err, res, body) ->
if res.statusCode is 200
pipl_json = JSON.parse(body)
person_sources = "Person:\n"
records_source = "Records:\n"
if pipl_json.error?
msg.send "Yeah... that didn't work: #{pipl_json.error}"
else
## person
if pipl_json.person.sources?
person_sources += """ - #{person_source.name}: #{person_source.url}\n""" for person_source in pipl_json.person.sources
else
person_sources += """ - No information found.\n"""
## related_urls (really noisy)
# for related_url in pipl_json.related_urls
# msg.send related_url
## records
if pipl_json.records?
records_source += """ - #{record.source.name}: #{record.source.url}\n""" for record in pipl_json.records
else
records_source += """ - No information found.\n"""
pipl_summary = """
Ok, here's what I found about #{target_email}
Total Records: #{pipl_json["@records_count"]}
#{person_sources}
#{records_source}
"""
msg.send pipl_summary
else
msg.send "I couldn't access #{api_url}. Maybe this helps you figure it out? Error Message: #{err}. Status Code: #{res.statusCode}"
else
msg.send "Pipl API key not configured. Get one at http://dev.pipl.com/"
| true | # Description:
# Lookup a user on Pipl
#
# Dependencies:
# None
#
# Configuration:
# PIPL_API_KEY - Sign up at http://dev.pipl.com/
#
# Commands:
# hubot pipl email <email_address> - Looks up a person on Pipl by email address
#
# Author:
# PI:NAME:<NAME>END_PI - @sroberts
PIPL_API_KEY = process.env.PIPL_API_KEY
api_url = "http://api.pipl.com"
module.exports = (robot) ->
robot.respond /pipl email (.*)/i, (msg) ->
if PIPL_API_KEY?
target_email = msg.match[1].toLowerCase()
request_url = api_url + "/search/v3/json/?email=#{encodeURIComponent target_email}&exact_name=0&query_params_mode=and&key=#{PIPL_API_KEY}"
request_response = robot.http(request_url)
.get() (err, res, body) ->
if res.statusCode is 200
pipl_json = JSON.parse(body)
person_sources = "Person:\n"
records_source = "Records:\n"
if pipl_json.error?
msg.send "Yeah... that didn't work: #{pipl_json.error}"
else
## person
if pipl_json.person.sources?
person_sources += """ - #{person_source.name}: #{person_source.url}\n""" for person_source in pipl_json.person.sources
else
person_sources += """ - No information found.\n"""
## related_urls (really noisy)
# for related_url in pipl_json.related_urls
# msg.send related_url
## records
if pipl_json.records?
records_source += """ - #{record.source.name}: #{record.source.url}\n""" for record in pipl_json.records
else
records_source += """ - No information found.\n"""
pipl_summary = """
Ok, here's what I found about #{target_email}
Total Records: #{pipl_json["@records_count"]}
#{person_sources}
#{records_source}
"""
msg.send pipl_summary
else
msg.send "I couldn't access #{api_url}. Maybe this helps you figure it out? Error Message: #{err}. Status Code: #{res.statusCode}"
else
msg.send "Pipl API key not configured. Get one at http://dev.pipl.com/"
|
[
{
"context": "\n@TODO\n\n@namespace Atoms.Atom\n@class GMap\n\n@author Javier Jimenez Villar <javi@tapquo.com> || @soyjavi\n###\n\"use strict\"\n\nc",
"end": 75,
"score": 0.9998553991317749,
"start": 54,
"tag": "NAME",
"value": "Javier Jimenez Villar"
},
{
"context": ".Atom\n@class G... | atom/gmap.coffee | tapquo/atoms-app-gmaps | 0 | ###
@TODO
@namespace Atoms.Atom
@class GMap
@author Javier Jimenez Villar <javi@tapquo.com> || @soyjavi
###
"use strict"
class Atoms.Atom.GMap extends Atoms.Class.Atom
@version : "1.0.2"
@template : """
<div {{#if.style}}class="{{style}}"{{/if.style}}>
<span class="loading-animation"></span>
</div>"""
@base : "GMap"
@events : ["touch", "query", "route", "marker", "load"]
_map : null
_markers : []
_query : []
_route : null
output: ->
super
if Atoms.$("[data-extension=gmap]").length > 0
do @__load
else
url = "https://maps.googleapis.com/maps/api/js?v=3.exp&sensor=true&callback=console.log"
Atoms.resource("gmap", "script", url).then (error, value) =>
unless error
do @__load
else
console.error "Atoms.App.GMap error loading resources"
# Methods Instance
center: (position, zoom_level = 8) ->
@_map.setCenter new google?.maps?.LatLng(position.latitude, position.longitude)
@zoom zoom_level
zoom: (level) ->
@_map.setZoom level
query: (value) ->
parameters = {}
unless typeof value is "string"
parameters.latLng = __queryPlace value
else
parameters.address = value
@_query = []
service = new google?.maps?.Geocoder()
service.geocode parameters, (results, status) =>
if status is google.maps.GeocoderStatus.OK
@_query = (__parseAddress result for result in results)
@bubble "query", @_query
true
marker: (attributes) ->
marker = new google?.maps?.Marker
map : @_map
icon : __markerIcon attributes.icon
position : new google.maps.LatLng(attributes.latitude, attributes.longitude)
id : attributes.id
marker.setAnimation google.maps.Animation.BOUNCE if attributes.animate
if attributes.id
google.maps.event.addListener marker, "click", (e) =>
@bubble "marker", id: marker.id
@_markers.push marker
true
route: (origin, destination, mode = "DRIVING", markers) ->
@clean()
service = new google?.maps?.DirectionsService()
parameters =
origin : __queryPlace origin
destination : __queryPlace destination
travelMode : google.maps.TravelMode.DRIVING
service.route parameters, (@_route, status) =>
if status is google.maps.DirectionsStatus.OK
parameters = suppressMarkers: markers?
@_route.renderer = new google.maps.DirectionsRenderer parameters
@_route.renderer.setMap @_map
@_route.renderer.setDirections @_route
@__markersInRoute markers if markers
@bubble "route", @_route
true
routeInstructions: ->
instructions = @_route?.routes[0]?.legs[0]
if instructions
instructions =
distance: instructions.distance.text
duration: instructions.duration.text
steps : __parseRouteSteps instructions
instructions
clean: ->
marker.setMap null for marker in @_markers
@_markers = []
@_route?.renderer.setMap null
@_route = null
# -- Privates ----------------------------------------------------------------
__load: =>
@handleInterval = setInterval =>
if google?.maps?.Map?
clearInterval @handleInterval
@bubble "load"
@_map = new google.maps.Map @el[0],
center : new google.maps.LatLng(43.256963, -2.923441)
zoom : 1
mobile : true
sensor : false
disableDefaultUI: true
if "touch" in (@attributes.events or [])
google.maps.event.addListener @_map, "click", (e) =>
@bubble "touch", latitude: e.latLng.k, longitude: e.latLng.B
, 200
__markersInRoute: (markers) ->
instructions = @_route?.routes[0]?.legs[0]
if markers.origin
start = instructions.start_location
@marker latitude: start.k, longitude: start.A, markers.origin
if markers.destination
end = instructions.end_location
@marker latitude: end.k, longitude: end.A, markers.destination
# ==============================================================================
__markerIcon = (icon) ->
if icon
new google?.maps?.MarkerImage(
icon.url,
new google.maps.Size( icon.size_x, icon.size_y ),
new google.maps.Point( 0, 0 ),
new google.maps.Point( icon.anchor_x, icon.anchor_y )
)
else
null
__queryPlace = (value) ->
unless typeof value is "string"
if value.latitude? and value.longitude?
value = new google?.maps?.LatLng value.latitude, value.longitude
else
value = null
value
__parseAddress = (address) ->
address : address.formatted_address
type : address.types[0]
position:
latitude : address.geometry.location.k
longitude : address.geometry.location.B
__parseRouteSteps = (instructions) ->
steps = []
for step in instructions.steps
steps.push
distance : step.distance.text,
duration : step.duration.text,
instructions: step.instructions
steps
| 222161 | ###
@TODO
@namespace Atoms.Atom
@class GMap
@author <NAME> <<EMAIL>> || @soyjavi
###
"use strict"
class Atoms.Atom.GMap extends Atoms.Class.Atom
@version : "1.0.2"
@template : """
<div {{#if.style}}class="{{style}}"{{/if.style}}>
<span class="loading-animation"></span>
</div>"""
@base : "GMap"
@events : ["touch", "query", "route", "marker", "load"]
_map : null
_markers : []
_query : []
_route : null
output: ->
super
if Atoms.$("[data-extension=gmap]").length > 0
do @__load
else
url = "https://maps.googleapis.com/maps/api/js?v=3.exp&sensor=true&callback=console.log"
Atoms.resource("gmap", "script", url).then (error, value) =>
unless error
do @__load
else
console.error "Atoms.App.GMap error loading resources"
# Methods Instance
center: (position, zoom_level = 8) ->
@_map.setCenter new google?.maps?.LatLng(position.latitude, position.longitude)
@zoom zoom_level
zoom: (level) ->
@_map.setZoom level
query: (value) ->
parameters = {}
unless typeof value is "string"
parameters.latLng = __queryPlace value
else
parameters.address = value
@_query = []
service = new google?.maps?.Geocoder()
service.geocode parameters, (results, status) =>
if status is google.maps.GeocoderStatus.OK
@_query = (__parseAddress result for result in results)
@bubble "query", @_query
true
marker: (attributes) ->
marker = new google?.maps?.Marker
map : @_map
icon : __markerIcon attributes.icon
position : new google.maps.LatLng(attributes.latitude, attributes.longitude)
id : attributes.id
marker.setAnimation google.maps.Animation.BOUNCE if attributes.animate
if attributes.id
google.maps.event.addListener marker, "click", (e) =>
@bubble "marker", id: marker.id
@_markers.push marker
true
route: (origin, destination, mode = "DRIVING", markers) ->
@clean()
service = new google?.maps?.DirectionsService()
parameters =
origin : __queryPlace origin
destination : __queryPlace destination
travelMode : google.maps.TravelMode.DRIVING
service.route parameters, (@_route, status) =>
if status is google.maps.DirectionsStatus.OK
parameters = suppressMarkers: markers?
@_route.renderer = new google.maps.DirectionsRenderer parameters
@_route.renderer.setMap @_map
@_route.renderer.setDirections @_route
@__markersInRoute markers if markers
@bubble "route", @_route
true
routeInstructions: ->
instructions = @_route?.routes[0]?.legs[0]
if instructions
instructions =
distance: instructions.distance.text
duration: instructions.duration.text
steps : __parseRouteSteps instructions
instructions
clean: ->
marker.setMap null for marker in @_markers
@_markers = []
@_route?.renderer.setMap null
@_route = null
# -- Privates ----------------------------------------------------------------
__load: =>
@handleInterval = setInterval =>
if google?.maps?.Map?
clearInterval @handleInterval
@bubble "load"
@_map = new google.maps.Map @el[0],
center : new google.maps.LatLng(43.256963, -2.923441)
zoom : 1
mobile : true
sensor : false
disableDefaultUI: true
if "touch" in (@attributes.events or [])
google.maps.event.addListener @_map, "click", (e) =>
@bubble "touch", latitude: e.latLng.k, longitude: e.latLng.B
, 200
__markersInRoute: (markers) ->
instructions = @_route?.routes[0]?.legs[0]
if markers.origin
start = instructions.start_location
@marker latitude: start.k, longitude: start.A, markers.origin
if markers.destination
end = instructions.end_location
@marker latitude: end.k, longitude: end.A, markers.destination
# ==============================================================================
__markerIcon = (icon) ->
if icon
new google?.maps?.MarkerImage(
icon.url,
new google.maps.Size( icon.size_x, icon.size_y ),
new google.maps.Point( 0, 0 ),
new google.maps.Point( icon.anchor_x, icon.anchor_y )
)
else
null
__queryPlace = (value) ->
unless typeof value is "string"
if value.latitude? and value.longitude?
value = new google?.maps?.LatLng value.latitude, value.longitude
else
value = null
value
__parseAddress = (address) ->
address : address.formatted_address
type : address.types[0]
position:
latitude : address.geometry.location.k
longitude : address.geometry.location.B
__parseRouteSteps = (instructions) ->
steps = []
for step in instructions.steps
steps.push
distance : step.distance.text,
duration : step.duration.text,
instructions: step.instructions
steps
| true | ###
@TODO
@namespace Atoms.Atom
@class GMap
@author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI> || @soyjavi
###
"use strict"
class Atoms.Atom.GMap extends Atoms.Class.Atom

  @version : "1.0.2"

  @template : """
    <div {{#if.style}}class="{{style}}"{{/if.style}}>
      <span class="loading-animation"></span>
    </div>"""

  @base : "GMap"

  @events : ["touch", "query", "route", "marker", "load"]

  # NOTE(review): these are class-level (prototype) defaults, so the arrays
  # are shared by every instance until reassigned (clean() reassigns
  # _markers). Confirm single-instance usage before relying on isolation.
  _map : null
  _markers : []
  _query : []
  _route : null

  # Renders the atom and makes sure the Google Maps script is available,
  # loading it on demand; bubbles "load" (via __load) once the API is ready.
  output: ->
    super
    if Atoms.$("[data-extension=gmap]").length > 0
      do @__load
    else
      url = "https://maps.googleapis.com/maps/api/js?v=3.exp&sensor=true&callback=console.log"
      Atoms.resource("gmap", "script", url).then (error, value) =>
        unless error
          do @__load
        else
          console.error "Atoms.App.GMap error loading resources"

  # Methods Instance

  # Centers the map on position {latitude, longitude} and applies zoom_level.
  center: (position, zoom_level = 8) ->
    @_map.setCenter new google?.maps?.LatLng(position.latitude, position.longitude)
    @zoom zoom_level

  # Sets the map zoom to the given level.
  zoom: (level) ->
    @_map.setZoom level

  # Geocodes value (an address string or a {latitude, longitude} object) and
  # bubbles "query" with the parsed results (empty array on failure).
  query: (value) ->
    parameters = {}
    unless typeof value is "string"
      parameters.latLng = __queryPlace value
    else
      parameters.address = value
    @_query = []
    service = new google?.maps?.Geocoder()
    service.geocode parameters, (results, status) =>
      if status is google.maps.GeocoderStatus.OK
        @_query = (__parseAddress result for result in results)
      @bubble "query", @_query
    true

  # Drops a marker described by attributes {latitude, longitude, icon, id,
  # animate}; when an id is present, clicking the marker bubbles "marker".
  marker: (attributes) ->
    marker = new google?.maps?.Marker
      map      : @_map
      icon     : __markerIcon attributes.icon
      position : new google.maps.LatLng(attributes.latitude, attributes.longitude)
      id       : attributes.id
    marker.setAnimation google.maps.Animation.BOUNCE if attributes.animate
    if attributes.id
      google.maps.event.addListener marker, "click", (e) =>
        @bubble "marker", id: marker.id
    @_markers.push marker
    true

  # Requests a route between origin and destination. `mode` must be a
  # google.maps.TravelMode key ("DRIVING", "WALKING", "BICYCLING", "TRANSIT").
  # Bubbles "route" with the raw DirectionsResult once resolved.
  route: (origin, destination, mode = "DRIVING", markers) ->
    @clean()
    service = new google?.maps?.DirectionsService()
    parameters =
      origin      : __queryPlace origin
      destination : __queryPlace destination
      # FIX: honor the requested travel mode — it was hard-coded to DRIVING,
      # silently ignoring the `mode` argument.
      travelMode  : google.maps.TravelMode[mode] ? google.maps.TravelMode.DRIVING
    service.route parameters, (@_route, status) =>
      if status is google.maps.DirectionsStatus.OK
        parameters = suppressMarkers: markers?
        @_route.renderer = new google.maps.DirectionsRenderer parameters
        @_route.renderer.setMap @_map
        @_route.renderer.setDirections @_route
        @__markersInRoute markers if markers
      @bubble "route", @_route
    true

  # Returns {distance, duration, steps} for the first leg of the current
  # route, or undefined when no route has been resolved yet.
  routeInstructions: ->
    instructions = @_route?.routes[0]?.legs[0]
    if instructions
      instructions =
        distance: instructions.distance.text
        duration: instructions.duration.text
        steps   : __parseRouteSteps instructions
    instructions

  # Removes every marker and any rendered route from the map.
  clean: ->
    marker.setMap null for marker in @_markers
    @_markers = []
    @_route?.renderer.setMap null
    @_route = null

  # -- Privates ----------------------------------------------------------------

  # Polls every 200ms until google.maps.Map exists, then builds the map,
  # bubbles "load", and wires the optional "touch" event.
  __load: =>
    @handleInterval = setInterval =>
      if google?.maps?.Map?
        clearInterval @handleInterval
        @bubble "load"
        @_map = new google.maps.Map @el[0],
          center : new google.maps.LatLng(43.256963, -2.923441)
          zoom   : 1
          mobile : true
          sensor : false
          disableDefaultUI: true
        if "touch" in (@attributes.events or [])
          google.maps.event.addListener @_map, "click", (e) =>
            # FIX: use the public LatLng accessors instead of the minified
            # internals (e.latLng.k / e.latLng.B), which change between
            # Maps API releases.
            @bubble "touch", latitude: e.latLng.lat(), longitude: e.latLng.lng()
    , 200

  # Adds origin/destination markers for the current route when requested.
  # NOTE(review): marker() only reads its first argument, so the trailing
  # markers.origin / markers.destination argument is currently ignored —
  # kept for interface compatibility; confirm intended merge behavior.
  __markersInRoute: (markers) ->
    instructions = @_route?.routes[0]?.legs[0]
    if markers.origin
      start = instructions.start_location
      # FIX: lat()/lng() instead of the minified .k/.A internals.
      @marker latitude: start.lat(), longitude: start.lng(), markers.origin
    if markers.destination
      end = instructions.end_location
      @marker latitude: end.lat(), longitude: end.lng(), markers.destination
# ==============================================================================
# Builds a google.maps.MarkerImage for the given icon descriptor
# ({url, size_x, size_y, anchor_x, anchor_y}); null when no icon is given.
__markerIcon = (icon) ->
  return null unless icon
  size   = new google.maps.Size icon.size_x, icon.size_y
  origin = new google.maps.Point 0, 0
  anchor = new google.maps.Point icon.anchor_x, icon.anchor_y
  new google?.maps?.MarkerImage icon.url, size, origin, anchor
# Normalizes a query target: strings pass through untouched; objects with
# both latitude and longitude become a google.maps.LatLng; anything else
# maps to null.
__queryPlace = (value) ->
  return value if typeof value is "string"
  if value.latitude? and value.longitude?
    new google?.maps?.LatLng value.latitude, value.longitude
  else
    null
# Flattens a GeocoderResult into {address, type, position}.
# FIX: read the location through the public lat()/lng() accessors instead of
# the minified .k/.B internals, which are not stable across API releases.
__parseAddress = (address) ->
  address : address.formatted_address
  type    : address.types[0]
  position:
    latitude : address.geometry.location.lat()
    longitude: address.geometry.location.lng()
# Maps a DirectionsLeg's steps to plain {distance, duration, instructions}
# objects (text fields only), returning the resulting array.
__parseRouteSteps = (instructions) ->
  for step in instructions.steps
    distance    : step.distance.text
    duration    : step.duration.text
    instructions: step.instructions
|
[
{
"context": " rich text editing jQuery UI widget\n# (c) 2011 Henri Bergius, IKS Consortium\n# Hallo may be freely distrib",
"end": 79,
"score": 0.9998518824577332,
"start": 66,
"tag": "NAME",
"value": "Henri Bergius"
}
] | src/plugins/block.coffee | git-j/hallo | 0 | # Hallo - a rich text editing jQuery UI widget
# (c) 2011 Henri Bergius, IKS Consortium
# Hallo may be freely distributed under the MIT license
# Blockquote Plugin
# allows to change the current selection/ current block outer element
# provides a dropdown-menu-item that highlights the current block-type if any
# beware: changing the block-type over multiple blocks may result in distortion
# Wrapped in an IIFE so the widget registers against the page's jQuery.
((jQuery) ->
  # jQuery UI widget adding a block-format dropdown to a Hallo toolbar.
  jQuery.widget 'IKS.halloblock',
    options:
      editable: null   # the Hallo editable this widget operates on
      toolbar: null    # toolbar the dropdown is appended to
      uuid: ''         # unique id prefix used to build DOM ids
      # supported block elements
      elements: [
        'h1'
        'h2'
        'h3'
        'p'
        # 'pre'
        # 'blockquote'
        # 'none'
      ]
      buttonCssClass: null
    # populate toolbar:
    # creates a dropdown (content area + trigger button) and appends it
    # to the given toolbar element.
    populateToolbar: (toolbar) ->
      buttonset = jQuery "<span class=\"#{@widgetName}\"></span>"
      contentId = "#{@options.uuid}-#{@widgetName}-data"
      target = @_prepareDropdown contentId
      buttonset.append target
      buttonset.append @_prepareButton target
      toolbar.append buttonset
    # prepare dropdown:
    # returns the jQuery element that will be displayed when the toolbar icon
    # is triggered.
    _prepareDropdown: (contentId) ->
      contentArea = jQuery "<div id=\"#{contentId}\"></div>"
      containingElement = @options.editable.element.get(0).tagName.toLowerCase()
      # add a single dropdown menu entry for one block element name
      addElement = (element) =>
        el = jQuery "<button class='blockselector'>#{element}</button>"
        # pre-select the entry matching the editable's own tag
        if containingElement is element
          el.addClass 'selected'
        # block formatting is only offered when the editable is a div container
        unless containingElement is 'div'
          el.addClass 'disabled'
        # execute the block-formatting commands on clicking the menu-item
        el.bind 'click', =>
          if el.hasClass 'disabled'
            return
          if element == 'none'
            @options.editable.execute 'removeFormat'
            return
          # IE expects the tag wrapped in angle brackets and a different command casing
          if navigator.appName is 'Microsoft Internet Explorer'
            @options.editable.execute 'FormatBlock', '<'+element.toUpperCase()+'>'
          else
            @options.editable.execute 'formatBlock', element.toUpperCase()
        # query the state of the current cursor block and change the toolbar accordingly
        queryState = (event) =>
          block = document.queryCommandValue 'formatBlock'
          if block.toLowerCase() is element
            el.addClass 'selected'
            return
          el.removeClass 'selected'
        # track the caret so the highlighted entry follows the current block
        @options.editable.element.bind 'keyup paste change mouseup', queryState
        # re-attach / detach the tracker when the editable is toggled
        @options.editable.element.bind 'halloenabled', =>
          @options.editable.element.bind 'keyup paste change mouseup', queryState
        @options.editable.element.bind 'hallodisabled', =>
          @options.editable.element.unbind 'keyup paste change mouseup', queryState
        el
      # build the menu-items for all elements that are configured by options
      for element in @options.elements
        contentArea.append addElement element
      contentArea
    # prepare toolbar button:
    # creates the dropdown trigger button for the toolbar.
    _prepareButton: (target) ->
      buttonElement = jQuery '<span></span>'
      button_label = 'block'
      # allow the host page to override the label via window.action_list
      if ( window.action_list && window.action_list['hallojs_block'] != undefined )
        button_label = window.action_list['hallojs_block'].title
      buttonElement.hallodropdownbutton
        uuid: @options.uuid
        editable: @options.editable
        label: button_label
        command: 'block'
        icon: 'icon-text-height'
        target: target
        cssClass: @options.buttonCssClass
      buttonElement
)(jQuery)
| 32786 | # Hallo - a rich text editing jQuery UI widget
# (c) 2011 <NAME>, IKS Consortium
# Hallo may be freely distributed under the MIT license
# Blockquote Plugin
# allows to change the current selection/ current block outer element
# provides a dropdown-menu-item that highlights the current block-type if any
# beware: changing the block-type over multiple blocks may result in distortion
((jQuery) ->
jQuery.widget 'IKS.halloblock',
options:
editable: null
toolbar: null
uuid: ''
# supported block elements
elements: [
'h1'
'h2'
'h3'
'p'
# 'pre'
# 'blockquote'
# 'none'
]
buttonCssClass: null
# populate toolbar
# creates a dropdown that is appended to the given toolbar
populateToolbar: (toolbar) ->
buttonset = jQuery "<span class=\"#{@widgetName}\"></span>"
contentId = "#{@options.uuid}-#{@widgetName}-data"
target = @_prepareDropdown contentId
buttonset.append target
buttonset.append @_prepareButton target
toolbar.append buttonset
# prepare dropdown
  # return jq_dom_element that will be displayed when the toolbar-icon is triggered
_prepareDropdown: (contentId) ->
contentArea = jQuery "<div id=\"#{contentId}\"></div>"
containingElement = @options.editable.element.get(0).tagName.toLowerCase()
# add a single dropdown menu entry
addElement = (element) =>
el = jQuery "<button class='blockselector'>#{element}</button>"
if containingElement is element
el.addClass 'selected'
unless containingElement is 'div'
el.addClass 'disabled'
# execute the block-formatting commands on clicking the menu-item
el.bind 'click', =>
if el.hasClass 'disabled'
return
if element == 'none'
@options.editable.execute 'removeFormat'
return
if navigator.appName is 'Microsoft Internet Explorer'
@options.editable.execute 'FormatBlock', '<'+element.toUpperCase()+'>'
else
@options.editable.execute 'formatBlock', element.toUpperCase()
# query the state of the current cursor block and change the toolbar accordingly
queryState = (event) =>
block = document.queryCommandValue 'formatBlock'
if block.toLowerCase() is element
el.addClass 'selected'
return
el.removeClass 'selected'
@options.editable.element.bind 'keyup paste change mouseup', queryState
@options.editable.element.bind 'halloenabled', =>
@options.editable.element.bind 'keyup paste change mouseup', queryState
@options.editable.element.bind 'hallodisabled', =>
@options.editable.element.unbind 'keyup paste change mouseup', queryState
el
# build the menu-items for all elements that are configured by options
for element in @options.elements
contentArea.append addElement element
contentArea
# prepare toolbar button
# creates a toolbar button
_prepareButton: (target) ->
buttonElement = jQuery '<span></span>'
button_label = 'block'
if ( window.action_list && window.action_list['hallojs_block'] != undefined )
button_label = window.action_list['hallojs_block'].title
buttonElement.hallodropdownbutton
uuid: @options.uuid
editable: @options.editable
label: button_label
command: 'block'
icon: 'icon-text-height'
target: target
cssClass: @options.buttonCssClass
buttonElement
)(jQuery)
| true | # Hallo - a rich text editing jQuery UI widget
# (c) 2011 PI:NAME:<NAME>END_PI, IKS Consortium
# Hallo may be freely distributed under the MIT license
# Blockquote Plugin
# allows to change the current selection/ current block outer element
# provides a dropdown-menu-item that highlights the current block-type if any
# beware: changing the block-type over multiple blocks may result in distortion
((jQuery) ->
jQuery.widget 'IKS.halloblock',
options:
editable: null
toolbar: null
uuid: ''
# supported block elements
elements: [
'h1'
'h2'
'h3'
'p'
# 'pre'
# 'blockquote'
# 'none'
]
buttonCssClass: null
# populate toolbar
# creates a dropdown that is appended to the given toolbar
populateToolbar: (toolbar) ->
buttonset = jQuery "<span class=\"#{@widgetName}\"></span>"
contentId = "#{@options.uuid}-#{@widgetName}-data"
target = @_prepareDropdown contentId
buttonset.append target
buttonset.append @_prepareButton target
toolbar.append buttonset
# prepare dropdown
  # return jq_dom_element that will be displayed when the toolbar-icon is triggered
_prepareDropdown: (contentId) ->
contentArea = jQuery "<div id=\"#{contentId}\"></div>"
containingElement = @options.editable.element.get(0).tagName.toLowerCase()
# add a single dropdown menu entry
addElement = (element) =>
el = jQuery "<button class='blockselector'>#{element}</button>"
if containingElement is element
el.addClass 'selected'
unless containingElement is 'div'
el.addClass 'disabled'
# execute the block-formatting commands on clicking the menu-item
el.bind 'click', =>
if el.hasClass 'disabled'
return
if element == 'none'
@options.editable.execute 'removeFormat'
return
if navigator.appName is 'Microsoft Internet Explorer'
@options.editable.execute 'FormatBlock', '<'+element.toUpperCase()+'>'
else
@options.editable.execute 'formatBlock', element.toUpperCase()
# query the state of the current cursor block and change the toolbar accordingly
queryState = (event) =>
block = document.queryCommandValue 'formatBlock'
if block.toLowerCase() is element
el.addClass 'selected'
return
el.removeClass 'selected'
@options.editable.element.bind 'keyup paste change mouseup', queryState
@options.editable.element.bind 'halloenabled', =>
@options.editable.element.bind 'keyup paste change mouseup', queryState
@options.editable.element.bind 'hallodisabled', =>
@options.editable.element.unbind 'keyup paste change mouseup', queryState
el
# build the menu-items for all elements that are configured by options
for element in @options.elements
contentArea.append addElement element
contentArea
# prepare toolbar button
# creates a toolbar button
_prepareButton: (target) ->
buttonElement = jQuery '<span></span>'
button_label = 'block'
if ( window.action_list && window.action_list['hallojs_block'] != undefined )
button_label = window.action_list['hallojs_block'].title
buttonElement.hallodropdownbutton
uuid: @options.uuid
editable: @options.editable
label: button_label
command: 'block'
icon: 'icon-text-height'
target: target
cssClass: @options.buttonCssClass
buttonElement
)(jQuery)
|
[
{
"context": " route: 'gitHub'\n path: \"https://github.com/lumapictures/meteor-luma-ui\"\n external: true\n page:\n ",
"end": 414,
"score": 0.9985811710357666,
"start": 402,
"tag": "USERNAME",
"value": "lumapictures"
},
{
"context": "route: 'reportBugs'\n path: \"h... | example/routes.coffee | LumaPictures/meteor-luma-ui | 10 | Router.addRoutes [
{
route: 'home'
path: '/'
controller: 'ExampleController'
page:
title: "Home"
subtitle: "This isn't really home, its work."
},{
route: 'jQuerySelect2'
path: "https://jquery-select2.meteor.com"
external: true
page:
title: "jQuery Select2"
subtitle: "Open Source Repo"
},{
route: 'gitHub'
path: "https://github.com/lumapictures/meteor-luma-ui"
external: true
page:
title: "GitHub"
subtitle: "Open Source Repo"
nav:
priority: 1000
icon: 'icon-github'
},{
route: 'reportBugs'
path: "https://github.com/lumapictures/meteor-luma-ui/issues/new"
external: true
page:
title: "Report Bugs"
subtitle: "GitHub Issues"
},{
route: 'source'
path: "http://lumapictures.github.io/meteor-luma-ui/"
external: true
page:
title: "Annotated Source"
subtitle: "GitHub pages generated by Groc"
nav:
priority: 1001
icon: 'icon-code'
},{
route: 'build'
path: "https://travis-ci.org/LumaPictures/meteor-luma-ui"
external: true
page:
title: "Build Status"
subtitle: "Continuous Integration by Travis CI"
nav:
priority: 1002
icon: 'icon-cogs'
},{
route: 'errorPages'
path: "/error-pages"
controller: 'ExampleController'
nav:
priority: 0
icon: 'icon-warning'
children: [{
title: 'Not Found'
route: 'error404'
}]
page:
title: 'Error Pages'
},{
route: 'error404'
path: "/error-pages/404"
controller: 'FullPageController'
},{
route: "forms"
path: "/forms"
controller: 'ExampleController'
nav:
priority: 9
icon: 'icon-stack'
children: [{
title: 'Form Snippets'
route: 'formSnippets'
children: [{
title: 'Bug Report Form'
route: 'bugReportForm'
},{
title: 'Feedback Form'
route: 'feedbackForm'
},{
title: 'Job Application Form'
route: 'jobApplicationForm'
},{
title: 'Subscribe Form'
route: 'subscribeForm'
},{
title: 'Simple Contact Form'
route: 'simpleContactForm'
},{
title: 'Advanced Contact Form'
route: 'advancedContactForm'
},{
title: 'Simple Registration Form'
route: 'simpleRegistrationForm'
},{
title: 'Seperated Form'
route: 'seperatedForm'
},{
title: 'Shipping Information Form'
route: 'shippingInformationForm'
},{
title: 'Shipping Method Form'
route: 'shippingMethodForm'
},{
title: 'Billing Address Form'
route: 'billingAddressForm'
},{
title: 'Payment Information Form'
route: 'paymentInformationForm'
}]
},{
title: 'Form Components'
route: 'formComponents'
children: [{
title: 'Form Elements'
route: 'formElements'
children: [{
title: 'Basic Inputs'
route: 'basicInputs'
},{
title: 'Styled Elements'
route: 'styledElements'
},{
title: 'Unstyled Elements'
route: 'unstyledElements'
},{
title: 'Spinners'
route: 'spinners'
}]
},{
title: 'WYSIWYG Editors'
route: 'wysiwygEditors'
},{
title: 'Selects'
route: 'selects'
},{
title: 'jQuery Select2'
route: 'jQuerySelect2'
},{
title: 'Multi File Uploaders'
route: 'multiFileUploaders'
},{
title: 'Input Grids'
route: 'inputGrids'
}]
},{
title: 'Form Layouts'
route: 'formLayouts'
},{
title: 'Form Validation'
route: 'formValidation'
}]
page:
title: 'Forms'
subtitle: 'A necessary evil'
callouts: [
cssClass: "callout-success"
title: "We All Hate Filling Out Forms"
message: "Time to change that."
]
},{
route: "formLayouts"
path: "/forms/form-layouts"
controller: 'ExampleController'
page:
title: "Form Layouts"
subtitle: "Getting Laid Out."
breadcrumbs: [
title: "Forms"
route: 'forms'
]
callouts: [
cssClass: "callout-danger"
title: "Flexible Form Layouts"
message: "Organized Input = Organized Data."
]
},{
route: "formValidation"
path: "/forms/form-validation"
controller: 'ExampleController'
page:
title: "Form Validation"
subtitle: "Everyone needs a little validation."
breadcrumbs: [
title: "Forms"
route: 'forms'
]
callouts: [
cssClass: "callout-success"
title: "Client Side Validation"
message: "You shouldn't have to wait on the server to know that your data works."
]
},{
route: "formComponents"
path: "/forms/form-components"
controller: 'ExampleController'
page:
title: "Form Components"
subtitle: "Forms are badass."
breadcrumbs: [
title: "Forms"
route: 'forms'
]
callouts: [
cssClass: "callout-danger"
title: "Reusable Form Components"
message: "All of these components are abstract enough to be used with any data."
]
},{
route: "wysiwygEditors"
path: "/forms/form-components/wysiwyg-editors"
controller: 'ExampleController'
page:
title: "WYSIWYG"
subtitle: "Awesome editors"
breadcrumbs: [{
title: "Forms"
route: "forms"
},{
title: "Form Components"
route: "formComponents"
}]
callouts: [
cssClass: "callout-danger"
title: "WYSIWYG editors"
message: "Content editors are based on awesome WYSIHTML5 plugin. Default examples of editor - inside and outside panel, in modal dialog."
]
},{
route: "multiFileUploaders"
path: "/forms/form-components/multi-file-uploaders"
controller: 'ExampleController'
page:
title: 'Multi File Uploaders'
subtitle: "Two is better than one."
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
}]
callouts: [
cssClass: "callout-success"
title: "Doing the same thing over and over sucks"
message: "Now you can do it all in one shot."
]
},{
route: "inputGrids"
path: "/forms/form-components/input-grids"
controller: 'ExampleController'
page:
title: 'Input Grids'
subtitle: "Welcome to the Grid."
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
}]
callouts: [
cssClass: "callout-info"
title: "Rigid Structure for Dynamic Data"
message: "A flexible grids lets you build a flexible app."
]
},{
route: "selects"
path: "/forms/form-components/selects"
controller: 'ExampleController'
page:
title: "Selects"
subtitle: "You must choose wisely."
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
}]
callouts: [
cssClass: "callout-success"
title: "Lots of options for selecting lots of options"
message: "Now you can do it all in one shot."
]
},{
route: "formElements"
path: "/forms/form-components/form-elements"
controller: 'ExampleController'
page:
title: 'Form Elements'
subtitle: "It's elementary my dear."
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
}]
callouts: [
cssClass: "callout-warning"
title: "Forms Aren't Sexy"
message: "But that doesn't mean they can just wear sweatpants."
]
},{
route: "basicInputs"
path: "/forms/form-components/form-elements/basic-inputs"
controller: 'ExampleController'
page:
title: 'Basic Inputs'
subtitle: 'Sometimes old fashioned is best.'
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
},{
title: 'Form Elements'
route: 'formElements'
}]
callouts: [
cssClass: "callout-success"
title: "Basic Doesn't Mean Stupid"
message: "These input elements are all reactive and will display dynamic data."
]
},{
route: "styledElements"
path: "/forms/form-components/form-elements/styled-elements"
controller: 'ExampleController'
page:
title: 'Styled Elements'
subtitle: 'Form swag.'
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
},{
title: 'Form Elements'
route: 'formElements'
}]
callouts: [
cssClass: "callout-info"
title: "Forms, So Hot Right Now"
message: "Who wants to look at something ugly all day?"
]
},{
route: "unstyledElements"
path: "/forms/form-components/form-elements/unstyled-elements"
controller: 'ExampleController'
page:
title: 'Unstyled Elements'
subtitle: "Not ugly, just unstyled."
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
}]
callouts: [
cssClass: "callout-warning"
title: "Plain Ole Checkboxes"
message: "Cause why not."
]
},{
route: "spinners"
path: "/forms/form-components/form-elements/spinners"
controller: 'ExampleController'
page:
title: 'Spinners'
subtitle: 'Right round baby.'
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
},{
title: 'Form Elements'
route: 'formElements'
}]
callouts: [
cssClass: "callout-info"
title: "I'm actually hypnotizing you"
message: "When I snap my figures you're going to wake up."
]
},{
route: "formSnippets"
path: "/forms/form-snippets"
controller: 'ExampleController'
page:
title: "Form Snippets"
subtitle: "Snip Snip."
breadcrumbs: [
title: "Forms"
route: 'forms'
]
callouts: [
cssClass: "callout-warning"
title: "Just Getting the Standards Out of the Way"
message: "A couple of ready-to-use form snippets - reports, contact forms, CV, shipping, payment, job listings etc."
]
},{
route: "bugReportForm"
path: "/forms/form-snippets/bug-report-form"
controller: 'ExampleController'
page:
title: "Bug Report Form"
subtitle: "I hate bugs."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "jobApplicationForm"
path: "/forms/form-snippets/job-application-form"
controller: 'ExampleController'
page:
title: "Job Application Form"
subtitle: "Work sucks, but being broke is worse."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "simpleContactForm"
path: "/forms/form-snippets/simple-contact-form"
controller: 'ExampleController'
page:
title: "Simple Contact Form"
subtitle: "Let me get those digits."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "advancedContactForm"
path: "/forms/form-snippets/advanced-contact-form"
controller: 'ExampleController'
page:
title: "Advanced Contact Form"
subtitle: "For that special kind of contact."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "simpleRegistrationForm"
path: "/forms/form-snippets/simple-registration-form"
controller: 'ExampleController'
page:
title: "Simple Registration Form"
subtitle: "You're in the system, man."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "seperatedForm"
path: "/forms/form-snippets/seperated-form"
controller: 'ExampleController'
page:
title: "Seperated Form"
subtitle: "You gotta keep 'em seperated."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "surveyForm"
path: "/forms/form-snippets/survey-form"
controller: 'ExampleController'
page:
title: "Survey Form"
subtitle: "Everyone hates surveys, except these of course."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "feedbackForm"
path: "/forms/form-snippets/feedback-form"
controller: 'ExampleController'
page:
title: "Feedback Form"
subtitle: "My foot gives great feedback."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "subscribeForm"
path: "/forms/form-snippets/subscribe-form"
controller: 'ExampleController'
page:
title: "Subscription Form"
subtitle: "Want some magazines?"
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "shippingInformationForm"
path: "/forms/form-snippets/shipping-information-form"
controller: 'ExampleController'
page:
title: "Shipping Information Form"
subtitle: "Where are we sending your shit?"
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "shippingMethodForm"
path: "/forms/form-snippets/shipping-method-form"
controller: 'ExampleController'
page:
title: "Shipping Method Form"
subtitle: "How do you want your shit wrapped?"
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "billingAddressForm"
path: "/forms/form-snippets/billing-address-form"
controller: 'ExampleController'
page:
title: "Billing Address Form"
subtitle: "Whose paying for this shit?"
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "paymentInformationForm"
path: "/forms/form-snippets/payment-information-form"
controller: 'ExampleController'
page:
title: "Payment Information Form"
subtitle: "And now I have your credit card number..."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "interfaceComponents"
path: "/interface-components"
controller: 'ExampleController'
nav:
priority: 10
icon: 'icon-grid'
children: [{
title: 'Visuals'
route: 'visuals'
},{
title: 'Navs'
route: 'navs'
},{
title: 'Panel Options'
route: 'panelOptions'
},{
title: 'Navbars'
route: 'navbars'
},{
title: 'Info Blocks'
route: 'infoBlocks'
},{
title: 'Icons'
route: 'icons'
},{
title: 'Buttons'
route: 'buttons'
},{
title: 'Calendar'
route: 'calendar'
},{
title: 'Typography'
route: 'typography'
},{
title: 'Gallery'
route: 'gallery'
},{
title: 'Header Elements'
route: 'headerElements'
},{
title: 'Content Grid'
route: 'contentGrid'
}]
page:
title: "Interface Components"
subtitle: "Do stuff."
breadcrumbs: []
callouts: [
cssClass: "callout-danger"
title: "User interface components"
message: "Page contains default Bootstrap/jQuery UI and custom visual components and notifications."
]
},{
route: "visuals"
path: "/interface-components/visuals"
controller: 'ExampleController'
page:
title: "Visuals & Notifications"
subtitle: "Popups and shit."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-danger"
title: "User interface components"
message: "Page contains default Bootstrap/jQuery UI and custom visual components and notifications."
]
},{
route: "navs"
path: "/interface-components/navs"
controller: 'ExampleController'
page:
title: "Navs"
subtitle: "Drop it like its hot."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-info"
title: "Bootstrap navs"
message: "Bootstrap tabs, pills, dropdown with options, media objects and lists, justified components, nav lists, toggles and accordions."
]
},{
route: "panelOptions"
path: "/interface-components/panel-options"
controller: 'ExampleController'
page:
title: "Panel Options"
subtitle: "More options..."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-success"
title: "Panel options"
message: "Page contains examples of elements, which can be used in panel headings - labels, badges, buttons, button groups, icons, progress bars etc. Including combinations of panel and inner components."
]
},{
route: "navbars"
path: "/interface-components/navbars"
controller: 'ExampleController'
page:
title: "Navbars"
subtitle: "Get where you need to be."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-info"
title: "Navbar options"
message: "Top navigation bar examples - custom template navbar, navigation links and items, text, form, buttons etc."
]
},{
route: "infoBlocks"
path: "/interface-components/info-blocks"
controller: 'ExampleController'
page:
title: "Info Blocks"
subtitle: "Like Minecraft, but not."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-info"
title: "Sparklines & Flots"
message: "Custom info elements with charts and other statistics, based on Sparklines and Flot sharts."
]
},{
route: "icons"
path: "/interface-components/icons"
controller: 'ExampleController'
page:
title: "Icons"
subtitle: "Like Minecraft, but not."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-warning"
title: "Icons"
message: "Template includes awesome set of 850 IcoMoon icons."
]
},{
route: "buttons"
path: "/interface-components/buttons"
controller: 'ExampleController'
page:
title: "Buttons"
subtitle: "Don't touch anything."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-error"
title: "Buttons"
message: "Bootstrap button examples - default, disabled, button groups, with dropups/dropdowns, dropdowns on hover and on click, button toolbars etc."
]
},{
route: "calendar"
path: "/interface-components/calendar"
controller: 'ExampleController'
page:
title: "Calendar"
subtitle: "Now you don't have an excuse for."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-warning"
title: "Fullcalendar plugin integrated"
message: "Page with integrated Fullcalendar jquery plugin. Including 2 options - inside and outside panels."
]
},{
route: "typography"
path: "/interface-components/typography"
controller: 'ExampleController'
page:
title: "Typography"
subtitle: "Like caligraphy, but easier."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-success"
title: "Template typography"
message: "Template typography - styled/unstyled headings, lists, description lists, code/pre, blockquotes, text styles etc."
]
},{
route: "gallery"
path: "/interface-components/gallery"
controller: 'ExampleController'
page:
title: "Gallery"
subtitle: "Image and Video gallery."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-warning"
title: "Image and Video Gallery"
message: "Media gallery page example with custom toolbar. Includes layout - with/without backgrounds, with title, without title, gallery style and table view."
]
},{
route: "headerElements"
path: "/interface-components/header-elements"
controller: 'ExampleController'
page:
title: "Page Header Elements"
subtitle: "Page header area custom elements."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-warning"
title: "Page Headers"
message: "2 breadcrumb positions - on top and after page title. Also custom set of different elements which can be used on the right side of page title area - buttons, progress bars, graphs, info's etc."
]
},{
route: "contentGrid"
path: "/interface-components/content-grid"
controller: 'ExampleController'
page:
title: "Content Grid"
subtitle: "Bootstrap responsive content grid."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-warning"
title: "Bootstrap Content Grid"
message: "Bootstrap mobile first 12 columns grid with examples."
]
},{
route: "invoices"
path: "/invoices"
controller: 'ExampleController'
nav:
priority: 4
icon: 'icon-coin'
children: [{
title: 'Invoice Template'
route: 'invoiceTemplate'
},{
title: 'Invoice List'
route: 'invoiceList'
}]
page:
title: "Invoices"
subtitle: "Gimme my money."
},{
route: "invoiceTemplate"
path: "/invoices/template"
controller: 'ExampleController'
page:
title: "Invoice Template"
subtitle: "Stop repeating yourself."
breadcrumbs: [
title: "Invoices"
route: 'invoices'
]
},{
route: "invoiceList"
path: "/invoices/list"
controller: 'ExampleController'
page:
title: "Invoice List"
subtitle: "Time to break some kneecaps."
breadcrumbs: [
title: "Invoices"
route: 'invoices'
]
},{
route: "login"
path: "/login"
controller: "FullPageController"
nav:
priority: 0
icon: 'icon-user-plus'
page:
title: 'Login'
},{
route: "search"
path: "/search"
controller: 'ExampleController'
nav:
priority: 6
icon: 'icon-search3'
page:
title: "Search"
subtitle: "What was I looking for?"
},{
route: "support"
path: "/support"
controller: 'ExampleController'
nav:
priority: 5
icon: 'icon-bubble6'
children: [{
title: 'FAQ'
route: 'faq'
},{
title: 'Chat With Tabs'
route: 'chatWithTabs'
},{
title: 'Chat With Contacts'
route: 'chatWithContacts'
}]
page:
title: "Support"
subtitle: "HALP!"
},{
route: "faq"
path: "/support/faq"
controller: 'ExampleController'
page:
title: "FAQ"
subtitle: "Let me Google that for you."
breadcrumbs: [
title: "Support"
route: 'support'
]
},{
route: "chatWithTabs"
path: "/support/chat-with-tabs"
controller: 'ExampleController'
page:
title: "Chat With Tabs"
subtitle: "Figure it out."
breadcrumbs: [
title: "Support"
route: 'support'
]
},{
route: "chatWithContacts"
path: "/support/chat-with-contacts"
controller: 'ExampleController'
page:
title: "Chat With Contacts"
subtitle: "Figure it out."
breadcrumbs: [
title: "Support"
route: 'support'
]
},{
route: "tables"
path: "/tables"
controller: 'ExampleController'
nav:
priority: 8
icon: 'icon-table2'
children: [{
title: 'Static Tables'
route: 'staticTables'
},{
title: 'jQuery Datatables'
route: 'datatables'
}]
page:
title: "Tables"
subtitle: "Yo dawg, heard you like tables."
},{
route: "staticTables"
path: "/tables/static"
controller: 'ExampleController'
page:
title: "Static Tables"
subtitle: "So good it doesn't have to change."
breadcrumbs: [
title: "Tables"
route: 'tables'
]
},{
route: 'datatables'
path: "https://jquery-datatables.meteor.com"
external: true
page:
title: "jQuery Datatables"
subtitle: "Open Source Repo"
},{
route: "taskManager"
path: "/task-manager"
controller: 'ExampleController'
nav:
priority: 2
icon: 'icon-numbered-list'
children: [{
title: 'Task Grid'
route: 'taskGrid'
},{
title: 'Task List'
route: 'taskList'
},{
title: 'Task Detail'
route: 'taskDetail'
}]
page:
title: "Task Manager"
subtitle: "Get stuff done."
callouts: [
# fixed typo: "Comprehensize" -> "Comprehensive" (user-facing callout text)
cssClass: "callout-info"
title: "Comprehensive task manager that can be easily extended."
message: "The tasks pattern can adapt to any pipeline."
]
},{
route: "taskGrid"
path: "/task-manager/task-grid"
controller: 'ExampleController'
page:
title: "Task Grid"
subtitle: "Task grid with options bar"
breadcrumbs: [
title: "Task Manager"
route: 'taskManager'
]
},{
route: "taskList"
path: "/task-manager/task-list"
controller: 'ExampleController'
page:
title: "Task List"
subtitle: "Task list inside data table"
breadcrumbs: [
title: "Task Manager"
route: 'taskManager'
]
},{
route: "taskDetail"
path: "/task-manager/task-detail"
controller: 'ExampleController'
page:
title: "Task Detail"
subtitle: "Detailed task layout example"
breadcrumbs: [
title: "Task Manager"
route: 'taskManager'
]
},{
route: "user"
path: "/user"
controller: 'ExampleController'
nav:
priority: 3
icon: 'icon-user'
children: [{
title: 'Team'
route: 'team'
},{
title: 'Contacts'
route: 'contactList'
},{
title: 'Profile'
route: 'profile'
}]
page:
title: "User"
subtitle: "Be all you can be."
},{
route: "team"
path: "/user/team"
controller: 'ExampleController'
page:
title: "Team"
subtitle: "Because you can't do everything yourself."
breadcrumbs: [
title: "User"
route: 'user'
]
},{
route: "contactList"
path: "/user/contacts"
controller: 'ExampleController'
page:
title: "Contacts"
subtitle: "Now you don't have an excuse for forgetting their name."
breadcrumbs: [
title: "User"
route: 'user'
]
},{
route: "profile"
path: "/user/profile"
controller: 'ExampleController'
page:
title: "Austin Rivas"
subtitle: "Witty Tagline."
breadcrumbs: [
title: "User"
route: 'user'
]
},{
route: 'dashboard'
path: "/dashboard"
controller: 'ExampleController'
nav:
priority: 1
icon: 'icon-screen2'
page:
title: "Dashboard"
subtitle: "I missed you Austin, it's been 12 hours since your last visit."
tasksTable:
columns: [{
sTitle: "Task Description"
},{
sTitle: "Priority"
sClass: 'task-priority'
},{
sTitle: "Date Added"
sClass: 'task-date-added'
},{
sTitle: "Progress"
sClass: 'task-progress'
},{
sTitle: "Deadline"
sClass: 'task-deadline'
},{
sTitle: ''
sClass: 'task-tools text-center'
}]
rows: [{
_id: 'example'
description: 'This is a description for a task.'
subtitle: 'I am a subtitle!'
priority: 'High'
date_added: 'September 20, 2013'
progress: 90
deadline: 14
},{
_id: '2'
description: 'This is a description for a task.'
subtitle: 'I am a subtitle!'
priority: 'High'
date_added: 'September 20, 2013'
progress: 90
deadline: 14
}]
},{
route: "charts"
path: "/charts"
controller: 'ExampleController'
nav:
priority: 7
icon: 'icon-bars'
page:
title: "Charts"
subtitle: "Yo dawg, heard you like charts."
}]
# Start the router once every route above has been registered.
Router.initialize()
{
route: 'home'
path: '/'
controller: 'ExampleController'
page:
title: "Home"
subtitle: "This isn't really home, its work."
},{
route: 'jQuerySelect2'
path: "https://jquery-select2.meteor.com"
external: true
page:
title: "jQuery Select2"
subtitle: "Open Source Repo"
},{
route: 'gitHub'
path: "https://github.com/lumapictures/meteor-luma-ui"
external: true
page:
title: "GitHub"
subtitle: "Open Source Repo"
nav:
priority: 1000
icon: 'icon-github'
},{
route: 'reportBugs'
path: "https://github.com/lumapictures/meteor-luma-ui/issues/new"
external: true
page:
title: "Report Bugs"
subtitle: "GitHub Issues"
},{
route: 'source'
path: "http://lumapictures.github.io/meteor-luma-ui/"
external: true
page:
title: "Annotated Source"
subtitle: "GitHub pages generated by Groc"
nav:
priority: 1001
icon: 'icon-code'
},{
route: 'build'
path: "https://travis-ci.org/LumaPictures/meteor-luma-ui"
external: true
page:
title: "Build Status"
subtitle: "Continuous Integration by Travis CI"
nav:
priority: 1002
icon: 'icon-cogs'
},{
route: 'errorPages'
path: "/error-pages"
controller: 'ExampleController'
nav:
priority: 0
icon: 'icon-warning'
children: [{
title: 'Not Found'
route: 'error404'
}]
page:
title: 'Error Pages'
},{
route: 'error404'
path: "/error-pages/404"
controller: 'FullPageController'
},{
route: "forms"
path: "/forms"
controller: 'ExampleController'
nav:
priority: 9
icon: 'icon-stack'
children: [{
title: 'Form Snippets'
route: 'formSnippets'
children: [{
title: 'Bug Report Form'
route: 'bugReportForm'
},{
title: 'Feedback Form'
route: 'feedbackForm'
},{
title: 'Job Application Form'
route: 'jobApplicationForm'
},{
title: 'Subscribe Form'
route: 'subscribeForm'
},{
title: 'Simple Contact Form'
route: 'simpleContactForm'
},{
title: 'Advanced Contact Form'
route: 'advancedContactForm'
},{
title: 'Simple Registration Form'
route: 'simpleRegistrationForm'
},{
title: 'Seperated Form'
route: 'seperatedForm'
},{
title: 'Shipping Information Form'
route: 'shippingInformationForm'
},{
title: 'Shipping Method Form'
route: 'shippingMethodForm'
},{
title: 'Billing Address Form'
route: 'billingAddressForm'
},{
title: 'Payment Information Form'
route: 'paymentInformationForm'
}]
},{
title: 'Form Components'
route: 'formComponents'
children: [{
title: 'Form Elements'
route: 'formElements'
children: [{
title: 'Basic Inputs'
route: 'basicInputs'
},{
title: 'Styled Elements'
route: 'styledElements'
},{
title: 'Unstyled Elements'
route: 'unstyledElements'
},{
title: 'Spinners'
route: 'spinners'
}]
},{
title: 'WYSIWYG Editors'
route: 'wysiwygEditors'
},{
title: 'Selects'
route: 'selects'
},{
title: 'jQuery Select2'
route: 'jQuerySelect2'
},{
title: 'Multi File Uploaders'
route: 'multiFileUploaders'
},{
title: 'Input Grids'
route: 'inputGrids'
}]
},{
title: 'Form Layouts'
route: 'formLayouts'
},{
title: 'Form Validation'
route: 'formValidation'
}]
page:
title: 'Forms'
subtitle: 'A necessary evil'
callouts: [
cssClass: "callout-success"
title: "We All Hate Filling Out Forms"
message: "Time to change that."
]
},{
route: "formLayouts"
path: "/forms/form-layouts"
controller: 'ExampleController'
page:
title: "Form Layouts"
subtitle: "Getting Laid Out."
breadcrumbs: [
title: "Forms"
route: 'forms'
]
callouts: [
cssClass: "callout-danger"
title: "Flexible Form Layouts"
message: "Organized Input = Organized Data."
]
},{
route: "formValidation"
path: "/forms/form-validation"
controller: 'ExampleController'
page:
title: "Form Validation"
subtitle: "Everyone needs a little validation."
breadcrumbs: [
title: "Forms"
route: 'forms'
]
callouts: [
cssClass: "callout-success"
title: "Client Side Validation"
message: "You shouldn't have to wait on the server to know that your data works."
]
},{
route: "formComponents"
path: "/forms/form-components"
controller: 'ExampleController'
page:
title: "Form Components"
subtitle: "Forms are badass."
breadcrumbs: [
title: "Forms"
route: 'forms'
]
callouts: [
cssClass: "callout-danger"
title: "Reusable Form Components"
message: "All of these components are abstract enough to be used with any data."
]
},{
route: "wysiwygEditors"
path: "/forms/form-components/wysiwyg-editors"
controller: 'ExampleController'
page:
title: "WYSIWYG"
subtitle: "Awesome editors"
breadcrumbs: [{
title: "Forms"
route: "forms"
},{
title: "Form Components"
route: "formComponents"
}]
callouts: [
cssClass: "callout-danger"
title: "WYSIWYG editors"
message: "Content editors are based on awesome WYSIHTML5 plugin. Default examples of editor - inside and outside panel, in modal dialog."
]
},{
route: "multiFileUploaders"
path: "/forms/form-components/multi-file-uploaders"
controller: 'ExampleController'
page:
title: 'Multi File Uploaders'
subtitle: "Two is better than one."
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
}]
callouts: [
cssClass: "callout-success"
title: "Doing the same thing over and over sucks"
message: "Now you can do it all in one shot."
]
},{
route: "inputGrids"
path: "/forms/form-components/input-grids"
controller: 'ExampleController'
page:
title: 'Input Grids'
subtitle: "Welcome to the Grid."
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
}]
callouts: [
cssClass: "callout-info"
title: "Rigid Structure for Dynamic Data"
message: "A flexible grids lets you build a flexible app."
]
},{
route: "selects"
path: "/forms/form-components/selects"
controller: 'ExampleController'
page:
title: "Selects"
subtitle: "You must choose wisely."
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
}]
callouts: [
cssClass: "callout-success"
title: "Lots of options for selecting lots of options"
message: "Now you can do it all in one shot."
]
},{
route: "formElements"
path: "/forms/form-components/form-elements"
controller: 'ExampleController'
page:
title: 'Form Elements'
subtitle: "It's elementary my dear."
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
}]
callouts: [
cssClass: "callout-warning"
title: "Forms Aren't Sexy"
message: "But that doesn't mean they can just wear sweatpants."
]
},{
route: "basicInputs"
path: "/forms/form-components/form-elements/basic-inputs"
controller: 'ExampleController'
page:
title: 'Basic Inputs'
subtitle: 'Sometimes old fashioned is best.'
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
},{
title: 'Form Elements'
route: 'formElements'
}]
callouts: [
cssClass: "callout-success"
title: "Basic Doesn't Mean Stupid"
message: "These input elements are all reactive and will display dynamic data."
]
},{
route: "styledElements"
path: "/forms/form-components/form-elements/styled-elements"
controller: 'ExampleController'
page:
title: 'Styled Elements'
subtitle: 'Form swag.'
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
},{
title: 'Form Elements'
route: 'formElements'
}]
callouts: [
cssClass: "callout-info"
title: "Forms, So Hot Right Now"
message: "Who wants to look at something ugly all day?"
]
},{
route: "unstyledElements"
path: "/forms/form-components/form-elements/unstyled-elements"
controller: 'ExampleController'
page:
title: 'Unstyled Elements'
subtitle: "Not ugly, just unstyled."
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
}]
callouts: [
cssClass: "callout-warning"
title: "Plain Ole Checkboxes"
message: "Cause why not."
]
},{
route: "spinners"
path: "/forms/form-components/form-elements/spinners"
controller: 'ExampleController'
page:
title: 'Spinners'
subtitle: 'Right round baby.'
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
},{
title: 'Form Elements'
route: 'formElements'
}]
callouts: [
cssClass: "callout-info"
title: "I'm actually hypnotizing you"
message: "When I snap my figures you're going to wake up."
]
},{
route: "formSnippets"
path: "/forms/form-snippets"
controller: 'ExampleController'
page:
title: "Form Snippets"
subtitle: "Snip Snip."
breadcrumbs: [
title: "Forms"
route: 'forms'
]
callouts: [
cssClass: "callout-warning"
title: "Just Getting the Standards Out of the Way"
message: "A couple of ready-to-use form snippets - reports, contact forms, CV, shipping, payment, job listings etc."
]
},{
route: "bugReportForm"
path: "/forms/form-snippets/bug-report-form"
controller: 'ExampleController'
page:
title: "Bug Report Form"
subtitle: "I hate bugs."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "jobApplicationForm"
path: "/forms/form-snippets/job-application-form"
controller: 'ExampleController'
page:
title: "Job Application Form"
subtitle: "Work sucks, but being broke is worse."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "simpleContactForm"
path: "/forms/form-snippets/simple-contact-form"
controller: 'ExampleController'
page:
title: "Simple Contact Form"
subtitle: "Let me get those digits."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "advancedContactForm"
path: "/forms/form-snippets/advanced-contact-form"
controller: 'ExampleController'
page:
title: "Advanced Contact Form"
subtitle: "For that special kind of contact."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "simpleRegistrationForm"
path: "/forms/form-snippets/simple-registration-form"
controller: 'ExampleController'
page:
title: "Simple Registration Form"
subtitle: "You're in the system, man."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "seperatedForm"
path: "/forms/form-snippets/seperated-form"
controller: 'ExampleController'
page:
title: "Seperated Form"
subtitle: "You gotta keep 'em seperated."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "surveyForm"
path: "/forms/form-snippets/survey-form"
controller: 'ExampleController'
page:
title: "Survey Form"
subtitle: "Everyone hates surveys, except these of course."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "feedbackForm"
path: "/forms/form-snippets/feedback-form"
controller: 'ExampleController'
page:
title: "Feedback Form"
subtitle: "My foot gives great feedback."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "subscribeForm"
path: "/forms/form-snippets/subscribe-form"
controller: 'ExampleController'
page:
title: "Subscription Form"
subtitle: "Want some magazines?"
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "shippingInformationForm"
path: "/forms/form-snippets/shipping-information-form"
controller: 'ExampleController'
page:
title: "Shipping Information Form"
subtitle: "Where are we sending your shit?"
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "shippingMethodForm"
path: "/forms/form-snippets/shipping-method-form"
controller: 'ExampleController'
page:
title: "Shipping Method Form"
subtitle: "How do you want your shit wrapped?"
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "billingAddressForm"
path: "/forms/form-snippets/billing-address-form"
controller: 'ExampleController'
page:
title: "Billing Address Form"
subtitle: "Whose paying for this shit?"
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "paymentInformationForm"
path: "/forms/form-snippets/payment-information-form"
controller: 'ExampleController'
page:
title: "Payment Information Form"
subtitle: "And now I have your credit card number..."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "interfaceComponents"
path: "/interface-components"
controller: 'ExampleController'
nav:
priority: 10
icon: 'icon-grid'
children: [{
title: 'Visuals'
route: 'visuals'
},{
title: 'Navs'
route: 'navs'
},{
title: 'Panel Options'
route: 'panelOptions'
},{
title: 'Navbars'
route: 'navbars'
},{
title: 'Info Blocks'
route: 'infoBlocks'
},{
title: 'Icons'
route: 'icons'
},{
title: 'Buttons'
route: 'buttons'
},{
title: 'Calendar'
route: 'calendar'
},{
title: 'Typography'
route: 'typography'
},{
title: 'Gallery'
route: 'gallery'
},{
title: 'Header Elements'
route: 'headerElements'
},{
title: 'Content Grid'
route: 'contentGrid'
}]
page:
title: "Interface Components"
subtitle: "Do stuff."
breadcrumbs: []
callouts: [
cssClass: "callout-danger"
title: "User interface components"
message: "Page contains default Bootstrap/jQuery UI and custom visual components and notifications."
]
},{
route: "visuals"
path: "/interface-components/visuals"
controller: 'ExampleController'
page:
title: "Visuals & Notifications"
subtitle: "Popups and shit."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-danger"
title: "User interface components"
message: "Page contains default Bootstrap/jQuery UI and custom visual components and notifications."
]
},{
route: "navs"
path: "/interface-components/navs"
controller: 'ExampleController'
page:
title: "Navs"
subtitle: "Drop it like its hot."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-info"
title: "Bootstrap navs"
message: "Bootstrap tabs, pills, dropdown with options, media objects and lists, justified components, nav lists, toggles and accordions."
]
},{
route: "panelOptions"
path: "/interface-components/panel-options"
controller: 'ExampleController'
page:
title: "Panel Options"
subtitle: "More options..."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-success"
title: "Panel options"
message: "Page contains examples of elements, which can be used in panel headings - labels, badges, buttons, button groups, icons, progress bars etc. Including combinations of panel and inner components."
]
},{
route: "navbars"
path: "/interface-components/navbars"
controller: 'ExampleController'
page:
title: "Navbars"
subtitle: "Get where you need to be."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-info"
title: "Navbar options"
message: "Top navigation bar examples - custom template navbar, navigation links and items, text, form, buttons etc."
]
},{
route: "infoBlocks"
path: "/interface-components/info-blocks"
controller: 'ExampleController'
page:
title: "Info Blocks"
subtitle: "Like Minecraft, but not."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-info"
title: "Sparklines & Flots"
message: "Custom info elements with charts and other statistics, based on Sparklines and Flot sharts."
]
},{
route: "icons"
path: "/interface-components/icons"
controller: 'ExampleController'
page:
title: "Icons"
subtitle: "Like Minecraft, but not."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-warning"
title: "Icons"
message: "Template includes awesome set of 850 IcoMoon icons."
]
},{
route: "buttons"
path: "/interface-components/buttons"
controller: 'ExampleController'
page:
title: "Buttons"
subtitle: "Don't touch anything."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-error"
title: "Buttons"
message: "Bootstrap button examples - default, disabled, button groups, with dropups/dropdowns, dropdowns on hover and on click, button toolbars etc."
]
},{
route: "calendar"
path: "/interface-components/calendar"
controller: 'ExampleController'
page:
title: "Calendar"
subtitle: "Now you don't have an excuse for."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-warning"
title: "Fullcalendar plugin integrated"
message: "Page with integrated Fullcalendar jquery plugin. Including 2 options - inside and outside panels."
]
},{
route: "typography"
path: "/interface-components/typography"
controller: 'ExampleController'
page:
title: "Typography"
subtitle: "Like caligraphy, but easier."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-success"
title: "Template typography"
message: "Template typography - styled/unstyled headings, lists, description lists, code/pre, blockquotes, text styles etc."
]
},{
route: "gallery"
path: "/interface-components/gallery"
controller: 'ExampleController'
page:
title: "Gallery"
subtitle: "Image and Video gallery."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-warning"
title: "Image and Video Gallery"
message: "Media gallery page example with custom toolbar. Includes layout - with/without backgrounds, with title, without title, gallery style and table view."
]
},{
route: "headerElements"
path: "/interface-components/header-elements"
controller: 'ExampleController'
page:
title: "Page Header Elements"
subtitle: "Page header area custom elements."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-warning"
title: "Page Headers"
message: "2 breadcrumb positions - on top and after page title. Also custom set of different elements which can be used on the right side of page title area - buttons, progress bars, graphs, info's etc."
]
},{
route: "contentGrid"
path: "/interface-components/content-grid"
controller: 'ExampleController'
page:
title: "Content Grid"
subtitle: "Bootstrap responsive content grid."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-warning"
title: "Bootstrap Content Grid"
message: "Bootstrap mobile first 12 columns grid with examples."
]
},{
route: "invoices"
path: "/invoices"
controller: 'ExampleController'
nav:
priority: 4
icon: 'icon-coin'
children: [{
title: 'Invoice Template'
route: 'invoiceTemplate'
},{
title: 'Invoice List'
route: 'invoiceList'
}]
page:
title: "Invoices"
subtitle: "Gimme my money."
},{
route: "invoiceTemplate"
path: "/invoices/template"
controller: 'ExampleController'
page:
title: "Invoice Template"
subtitle: "Stop repeating yourself."
breadcrumbs: [
title: "Invoices"
route: 'invoices'
]
},{
route: "invoiceList"
path: "/invoices/list"
controller: 'ExampleController'
page:
title: "Invoice List"
subtitle: "Time to break some kneecaps."
breadcrumbs: [
title: "Invoices"
route: 'invoices'
]
},{
route: "login"
path: "/login"
controller: "FullPageController"
nav:
priority: 0
icon: 'icon-user-plus'
page:
title: 'Login'
},{
route: "search"
path: "/search"
controller: 'ExampleController'
nav:
priority: 6
icon: 'icon-search3'
page:
title: "Search"
subtitle: "What was I looking for?"
},{
route: "support"
path: "/support"
controller: 'ExampleController'
nav:
priority: 5
icon: 'icon-bubble6'
children: [{
title: 'FAQ'
route: 'faq'
},{
title: 'Chat With Tabs'
route: 'chatWithTabs'
},{
title: 'Chat With Contacts'
route: 'chatWithContacts'
}]
page:
title: "Support"
subtitle: "HALP!"
},{
route: "faq"
path: "/support/faq"
controller: 'ExampleController'
page:
title: "FAQ"
subtitle: "Let me Google that for you."
breadcrumbs: [
title: "Support"
route: 'support'
]
},{
route: "chatWithTabs"
path: "/support/chat-with-tabs"
controller: 'ExampleController'
page:
title: "Chat With Tabs"
subtitle: "Figure it out."
breadcrumbs: [
title: "Support"
route: 'support'
]
},{
route: "chatWithContacts"
path: "/support/chat-with-contacts"
controller: 'ExampleController'
page:
title: "Chat With Contacts"
subtitle: "Figure it out."
breadcrumbs: [
title: "Support"
route: 'support'
]
},{
route: "tables"
path: "/tables"
controller: 'ExampleController'
nav:
priority: 8
icon: 'icon-table2'
children: [{
title: 'Static Tables'
route: 'staticTables'
},{
title: 'jQuery Datatables'
route: 'datatables'
}]
page:
title: "Tables"
subtitle: "Yo dawg, heard you like tables."
},{
route: "staticTables"
path: "/tables/static"
controller: 'ExampleController'
page:
title: "Static Tables"
subtitle: "So good it doesn't have to change."
breadcrumbs: [
title: "Tables"
route: 'tables'
]
},{
route: 'datatables'
path: "https://jquery-datatables.meteor.com"
external: true
page:
title: "jQuery Datatables"
subtitle: "Open Source Repo"
},{
route: "taskManager"
path: "/task-manager"
controller: 'ExampleController'
nav:
priority: 2
icon: 'icon-numbered-list'
children: [{
title: 'Task Grid'
route: 'taskGrid'
},{
title: 'Task List'
route: 'taskList'
},{
title: 'Task Detail'
route: 'taskDetail'
}]
page:
title: "Task Manager"
subtitle: "Get stuff done."
callouts: [
cssClass: "callout-info"
title: "Comprehensize task manager that can be easily extended."
message: "The tasks pattern can adapt to any pipeline."
]
},{
route: "taskGrid"
path: "/task-manager/task-grid"
controller: 'ExampleController'
page:
title: "Task Grid"
subtitle: "Task grid with options bar"
breadcrumbs: [
title: "Task Manager"
route: 'taskManager'
]
},{
route: "taskList"
path: "/task-manager/task-list"
controller: 'ExampleController'
page:
title: "Task List"
subtitle: "Task list inside data table"
breadcrumbs: [
title: "Task Manager"
route: 'taskManager'
]
},{
route: "taskDetail"
path: "/task-manager/task-detail"
controller: 'ExampleController'
page:
title: "Task Detail"
subtitle: "Detailed task layout example"
breadcrumbs: [
title: "Task Manager"
route: 'taskManager'
]
},{
route: "user"
path: "/user"
controller: 'ExampleController'
nav:
priority: 3
icon: 'icon-user'
children: [{
title: 'Team'
route: 'team'
},{
title: 'Contacts'
route: 'contactList'
},{
title: 'Profile'
route: 'profile'
}]
page:
title: "User"
subtitle: "Be all you can be."
},{
route: "team"
path: "/user/team"
controller: 'ExampleController'
page:
title: "Team"
subtitle: "Because you can't do everything yourself."
breadcrumbs: [
title: "User"
route: 'user'
]
},{
route: "contactList"
path: "/user/contacts"
controller: 'ExampleController'
page:
title: "Contacts"
subtitle: "Now you don't have an excuse for forgetting their name."
breadcrumbs: [
title: "User"
route: 'user'
]
},{
route: "profile"
path: "/user/profile"
controller: 'ExampleController'
page:
title: "<NAME>"
subtitle: "Witty Tagline."
breadcrumbs: [
title: "User"
route: 'user'
]
},{
route: 'dashboard'
path: "/dashboard"
controller: 'ExampleController'
nav:
priority: 1
icon: 'icon-screen2'
page:
title: "Dashboard"
subtitle: "I missed you <NAME> <NAME>, it's been 12 hours since your last visit."
tasksTable:
columns: [{
sTitle: "Task Description"
},{
sTitle: "Priority"
sClass: 'task-priority'
},{
sTitle: "Date Added"
sClass: 'task-date-added'
},{
sTitle: "Progress"
sClass: 'task-progress'
},{
sTitle: "Deadline"
sClass: 'task-deadline'
},{
sTitle: ''
sClass: 'task-tools text-center'
}]
rows: [{
_id: 'example'
description: 'This is a description for a task.'
subtitle: 'I am a subtitle!'
priority: 'High'
date_added: 'September 20, 2013'
progress: 90
deadline: 14
},{
_id: '2'
description: 'This is a description for a task.'
subtitle: 'I am a subtitle!'
priority: 'High'
date_added: 'September 20, 2013'
progress: 90
deadline: 14
}]
},{
route: "charts"
path: "/charts"
controller: 'ExampleController'
nav:
priority: 7
icon: 'icon-bars'
page:
title: "Charts"
subtitle: "Yo dawg, heard you like charts."
}]
Router.initialize() | true | Router.addRoutes [
{
route: 'home'
path: '/'
controller: 'ExampleController'
page:
title: "Home"
subtitle: "This isn't really home, its work."
},{
route: 'jQuerySelect2'
path: "https://jquery-select2.meteor.com"
external: true
page:
title: "jQuery Select2"
subtitle: "Open Source Repo"
},{
route: 'gitHub'
path: "https://github.com/lumapictures/meteor-luma-ui"
external: true
page:
title: "GitHub"
subtitle: "Open Source Repo"
nav:
priority: 1000
icon: 'icon-github'
},{
route: 'reportBugs'
path: "https://github.com/lumapictures/meteor-luma-ui/issues/new"
external: true
page:
title: "Report Bugs"
subtitle: "GitHub Issues"
},{
route: 'source'
path: "http://lumapictures.github.io/meteor-luma-ui/"
external: true
page:
title: "Annotated Source"
subtitle: "GitHub pages generated by Groc"
nav:
priority: 1001
icon: 'icon-code'
},{
route: 'build'
path: "https://travis-ci.org/LumaPictures/meteor-luma-ui"
external: true
page:
title: "Build Status"
subtitle: "Continuous Integration by Travis CI"
nav:
priority: 1002
icon: 'icon-cogs'
},{
route: 'errorPages'
path: "/error-pages"
controller: 'ExampleController'
nav:
priority: 0
icon: 'icon-warning'
children: [{
title: 'Not Found'
route: 'error404'
}]
page:
title: 'Error Pages'
},{
route: 'error404'
path: "/error-pages/404"
controller: 'FullPageController'
},{
route: "forms"
path: "/forms"
controller: 'ExampleController'
nav:
priority: 9
icon: 'icon-stack'
children: [{
title: 'Form Snippets'
route: 'formSnippets'
children: [{
title: 'Bug Report Form'
route: 'bugReportForm'
},{
title: 'Feedback Form'
route: 'feedbackForm'
},{
title: 'Job Application Form'
route: 'jobApplicationForm'
},{
title: 'Subscribe Form'
route: 'subscribeForm'
},{
title: 'Simple Contact Form'
route: 'simpleContactForm'
},{
title: 'Advanced Contact Form'
route: 'advancedContactForm'
},{
title: 'Simple Registration Form'
route: 'simpleRegistrationForm'
},{
title: 'Seperated Form'
route: 'seperatedForm'
},{
title: 'Shipping Information Form'
route: 'shippingInformationForm'
},{
title: 'Shipping Method Form'
route: 'shippingMethodForm'
},{
title: 'Billing Address Form'
route: 'billingAddressForm'
},{
title: 'Payment Information Form'
route: 'paymentInformationForm'
}]
},{
title: 'Form Components'
route: 'formComponents'
children: [{
title: 'Form Elements'
route: 'formElements'
children: [{
title: 'Basic Inputs'
route: 'basicInputs'
},{
title: 'Styled Elements'
route: 'styledElements'
},{
title: 'Unstyled Elements'
route: 'unstyledElements'
},{
title: 'Spinners'
route: 'spinners'
}]
},{
title: 'WYSIWYG Editors'
route: 'wysiwygEditors'
},{
title: 'Selects'
route: 'selects'
},{
title: 'jQuery Select2'
route: 'jQuerySelect2'
},{
title: 'Multi File Uploaders'
route: 'multiFileUploaders'
},{
title: 'Input Grids'
route: 'inputGrids'
}]
},{
title: 'Form Layouts'
route: 'formLayouts'
},{
title: 'Form Validation'
route: 'formValidation'
}]
page:
title: 'Forms'
subtitle: 'A necessary evil'
callouts: [
cssClass: "callout-success"
title: "We All Hate Filling Out Forms"
message: "Time to change that."
]
},{
route: "formLayouts"
path: "/forms/form-layouts"
controller: 'ExampleController'
page:
title: "Form Layouts"
subtitle: "Getting Laid Out."
breadcrumbs: [
title: "Forms"
route: 'forms'
]
callouts: [
cssClass: "callout-danger"
title: "Flexible Form Layouts"
message: "Organized Input = Organized Data."
]
},{
route: "formValidation"
path: "/forms/form-validation"
controller: 'ExampleController'
page:
title: "Form Validation"
subtitle: "Everyone needs a little validation."
breadcrumbs: [
title: "Forms"
route: 'forms'
]
callouts: [
cssClass: "callout-success"
title: "Client Side Validation"
message: "You shouldn't have to wait on the server to know that your data works."
]
},{
route: "formComponents"
path: "/forms/form-components"
controller: 'ExampleController'
page:
title: "Form Components"
subtitle: "Forms are badass."
breadcrumbs: [
title: "Forms"
route: 'forms'
]
callouts: [
cssClass: "callout-danger"
title: "Reusable Form Components"
message: "All of these components are abstract enough to be used with any data."
]
},{
route: "wysiwygEditors"
path: "/forms/form-components/wysiwyg-editors"
controller: 'ExampleController'
page:
title: "WYSIWYG"
subtitle: "Awesome editors"
breadcrumbs: [{
title: "Forms"
route: "forms"
},{
title: "Form Components"
route: "formComponents"
}]
callouts: [
cssClass: "callout-danger"
title: "WYSIWYG editors"
message: "Content editors are based on awesome WYSIHTML5 plugin. Default examples of editor - inside and outside panel, in modal dialog."
]
},{
route: "multiFileUploaders"
path: "/forms/form-components/multi-file-uploaders"
controller: 'ExampleController'
page:
title: 'Multi File Uploaders'
subtitle: "Two is better than one."
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
}]
callouts: [
cssClass: "callout-success"
title: "Doing the same thing over and over sucks"
message: "Now you can do it all in one shot."
]
},{
route: "inputGrids"
path: "/forms/form-components/input-grids"
controller: 'ExampleController'
page:
title: 'Input Grids'
subtitle: "Welcome to the Grid."
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
}]
callouts: [
cssClass: "callout-info"
title: "Rigid Structure for Dynamic Data"
message: "A flexible grids lets you build a flexible app."
]
},{
route: "selects"
path: "/forms/form-components/selects"
controller: 'ExampleController'
page:
title: "Selects"
subtitle: "You must choose wisely."
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
}]
callouts: [
cssClass: "callout-success"
title: "Lots of options for selecting lots of options"
message: "Now you can do it all in one shot."
]
},{
route: "formElements"
path: "/forms/form-components/form-elements"
controller: 'ExampleController'
page:
title: 'Form Elements'
subtitle: "It's elementary my dear."
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
}]
callouts: [
cssClass: "callout-warning"
title: "Forms Aren't Sexy"
message: "But that doesn't mean they can just wear sweatpants."
]
},{
route: "basicInputs"
path: "/forms/form-components/form-elements/basic-inputs"
controller: 'ExampleController'
page:
title: 'Basic Inputs'
subtitle: 'Sometimes old fashioned is best.'
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
},{
title: 'Form Elements'
route: 'formElements'
}]
callouts: [
cssClass: "callout-success"
title: "Basic Doesn't Mean Stupid"
message: "These input elements are all reactive and will display dynamic data."
]
},{
route: "styledElements"
path: "/forms/form-components/form-elements/styled-elements"
controller: 'ExampleController'
page:
title: 'Styled Elements'
subtitle: 'Form swag.'
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
},{
title: 'Form Elements'
route: 'formElements'
}]
callouts: [
cssClass: "callout-info"
title: "Forms, So Hot Right Now"
message: "Who wants to look at something ugly all day?"
]
},{
route: "unstyledElements"
path: "/forms/form-components/form-elements/unstyled-elements"
controller: 'ExampleController'
page:
title: 'Unstyled Elements'
subtitle: "Not ugly, just unstyled."
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
}]
callouts: [
cssClass: "callout-warning"
title: "Plain Ole Checkboxes"
message: "Cause why not."
]
},{
route: "spinners"
path: "/forms/form-components/form-elements/spinners"
controller: 'ExampleController'
page:
title: 'Spinners'
subtitle: 'Right round baby.'
breadcrumbs: [{
title: 'Forms'
route: 'forms'
},{
title: 'Form Components'
route: 'formComponents'
},{
title: 'Form Elements'
route: 'formElements'
}]
callouts: [
cssClass: "callout-info"
title: "I'm actually hypnotizing you"
message: "When I snap my figures you're going to wake up."
]
},{
route: "formSnippets"
path: "/forms/form-snippets"
controller: 'ExampleController'
page:
title: "Form Snippets"
subtitle: "Snip Snip."
breadcrumbs: [
title: "Forms"
route: 'forms'
]
callouts: [
cssClass: "callout-warning"
title: "Just Getting the Standards Out of the Way"
message: "A couple of ready-to-use form snippets - reports, contact forms, CV, shipping, payment, job listings etc."
]
},{
route: "bugReportForm"
path: "/forms/form-snippets/bug-report-form"
controller: 'ExampleController'
page:
title: "Bug Report Form"
subtitle: "I hate bugs."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "jobApplicationForm"
path: "/forms/form-snippets/job-application-form"
controller: 'ExampleController'
page:
title: "Job Application Form"
subtitle: "Work sucks, but being broke is worse."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "simpleContactForm"
path: "/forms/form-snippets/simple-contact-form"
controller: 'ExampleController'
page:
title: "Simple Contact Form"
subtitle: "Let me get those digits."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "advancedContactForm"
path: "/forms/form-snippets/advanced-contact-form"
controller: 'ExampleController'
page:
title: "Advanced Contact Form"
subtitle: "For that special kind of contact."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "simpleRegistrationForm"
path: "/forms/form-snippets/simple-registration-form"
controller: 'ExampleController'
page:
title: "Simple Registration Form"
subtitle: "You're in the system, man."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "seperatedForm"
path: "/forms/form-snippets/seperated-form"
controller: 'ExampleController'
page:
title: "Seperated Form"
subtitle: "You gotta keep 'em seperated."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "surveyForm"
path: "/forms/form-snippets/survey-form"
controller: 'ExampleController'
page:
title: "Survey Form"
subtitle: "Everyone hates surveys, except these of course."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "feedbackForm"
path: "/forms/form-snippets/feedback-form"
controller: 'ExampleController'
page:
title: "Feedback Form"
subtitle: "My foot gives great feedback."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "subscribeForm"
path: "/forms/form-snippets/subscribe-form"
controller: 'ExampleController'
page:
title: "Subscription Form"
subtitle: "Want some magazines?"
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "shippingInformationForm"
path: "/forms/form-snippets/shipping-information-form"
controller: 'ExampleController'
page:
title: "Shipping Information Form"
subtitle: "Where are we sending your shit?"
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "shippingMethodForm"
path: "/forms/form-snippets/shipping-method-form"
controller: 'ExampleController'
page:
title: "Shipping Method Form"
subtitle: "How do you want your shit wrapped?"
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "billingAddressForm"
path: "/forms/form-snippets/billing-address-form"
controller: 'ExampleController'
page:
title: "Billing Address Form"
subtitle: "Whose paying for this shit?"
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "paymentInformationForm"
path: "/forms/form-snippets/payment-information-form"
controller: 'ExampleController'
page:
title: "Payment Information Form"
subtitle: "And now I have your credit card number..."
breadcrumbs: [{
title: "Forms"
route: 'forms'
},{
title: "Form Snippets"
route: 'formSnippets'
}]
},{
route: "interfaceComponents"
path: "/interface-components"
controller: 'ExampleController'
nav:
priority: 10
icon: 'icon-grid'
children: [{
title: 'Visuals'
route: 'visuals'
},{
title: 'Navs'
route: 'navs'
},{
title: 'Panel Options'
route: 'panelOptions'
},{
title: 'Navbars'
route: 'navbars'
},{
title: 'Info Blocks'
route: 'infoBlocks'
},{
title: 'Icons'
route: 'icons'
},{
title: 'Buttons'
route: 'buttons'
},{
title: 'Calendar'
route: 'calendar'
},{
title: 'Typography'
route: 'typography'
},{
title: 'Gallery'
route: 'gallery'
},{
title: 'Header Elements'
route: 'headerElements'
},{
title: 'Content Grid'
route: 'contentGrid'
}]
page:
title: "Interface Components"
subtitle: "Do stuff."
breadcrumbs: []
callouts: [
cssClass: "callout-danger"
title: "User interface components"
message: "Page contains default Bootstrap/jQuery UI and custom visual components and notifications."
]
},{
route: "visuals"
path: "/interface-components/visuals"
controller: 'ExampleController'
page:
title: "Visuals & Notifications"
subtitle: "Popups and shit."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-danger"
title: "User interface components"
message: "Page contains default Bootstrap/jQuery UI and custom visual components and notifications."
]
},{
route: "navs"
path: "/interface-components/navs"
controller: 'ExampleController'
page:
title: "Navs"
subtitle: "Drop it like its hot."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-info"
title: "Bootstrap navs"
message: "Bootstrap tabs, pills, dropdown with options, media objects and lists, justified components, nav lists, toggles and accordions."
]
},{
route: "panelOptions"
path: "/interface-components/panel-options"
controller: 'ExampleController'
page:
title: "Panel Options"
subtitle: "More options..."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-success"
title: "Panel options"
message: "Page contains examples of elements, which can be used in panel headings - labels, badges, buttons, button groups, icons, progress bars etc. Including combinations of panel and inner components."
]
},{
route: "navbars"
path: "/interface-components/navbars"
controller: 'ExampleController'
page:
title: "Navbars"
subtitle: "Get where you need to be."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-info"
title: "Navbar options"
message: "Top navigation bar examples - custom template navbar, navigation links and items, text, form, buttons etc."
]
},{
route: "infoBlocks"
path: "/interface-components/info-blocks"
controller: 'ExampleController'
page:
title: "Info Blocks"
subtitle: "Like Minecraft, but not."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-info"
title: "Sparklines & Flots"
message: "Custom info elements with charts and other statistics, based on Sparklines and Flot sharts."
]
},{
route: "icons"
path: "/interface-components/icons"
controller: 'ExampleController'
page:
title: "Icons"
subtitle: "Like Minecraft, but not."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-warning"
title: "Icons"
message: "Template includes awesome set of 850 IcoMoon icons."
]
},{
route: "buttons"
path: "/interface-components/buttons"
controller: 'ExampleController'
page:
title: "Buttons"
subtitle: "Don't touch anything."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-error"
title: "Buttons"
message: "Bootstrap button examples - default, disabled, button groups, with dropups/dropdowns, dropdowns on hover and on click, button toolbars etc."
]
},{
route: "calendar"
path: "/interface-components/calendar"
controller: 'ExampleController'
page:
title: "Calendar"
subtitle: "Now you don't have an excuse for."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-warning"
title: "Fullcalendar plugin integrated"
message: "Page with integrated Fullcalendar jquery plugin. Including 2 options - inside and outside panels."
]
},{
route: "typography"
path: "/interface-components/typography"
controller: 'ExampleController'
page:
title: "Typography"
subtitle: "Like caligraphy, but easier."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-success"
title: "Template typography"
message: "Template typography - styled/unstyled headings, lists, description lists, code/pre, blockquotes, text styles etc."
]
},{
route: "gallery"
path: "/interface-components/gallery"
controller: 'ExampleController'
page:
title: "Gallery"
subtitle: "Image and Video gallery."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-warning"
title: "Image and Video Gallery"
message: "Media gallery page example with custom toolbar. Includes layout - with/without backgrounds, with title, without title, gallery style and table view."
]
},{
route: "headerElements"
path: "/interface-components/header-elements"
controller: 'ExampleController'
page:
title: "Page Header Elements"
subtitle: "Page header area custom elements."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-warning"
title: "Page Headers"
message: "2 breadcrumb positions - on top and after page title. Also custom set of different elements which can be used on the right side of page title area - buttons, progress bars, graphs, info's etc."
]
},{
route: "contentGrid"
path: "/interface-components/content-grid"
controller: 'ExampleController'
page:
title: "Content Grid"
subtitle: "Bootstrap responsive content grid."
breadcrumbs: [
title: "Interface Components"
route: 'interfaceComponents'
]
callouts: [
cssClass: "callout-warning"
title: "Bootstrap Content Grid"
message: "Bootstrap mobile first 12 columns grid with examples."
]
},{
route: "invoices"
path: "/invoices"
controller: 'ExampleController'
nav:
priority: 4
icon: 'icon-coin'
children: [{
title: 'Invoice Template'
route: 'invoiceTemplate'
},{
title: 'Invoice List'
route: 'invoiceList'
}]
page:
title: "Invoices"
subtitle: "Gimme my money."
},{
route: "invoiceTemplate"
path: "/invoices/template"
controller: 'ExampleController'
page:
title: "Invoice Template"
subtitle: "Stop repeating yourself."
breadcrumbs: [
title: "Invoices"
route: 'invoices'
]
},{
route: "invoiceList"
path: "/invoices/list"
controller: 'ExampleController'
page:
title: "Invoice List"
subtitle: "Time to break some kneecaps."
breadcrumbs: [
title: "Invoices"
route: 'invoices'
]
},{
route: "login"
path: "/login"
controller: "FullPageController"
nav:
priority: 0
icon: 'icon-user-plus'
page:
title: 'Login'
},{
route: "search"
path: "/search"
controller: 'ExampleController'
nav:
priority: 6
icon: 'icon-search3'
page:
title: "Search"
subtitle: "What was I looking for?"
},{
route: "support"
path: "/support"
controller: 'ExampleController'
nav:
priority: 5
icon: 'icon-bubble6'
children: [{
title: 'FAQ'
route: 'faq'
},{
title: 'Chat With Tabs'
route: 'chatWithTabs'
},{
title: 'Chat With Contacts'
route: 'chatWithContacts'
}]
page:
title: "Support"
subtitle: "HALP!"
},{
route: "faq"
path: "/support/faq"
controller: 'ExampleController'
page:
title: "FAQ"
subtitle: "Let me Google that for you."
breadcrumbs: [
title: "Support"
route: 'support'
]
},{
route: "chatWithTabs"
path: "/support/chat-with-tabs"
controller: 'ExampleController'
page:
title: "Chat With Tabs"
subtitle: "Figure it out."
breadcrumbs: [
title: "Support"
route: 'support'
]
},{
route: "chatWithContacts"
path: "/support/chat-with-contacts"
controller: 'ExampleController'
page:
title: "Chat With Contacts"
subtitle: "Figure it out."
breadcrumbs: [
title: "Support"
route: 'support'
]
},{
route: "tables"
path: "/tables"
controller: 'ExampleController'
nav:
priority: 8
icon: 'icon-table2'
children: [{
title: 'Static Tables'
route: 'staticTables'
},{
title: 'jQuery Datatables'
route: 'datatables'
}]
page:
title: "Tables"
subtitle: "Yo dawg, heard you like tables."
},{
route: "staticTables"
path: "/tables/static"
controller: 'ExampleController'
page:
title: "Static Tables"
subtitle: "So good it doesn't have to change."
breadcrumbs: [
title: "Tables"
route: 'tables'
]
},{
route: 'datatables'
path: "https://jquery-datatables.meteor.com"
external: true
page:
title: "jQuery Datatables"
subtitle: "Open Source Repo"
},{
route: "taskManager"
path: "/task-manager"
controller: 'ExampleController'
nav:
priority: 2
icon: 'icon-numbered-list'
children: [{
title: 'Task Grid'
route: 'taskGrid'
},{
title: 'Task List'
route: 'taskList'
},{
title: 'Task Detail'
route: 'taskDetail'
}]
page:
title: "Task Manager"
subtitle: "Get stuff done."
callouts: [
cssClass: "callout-info"
title: "Comprehensize task manager that can be easily extended."
message: "The tasks pattern can adapt to any pipeline."
]
},{
route: "taskGrid"
path: "/task-manager/task-grid"
controller: 'ExampleController'
page:
title: "Task Grid"
subtitle: "Task grid with options bar"
breadcrumbs: [
title: "Task Manager"
route: 'taskManager'
]
},{
route: "taskList"
path: "/task-manager/task-list"
controller: 'ExampleController'
page:
title: "Task List"
subtitle: "Task list inside data table"
breadcrumbs: [
title: "Task Manager"
route: 'taskManager'
]
},{
route: "taskDetail"
path: "/task-manager/task-detail"
controller: 'ExampleController'
page:
title: "Task Detail"
subtitle: "Detailed task layout example"
breadcrumbs: [
title: "Task Manager"
route: 'taskManager'
]
},{
route: "user"
path: "/user"
controller: 'ExampleController'
nav:
priority: 3
icon: 'icon-user'
children: [{
title: 'Team'
route: 'team'
},{
title: 'Contacts'
route: 'contactList'
},{
title: 'Profile'
route: 'profile'
}]
page:
title: "User"
subtitle: "Be all you can be."
},{
route: "team"
path: "/user/team"
controller: 'ExampleController'
page:
title: "Team"
subtitle: "Because you can't do everything yourself."
breadcrumbs: [
title: "User"
route: 'user'
]
},{
route: "contactList"
path: "/user/contacts"
controller: 'ExampleController'
page:
title: "Contacts"
subtitle: "Now you don't have an excuse for forgetting their name."
breadcrumbs: [
title: "User"
route: 'user'
]
},{
route: "profile"
path: "/user/profile"
controller: 'ExampleController'
page:
title: "PI:NAME:<NAME>END_PI"
subtitle: "Witty Tagline."
breadcrumbs: [
title: "User"
route: 'user'
]
},{
route: 'dashboard'
path: "/dashboard"
controller: 'ExampleController'
nav:
priority: 1
icon: 'icon-screen2'
page:
title: "Dashboard"
subtitle: "I missed you PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI, it's been 12 hours since your last visit."
tasksTable:
columns: [{
sTitle: "Task Description"
},{
sTitle: "Priority"
sClass: 'task-priority'
},{
sTitle: "Date Added"
sClass: 'task-date-added'
},{
sTitle: "Progress"
sClass: 'task-progress'
},{
sTitle: "Deadline"
sClass: 'task-deadline'
},{
sTitle: ''
sClass: 'task-tools text-center'
}]
rows: [{
_id: 'example'
description: 'This is a description for a task.'
subtitle: 'I am a subtitle!'
priority: 'High'
date_added: 'September 20, 2013'
progress: 90
deadline: 14
},{
_id: '2'
description: 'This is a description for a task.'
subtitle: 'I am a subtitle!'
priority: 'High'
date_added: 'September 20, 2013'
progress: 90
deadline: 14
}]
},{
route: "charts"
path: "/charts"
controller: 'ExampleController'
nav:
priority: 7
icon: 'icon-bars'
page:
title: "Charts"
subtitle: "Yo dawg, heard you like charts."
}]
Router.initialize() |
[
{
"context": "\"https://spreadsheets.google.com/feeds/worksheets/1vyPu1EtzU1DvGXfthjrR-blJ8mGe75TL4BFNWtFMm0I/public/basic?alt=json\"\n dest: 'spec/fixtur",
"end": 526,
"score": 0.9828590750694275,
"start": 482,
"tag": "KEY",
"value": "1vyPu1EtzU1DvGXfthjrR-blJ8mGe75TL4BFNWtFMm0I"
}... | google-spreadsheets-parser-0.2.0/Gruntfile.coffee | gordonhu7/SRCCalendar | 0 | module.exports = (grunt)->
grunt.initConfig
coffee:
dist:
options:
bare: true
join: true
files:
'dist/googleSpreadsheetsParser.js': [
'src/googleSpreadsheetsUtil.coffee'
'src/googleSpreadsheetsParser.coffee'
]
karma:
unit:
configFile: 'karma.conf.coffee'
http:
sampleDataBasic:
options:
url: "https://spreadsheets.google.com/feeds/worksheets/1vyPu1EtzU1DvGXfthjrR-blJ8mGe75TL4BFNWtFMm0I/public/basic?alt=json"
dest: 'spec/fixtures/sampleDataBasic.json'
sampleDataFeed:
options:
url: "https://spreadsheets.google.com/feeds/cells/1vyPu1EtzU1DvGXfthjrR-blJ8mGe75TL4BFNWtFMm0I/od6/public/values?alt=json"
dest: 'spec/fixtures/sampleDataFeed.json'
'closure-compiler':
publish:
js: 'dist/googleSpreadsheetsParser.js'
jsOutputFile: 'dist/googleSpreadsheetsParser.min.js'
noreport: true
options:
compilation_level: 'SIMPLE_OPTIMIZATIONS'
grunt.loadNpmTasks 'grunt-contrib-coffee'
grunt.loadNpmTasks 'grunt-karma'
grunt.loadNpmTasks 'grunt-http'
grunt.loadNpmTasks 'grunt-closure-compiler'
grunt.registerTask 'spec', ['http', 'karma']
grunt.registerTask 'publish', ['coffee:dist', 'closure-compiler:publish']
| 199531 | module.exports = (grunt)->
grunt.initConfig
coffee:
dist:
options:
bare: true
join: true
files:
'dist/googleSpreadsheetsParser.js': [
'src/googleSpreadsheetsUtil.coffee'
'src/googleSpreadsheetsParser.coffee'
]
karma:
unit:
configFile: 'karma.conf.coffee'
http:
sampleDataBasic:
options:
url: "https://spreadsheets.google.com/feeds/worksheets/<KEY>/public/basic?alt=json"
dest: 'spec/fixtures/sampleDataBasic.json'
sampleDataFeed:
options:
url: "https://spreadsheets.google.com/feeds/cells/<KEY>/od6/public/values?alt=json"
dest: 'spec/fixtures/sampleDataFeed.json'
'closure-compiler':
publish:
js: 'dist/googleSpreadsheetsParser.js'
jsOutputFile: 'dist/googleSpreadsheetsParser.min.js'
noreport: true
options:
compilation_level: 'SIMPLE_OPTIMIZATIONS'
grunt.loadNpmTasks 'grunt-contrib-coffee'
grunt.loadNpmTasks 'grunt-karma'
grunt.loadNpmTasks 'grunt-http'
grunt.loadNpmTasks 'grunt-closure-compiler'
grunt.registerTask 'spec', ['http', 'karma']
grunt.registerTask 'publish', ['coffee:dist', 'closure-compiler:publish']
| true | module.exports = (grunt)->
grunt.initConfig
coffee:
dist:
options:
bare: true
join: true
files:
'dist/googleSpreadsheetsParser.js': [
'src/googleSpreadsheetsUtil.coffee'
'src/googleSpreadsheetsParser.coffee'
]
karma:
unit:
configFile: 'karma.conf.coffee'
http:
sampleDataBasic:
options:
url: "https://spreadsheets.google.com/feeds/worksheets/PI:KEY:<KEY>END_PI/public/basic?alt=json"
dest: 'spec/fixtures/sampleDataBasic.json'
sampleDataFeed:
options:
url: "https://spreadsheets.google.com/feeds/cells/PI:KEY:<KEY>END_PI/od6/public/values?alt=json"
dest: 'spec/fixtures/sampleDataFeed.json'
'closure-compiler':
publish:
js: 'dist/googleSpreadsheetsParser.js'
jsOutputFile: 'dist/googleSpreadsheetsParser.min.js'
noreport: true
options:
compilation_level: 'SIMPLE_OPTIMIZATIONS'
grunt.loadNpmTasks 'grunt-contrib-coffee'
grunt.loadNpmTasks 'grunt-karma'
grunt.loadNpmTasks 'grunt-http'
grunt.loadNpmTasks 'grunt-closure-compiler'
grunt.registerTask 'spec', ['http', 'karma']
grunt.registerTask 'publish', ['coffee:dist', 'closure-compiler:publish']
|
[
{
"context": "cKey\n\n @HTTP_SIGNATURE_OPTIONS =\n keyId: 'credentials-service-uuid'\n key: privateKey\n headers: [ 'date', '",
"end": 577,
"score": 0.9935518503189087,
"start": 553,
"tag": "KEY",
"value": "credentials-service-uuid"
}
] | test/integration/get-credentials-spec.coffee | octoblu/credentials-service | 0 | http = require 'http'
request = require 'request'
shmock = require '@octoblu/shmock'
Server = require '../../src/server'
JobManager = require 'meshblu-core-job-manager'
redis = require 'fakeredis'
RedisNS = require '@octoblu/redis-ns'
uuid = require 'uuid'
{publicKey, privateKey} = require '../keys.json'
describe 'Get Credentials', ->
beforeEach (done) ->
serverOptions =
port: undefined,
disableLogging: true
publicKey:
publicKey: publicKey
@HTTP_SIGNATURE_OPTIONS =
keyId: 'credentials-service-uuid'
key: privateKey
headers: [ 'date', 'X-MESHBLU-UUID' ]
@redisKey = uuid.v1()
client = new RedisNS 'credentials', redis.createClient @redisKey
jobManager = new JobManager client: client, timeoutSeconds: 1
testClient = new RedisNS 'credentials', redis.createClient @redisKey
@testJobManager = new JobManager client: testClient, timeoutSeconds: 1
@server = new Server serverOptions, {jobManager,credentialsUuid:'credentials-service-uuid'}
@server.run =>
@serverPort = @server.address().port
done()
afterEach (done) ->
@server.stop done
describe 'when a valid request is made', ->
beforeEach (done) ->
options =
uri: '/request'
baseUrl: "http://localhost:#{@serverPort}"
httpSignature: @HTTP_SIGNATURE_OPTIONS
headers:
'X-MESHBLU-UUID': 'credentials-service-uuid'
json:
fromUuid: 'flow-uuid'
payload:
nodeId: 'node-uuid'
transactionId: 'message-uuid'
request.post options, (error, @response, @body) =>
done error
it 'should return a 201', ->
expect(@response.statusCode).to.equal 201
describe 'it should store the request into a queue', ->
beforeEach (done) ->
@testJobManager.getRequest ['request'], (error, @result) => done error
it 'should return the request', ->
expect(@result.metadata).to.containSubset
flowId: 'flow-uuid'
nodeId: 'node-uuid'
toNodeId: 'engine-input'
transactionId: 'message-uuid'
describe 'when an unauthorized request is made', ->
beforeEach (done) ->
options =
uri: '/request'
baseUrl: "http://localhost:#{@serverPort}"
httpSignature: @HTTP_SIGNATURE_OPTIONS
headers:
'X-MESHBLU-UUID': 'some-other-uuid'
json: true
request.post options, (error, @response, @body) =>
done error
it 'should return a 422', ->
expect(@response.statusCode).to.equal 422
| 88202 | http = require 'http'
request = require 'request'
shmock = require '@octoblu/shmock'
Server = require '../../src/server'
JobManager = require 'meshblu-core-job-manager'
redis = require 'fakeredis'
RedisNS = require '@octoblu/redis-ns'
uuid = require 'uuid'
{publicKey, privateKey} = require '../keys.json'
describe 'Get Credentials', ->
beforeEach (done) ->
serverOptions =
port: undefined,
disableLogging: true
publicKey:
publicKey: publicKey
@HTTP_SIGNATURE_OPTIONS =
keyId: '<KEY>'
key: privateKey
headers: [ 'date', 'X-MESHBLU-UUID' ]
@redisKey = uuid.v1()
client = new RedisNS 'credentials', redis.createClient @redisKey
jobManager = new JobManager client: client, timeoutSeconds: 1
testClient = new RedisNS 'credentials', redis.createClient @redisKey
@testJobManager = new JobManager client: testClient, timeoutSeconds: 1
@server = new Server serverOptions, {jobManager,credentialsUuid:'credentials-service-uuid'}
@server.run =>
@serverPort = @server.address().port
done()
afterEach (done) ->
@server.stop done
describe 'when a valid request is made', ->
beforeEach (done) ->
options =
uri: '/request'
baseUrl: "http://localhost:#{@serverPort}"
httpSignature: @HTTP_SIGNATURE_OPTIONS
headers:
'X-MESHBLU-UUID': 'credentials-service-uuid'
json:
fromUuid: 'flow-uuid'
payload:
nodeId: 'node-uuid'
transactionId: 'message-uuid'
request.post options, (error, @response, @body) =>
done error
it 'should return a 201', ->
expect(@response.statusCode).to.equal 201
describe 'it should store the request into a queue', ->
beforeEach (done) ->
@testJobManager.getRequest ['request'], (error, @result) => done error
it 'should return the request', ->
expect(@result.metadata).to.containSubset
flowId: 'flow-uuid'
nodeId: 'node-uuid'
toNodeId: 'engine-input'
transactionId: 'message-uuid'
describe 'when an unauthorized request is made', ->
beforeEach (done) ->
options =
uri: '/request'
baseUrl: "http://localhost:#{@serverPort}"
httpSignature: @HTTP_SIGNATURE_OPTIONS
headers:
'X-MESHBLU-UUID': 'some-other-uuid'
json: true
request.post options, (error, @response, @body) =>
done error
it 'should return a 422', ->
expect(@response.statusCode).to.equal 422
| true | http = require 'http'
request = require 'request'
shmock = require '@octoblu/shmock'
Server = require '../../src/server'
JobManager = require 'meshblu-core-job-manager'
redis = require 'fakeredis'
RedisNS = require '@octoblu/redis-ns'
uuid = require 'uuid'
{publicKey, privateKey} = require '../keys.json'
describe 'Get Credentials', ->
beforeEach (done) ->
serverOptions =
port: undefined,
disableLogging: true
publicKey:
publicKey: publicKey
@HTTP_SIGNATURE_OPTIONS =
keyId: 'PI:KEY:<KEY>END_PI'
key: privateKey
headers: [ 'date', 'X-MESHBLU-UUID' ]
@redisKey = uuid.v1()
client = new RedisNS 'credentials', redis.createClient @redisKey
jobManager = new JobManager client: client, timeoutSeconds: 1
testClient = new RedisNS 'credentials', redis.createClient @redisKey
@testJobManager = new JobManager client: testClient, timeoutSeconds: 1
@server = new Server serverOptions, {jobManager,credentialsUuid:'credentials-service-uuid'}
@server.run =>
@serverPort = @server.address().port
done()
afterEach (done) ->
@server.stop done
describe 'when a valid request is made', ->
beforeEach (done) ->
options =
uri: '/request'
baseUrl: "http://localhost:#{@serverPort}"
httpSignature: @HTTP_SIGNATURE_OPTIONS
headers:
'X-MESHBLU-UUID': 'credentials-service-uuid'
json:
fromUuid: 'flow-uuid'
payload:
nodeId: 'node-uuid'
transactionId: 'message-uuid'
request.post options, (error, @response, @body) =>
done error
it 'should return a 201', ->
expect(@response.statusCode).to.equal 201
describe 'it should store the request into a queue', ->
beforeEach (done) ->
@testJobManager.getRequest ['request'], (error, @result) => done error
it 'should return the request', ->
expect(@result.metadata).to.containSubset
flowId: 'flow-uuid'
nodeId: 'node-uuid'
toNodeId: 'engine-input'
transactionId: 'message-uuid'
describe 'when an unauthorized request is made', ->
beforeEach (done) ->
options =
uri: '/request'
baseUrl: "http://localhost:#{@serverPort}"
httpSignature: @HTTP_SIGNATURE_OPTIONS
headers:
'X-MESHBLU-UUID': 'some-other-uuid'
json: true
request.post options, (error, @response, @body) =>
done error
it 'should return a 422', ->
expect(@response.statusCode).to.equal 422
|
[
{
"context": "./index\")(robot)\n\n userInfo =\n name: \"atmos\",\n room: \"#my-room\"\n\n user = robot",
"end": 773,
"score": 0.836276650428772,
"start": 768,
"tag": "NAME",
"value": "atmos"
}
] | test/scripts/deployment_test.coffee | travis-ci/hubot-deploy | 2 | VCR = require "ys-vcr"
Path = require "path"
Robot = require "hubot/src/robot"
TextMessage = require("hubot/src/message").TextMessage
Verifiers = require(Path.join(__dirname, "..", "..", "src", "models", "verifiers"))
TokenForBrain = Verifiers.VaultKey
describe "Deploying from chat", () ->
user = null
robot = null
adapter = null
beforeEach (done) ->
VCR.playback()
process.env.HUBOT_FERNET_SECRETS or= "HSfTG4uWzw9whtlLEmNAzscHh96eHUFt3McvoWBXmHk="
process.env.HUBOT_DEPLOY_EMIT_GITHUB_DEPLOYMENTS = true
robot = new Robot(null, "mock-adapter", true, "Hubot")
robot.adapter.on "connected", () ->
require("hubot-vault")(robot)
require("../../index")(robot)
userInfo =
name: "atmos",
room: "#my-room"
user = robot.brain.userForId "1", userInfo
adapter = robot.adapter
robot.vault.forUser(user).set(TokenForBrain, "fake-token")
done()
robot.run()
afterEach () ->
delete(process.env.HUBOT_DEPLOY_DEFAULT_ENVIRONMENT)
VCR.stop()
robot.server.close()
robot.shutdown()
it "creates deployments when requested from chat", (done) ->
VCR.play '/repos-atmos-hubot-deploy-deployment-production-create-success'
robot.on "github_deployment", (msg, deployment) ->
assert.equal "hubot-deploy", deployment.name
assert.equal "production", deployment.env
done()
adapter.receive(new TextMessage(user, "Hubot deploy hubot-deploy to production"))
| 117504 | VCR = require "ys-vcr"
Path = require "path"
Robot = require "hubot/src/robot"
TextMessage = require("hubot/src/message").TextMessage
Verifiers = require(Path.join(__dirname, "..", "..", "src", "models", "verifiers"))
TokenForBrain = Verifiers.VaultKey
describe "Deploying from chat", () ->
user = null
robot = null
adapter = null
beforeEach (done) ->
VCR.playback()
process.env.HUBOT_FERNET_SECRETS or= "HSfTG4uWzw9whtlLEmNAzscHh96eHUFt3McvoWBXmHk="
process.env.HUBOT_DEPLOY_EMIT_GITHUB_DEPLOYMENTS = true
robot = new Robot(null, "mock-adapter", true, "Hubot")
robot.adapter.on "connected", () ->
require("hubot-vault")(robot)
require("../../index")(robot)
userInfo =
name: "<NAME>",
room: "#my-room"
user = robot.brain.userForId "1", userInfo
adapter = robot.adapter
robot.vault.forUser(user).set(TokenForBrain, "fake-token")
done()
robot.run()
afterEach () ->
delete(process.env.HUBOT_DEPLOY_DEFAULT_ENVIRONMENT)
VCR.stop()
robot.server.close()
robot.shutdown()
it "creates deployments when requested from chat", (done) ->
VCR.play '/repos-atmos-hubot-deploy-deployment-production-create-success'
robot.on "github_deployment", (msg, deployment) ->
assert.equal "hubot-deploy", deployment.name
assert.equal "production", deployment.env
done()
adapter.receive(new TextMessage(user, "Hubot deploy hubot-deploy to production"))
| true | VCR = require "ys-vcr"
Path = require "path"
Robot = require "hubot/src/robot"
TextMessage = require("hubot/src/message").TextMessage
Verifiers = require(Path.join(__dirname, "..", "..", "src", "models", "verifiers"))
TokenForBrain = Verifiers.VaultKey
describe "Deploying from chat", () ->
user = null
robot = null
adapter = null
beforeEach (done) ->
VCR.playback()
process.env.HUBOT_FERNET_SECRETS or= "HSfTG4uWzw9whtlLEmNAzscHh96eHUFt3McvoWBXmHk="
process.env.HUBOT_DEPLOY_EMIT_GITHUB_DEPLOYMENTS = true
robot = new Robot(null, "mock-adapter", true, "Hubot")
robot.adapter.on "connected", () ->
require("hubot-vault")(robot)
require("../../index")(robot)
userInfo =
name: "PI:NAME:<NAME>END_PI",
room: "#my-room"
user = robot.brain.userForId "1", userInfo
adapter = robot.adapter
robot.vault.forUser(user).set(TokenForBrain, "fake-token")
done()
robot.run()
afterEach () ->
delete(process.env.HUBOT_DEPLOY_DEFAULT_ENVIRONMENT)
VCR.stop()
robot.server.close()
robot.shutdown()
it "creates deployments when requested from chat", (done) ->
VCR.play '/repos-atmos-hubot-deploy-deployment-production-create-success'
robot.on "github_deployment", (msg, deployment) ->
assert.equal "hubot-deploy", deployment.name
assert.equal "production", deployment.env
done()
adapter.receive(new TextMessage(user, "Hubot deploy hubot-deploy to production"))
|
[
{
"context": "\n\tdefaults: =>\n\t\treturn @extend super, \n\t\t\thkey: \"nodepaymentexample\"\n\t\t\thost: \"localhost\"\n\t\t\tport: 6379\n\t\t\toptions: {",
"end": 160,
"score": 0.9927526116371155,
"start": 142,
"tag": "KEY",
"value": "nodepaymentexample"
},
{
"context": "ateClien... | _src/lib/paymentstores/redishashstore.coffee | mpneuried/node-payments | 20 | _ = require( "lodash" )
module.exports = class RedisHashStore extends require( "../basic" )
defaults: =>
return @extend super,
hkey: "nodepaymentexample"
host: "localhost"
port: 6379
options: {}
redis: null
constructor: ->
super
# just a simulation to globaly handle server powered stores
@connected = false
return
connect: =>
if @config.redis?.constructor?.name is "RedisClient"
@redis = @config.redis
else
try
redis = require("redis")
catch _err
@error( "you have to load redis via `npm install redis hiredis`" )
return
@redis = redis.createClient( @config.port or 6379, @config.host or "127.0.0.1", @config.options or {} )
@connected = @redis.connected or false
@redis.on "connect", =>
@connected = true
@emit( "connect" )
return
@redis.on "error", ( err )=>
if err.message.indexOf( "ECONNREFUSED" )
@connected = false
@emit( "disconnect" )
else
@error( "Redis ERROR", err )
@emit( "error" )
return
return
get: ( id, cb )=>
process.nextTick =>
@redis.hget @config.hkey, id, ( err, data )=>
if err
cb( err )
return
cb( null, JSON.parse( data ) )
return
return
return
set: ( payment, cb )=>
process.nextTick =>
@redis.hset @config.hkey, payment.id, payment.toString(), ( err, done )=>
if err
cb( err )
return
@debug "saved", payment.id, payment.toString()
cb( null )
return
return
return
destroy: ( payment, cb )=>
process.nextTick =>
@debug "destroy", payment.id
@redis.hdel @config.hkey, payment.id, ( err, done )=>
cb( err )
return
return
return
clear: ( cb )=>
@debug "clear"
@redis.del @config.hkey, ( err, done )=>
cb( err )
return
return
| 113311 | _ = require( "lodash" )
module.exports = class RedisHashStore extends require( "../basic" )
defaults: =>
return @extend super,
hkey: "<KEY>"
host: "localhost"
port: 6379
options: {}
redis: null
constructor: ->
super
# just a simulation to globaly handle server powered stores
@connected = false
return
connect: =>
if @config.redis?.constructor?.name is "RedisClient"
@redis = @config.redis
else
try
redis = require("redis")
catch _err
@error( "you have to load redis via `npm install redis hiredis`" )
return
@redis = redis.createClient( @config.port or 6379, @config.host or "127.0.0.1", @config.options or {} )
@connected = @redis.connected or false
@redis.on "connect", =>
@connected = true
@emit( "connect" )
return
@redis.on "error", ( err )=>
if err.message.indexOf( "ECONNREFUSED" )
@connected = false
@emit( "disconnect" )
else
@error( "Redis ERROR", err )
@emit( "error" )
return
return
get: ( id, cb )=>
process.nextTick =>
@redis.hget @config.hkey, id, ( err, data )=>
if err
cb( err )
return
cb( null, JSON.parse( data ) )
return
return
return
set: ( payment, cb )=>
process.nextTick =>
@redis.hset @config.hkey, payment.id, payment.toString(), ( err, done )=>
if err
cb( err )
return
@debug "saved", payment.id, payment.toString()
cb( null )
return
return
return
destroy: ( payment, cb )=>
process.nextTick =>
@debug "destroy", payment.id
@redis.hdel @config.hkey, payment.id, ( err, done )=>
cb( err )
return
return
return
clear: ( cb )=>
@debug "clear"
@redis.del @config.hkey, ( err, done )=>
cb( err )
return
return
| true | _ = require( "lodash" )
module.exports = class RedisHashStore extends require( "../basic" )
defaults: =>
return @extend super,
hkey: "PI:KEY:<KEY>END_PI"
host: "localhost"
port: 6379
options: {}
redis: null
constructor: ->
super
# just a simulation to globaly handle server powered stores
@connected = false
return
connect: =>
if @config.redis?.constructor?.name is "RedisClient"
@redis = @config.redis
else
try
redis = require("redis")
catch _err
@error( "you have to load redis via `npm install redis hiredis`" )
return
@redis = redis.createClient( @config.port or 6379, @config.host or "127.0.0.1", @config.options or {} )
@connected = @redis.connected or false
@redis.on "connect", =>
@connected = true
@emit( "connect" )
return
@redis.on "error", ( err )=>
if err.message.indexOf( "ECONNREFUSED" )
@connected = false
@emit( "disconnect" )
else
@error( "Redis ERROR", err )
@emit( "error" )
return
return
get: ( id, cb )=>
process.nextTick =>
@redis.hget @config.hkey, id, ( err, data )=>
if err
cb( err )
return
cb( null, JSON.parse( data ) )
return
return
return
set: ( payment, cb )=>
process.nextTick =>
@redis.hset @config.hkey, payment.id, payment.toString(), ( err, done )=>
if err
cb( err )
return
@debug "saved", payment.id, payment.toString()
cb( null )
return
return
return
destroy: ( payment, cb )=>
process.nextTick =>
@debug "destroy", payment.id
@redis.hdel @config.hkey, payment.id, ( err, done )=>
cb( err )
return
return
return
clear: ( cb )=>
@debug "clear"
@redis.del @config.hkey, ( err, done )=>
cb( err )
return
return
|
[
{
"context": "# Description:\n# Marvin, the Paranoid Android, from The Hitchhiker's Guid",
"end": 25,
"score": 0.995067834854126,
"start": 19,
"tag": "NAME",
"value": "Marvin"
},
{
"context": "ved me\n# hubot how is your life?\n#\n# Author:\n# jweslley\n\nquotes = [\n \"I thin... | src/scripts/marvin-quotes.coffee | Devex/hubot-scripts | 0 | # Description:
# Marvin, the Paranoid Android, from The Hitchhiker's Guide to the Galaxy
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Commands:
# hubot you saved me
# hubot how is your life?
#
# Author:
# jweslley
quotes = [
"I think you ought to know I'm feeling very depressed.",
"Life! Don't talk to me about life",
"Life, loathe it or ignore it, you can't like it",
"Life's bad enough as it is without wanting to invent any more of it",
"Funny, how just when you think life can't possibly get any worse it suddenly does",
"I have a million ideas. They all point to certain death.",
"My capacity for happiness, you could fit into a matchbox without taking out the matches first",
"I could calculate your chance of survival, but you won't like it.",
"I'd give you advice, but you wouldn't listen. No one ever does.",
"I ache, therefore I am.",
"Pardon me for breathing, which I never do anyway so I don't know why I bother to say it, oh God, I'm so depressed. Here's another one of those self-satisfied doors. Life! Don't talk to me about life.",
"Not that anyone cares what I say, but the Restaurant is on the other end of the universe",
"Here I am, brain the size of a planet and they ask me to take you down to the bridge. Call that job satisfaction? 'Cos I don't.",
"Funny, how just when you think life can't possibly get any worse it suddenly does."
]
module.exports = (robot) ->
robot.hear /you saved me/, (msg) ->
msg.send "I know. Wretched isn't it?"
robot.hear /(.*)(life)(.*)/i, (msg) ->
msg.send msg.random quotes
| 172497 | # Description:
# <NAME>, the Paranoid Android, from The Hitchhiker's Guide to the Galaxy
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Commands:
# hubot you saved me
# hubot how is your life?
#
# Author:
# jweslley
quotes = [
"I think you ought to know I'm feeling very depressed.",
"Life! Don't talk to me about life",
"Life, loathe it or ignore it, you can't like it",
"Life's bad enough as it is without wanting to invent any more of it",
"Funny, how just when you think life can't possibly get any worse it suddenly does",
"I have a million ideas. They all point to certain death.",
"My capacity for happiness, you could fit into a matchbox without taking out the matches first",
"I could calculate your chance of survival, but you won't like it.",
"I'd give you advice, but you wouldn't listen. No one ever does.",
"I ache, therefore I am.",
"Pardon me for breathing, which I never do anyway so I don't know why I bother to say it, oh God, I'm so depressed. Here's another one of those self-satisfied doors. Life! Don't talk to me about life.",
"Not that anyone cares what I say, but the Restaurant is on the other end of the universe",
"Here I am, brain the size of a planet and they ask me to take you down to the bridge. Call that job satisfaction? 'Cos I don't.",
"Funny, how just when you think life can't possibly get any worse it suddenly does."
]
module.exports = (robot) ->
robot.hear /you saved me/, (msg) ->
msg.send "I know. Wretched isn't it?"
robot.hear /(.*)(life)(.*)/i, (msg) ->
msg.send msg.random quotes
| true | # Description:
# PI:NAME:<NAME>END_PI, the Paranoid Android, from The Hitchhiker's Guide to the Galaxy
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Commands:
# hubot you saved me
# hubot how is your life?
#
# Author:
# jweslley
quotes = [
"I think you ought to know I'm feeling very depressed.",
"Life! Don't talk to me about life",
"Life, loathe it or ignore it, you can't like it",
"Life's bad enough as it is without wanting to invent any more of it",
"Funny, how just when you think life can't possibly get any worse it suddenly does",
"I have a million ideas. They all point to certain death.",
"My capacity for happiness, you could fit into a matchbox without taking out the matches first",
"I could calculate your chance of survival, but you won't like it.",
"I'd give you advice, but you wouldn't listen. No one ever does.",
"I ache, therefore I am.",
"Pardon me for breathing, which I never do anyway so I don't know why I bother to say it, oh God, I'm so depressed. Here's another one of those self-satisfied doors. Life! Don't talk to me about life.",
"Not that anyone cares what I say, but the Restaurant is on the other end of the universe",
"Here I am, brain the size of a planet and they ask me to take you down to the bridge. Call that job satisfaction? 'Cos I don't.",
"Funny, how just when you think life can't possibly get any worse it suddenly does."
]
module.exports = (robot) ->
robot.hear /you saved me/, (msg) ->
msg.send "I know. Wretched isn't it?"
robot.hear /(.*)(life)(.*)/i, (msg) ->
msg.send msg.random quotes
|
[
{
"context": ".state.length == 0\n phantom.state = [\n 'sencha'\n 'aconran'\n 'ariyahidayat'\n ",
"end": 120,
"score": 0.9943153262138367,
"start": 114,
"tag": "NAME",
"value": "sencha"
},
{
"context": "0\n phantom.state = [\n 'sencha'\n ... | app/contact-app/contact-webapp/src/main/webapp/touch/sdk/command/vendor/nodejs/node_modules/jasmine-node/node_modules/jasmine-reporters/ext/phantomjs/examples/follow.coffee | hiya492/spring | 213 | # List following and followers from several accounts
if phantom.state.length == 0
phantom.state = [
'sencha'
'aconran'
'ariyahidayat'
'darrellmeyer'
'DavidKaneda'
'DmitryBaranovsk'
'donovanerba'
'edspencer'
'helder_correia'
'jamespearce'
'jamieavins'
'jarrednicholls'
'jayrobinson'
'lojjic'
'mmullany'
'philogb'
'rdougan'
'tmaintz'
'whereisthysting'
].join ':'
phantom.open 'http://mobile.twitter.com/sencha'
else
users = phantom.state.split ':'
id = users[0]
next = users[1]
data = document.querySelector 'div.timeline-following'
phantom.state = users.slice(1).join ':'
console.log id + ': ' + data.innerText
if next
phantom.open 'http://mobile.twitter.com/' + next
else
phantom.exit 1
| 167675 | # List following and followers from several accounts
if phantom.state.length == 0
phantom.state = [
'<NAME>'
'<NAME>'
'<NAME>'
'darrellmeyer'
'<NAME>'
'<NAME>'
'donovanerba'
'edspencer'
'helder_correia'
'jamespearce'
'jamieavins'
'jarrednicholls'
'jayrobin<NAME>'
'lojjic'
'mmullany'
'philogb'
'rdougan'
'tmaintz'
'whereisthysting'
].join ':'
phantom.open 'http://mobile.twitter.com/sencha'
else
users = phantom.state.split ':'
id = users[0]
next = users[1]
data = document.querySelector 'div.timeline-following'
phantom.state = users.slice(1).join ':'
console.log id + ': ' + data.innerText
if next
phantom.open 'http://mobile.twitter.com/' + next
else
phantom.exit 1
| true | # List following and followers from several accounts
if phantom.state.length == 0
phantom.state = [
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'darrellmeyer'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'donovanerba'
'edspencer'
'helder_correia'
'jamespearce'
'jamieavins'
'jarrednicholls'
'jayrobinPI:NAME:<NAME>END_PI'
'lojjic'
'mmullany'
'philogb'
'rdougan'
'tmaintz'
'whereisthysting'
].join ':'
phantom.open 'http://mobile.twitter.com/sencha'
else
users = phantom.state.split ':'
id = users[0]
next = users[1]
data = document.querySelector 'div.timeline-following'
phantom.state = users.slice(1).join ':'
console.log id + ': ' + data.innerText
if next
phantom.open 'http://mobile.twitter.com/' + next
else
phantom.exit 1
|
[
{
"context": "module.exports =\n\n provideProvider: ->\n key: 'makefile'\n mod: require './module'\n",
"end": 58,
"score": 0.9914454221725464,
"start": 50,
"tag": "KEY",
"value": "makefile"
}
] | lib/build-tools-make.coffee | deprint/build-tools-make | 0 | module.exports =
provideProvider: ->
key: 'makefile'
mod: require './module'
| 214402 | module.exports =
provideProvider: ->
key: '<KEY>'
mod: require './module'
| true | module.exports =
provideProvider: ->
key: 'PI:KEY:<KEY>END_PI'
mod: require './module'
|
[
{
"context": " [iOrder](http://neocotic.com/iOrder) \n# (c) 2013 Alasdair Mercer \n# Freely distributable under the MIT license. ",
"end": 67,
"score": 0.9998736381530762,
"start": 52,
"tag": "NAME",
"value": "Alasdair Mercer"
}
] | chrome/src/lib/notification.coffee | neocotic/iOrder | 0 | # [iOrder](http://neocotic.com/iOrder)
# (c) 2013 Alasdair Mercer
# Freely distributable under the MIT license.
# For all details and documentation:
# <http://neocotic.com/iOrder>
# Private variables
# -----------------
# Easily accessible reference to analytics, logging, storage, utilities, and the extension
# controller.
{analytics, ext, log, store, utils} = chrome.extension.getBackgroundPage()
# Private functions
# -----------------
# Build the HTML or create the elements to be displayed within the notification.
buildContents = ->
log.trace()
data = ext.notification ? {}
log.debug 'Building a desktop notfication for the following data...', data
if data.html
document.body.innerHTML = data.html
else
if data.icon
icon = document.createElement 'div'
icon.id = 'icon'
icon.style.cssText = data.iconStyle if data.iconStyle
image = document.createElement 'img'
image.src = data.icon
image.width = 32
image.height = 32
icon.appendChild image
document.body.appendChild icon
if data.title
title = document.createElement 'div'
title.id = 'title'
title.innerHTML = data.title
title.style.cssText = data.titleStyle if data.titleStyle
document.body.appendChild title
if data.description
description = document.createElement 'div'
description.id = 'description'
description.innerHTML = data.description
description.style.cssText = data.descriptionStyle if data.descriptionStyle
document.body.appendChild description
# Notification page setup
# -----------------------
notification = window.notification = new class Notification extends utils.Class
# Public functions
# ----------------
# Initialize the notification page.
init: ->
log.trace()
log.info 'Initializing a notification'
analytics.track 'Frames', 'Displayed', 'Notification'
# Populate the desktop notification.
# Afterwards, reset `ext` to avoid affecting other update processes. If the user has disabled
# the notifications option this should still be called for safety.
do buildContents
ext.reset()
# Set a timer to close the notification after a specified period of time, if the user enabled
# the corresponding option; otherwise it should stay open until it is closed manually by the
# user.
duration = store.get 'notifications.duration'
setTimeout (-> do close), duration if duration > 0
# Initialize `notification` when the DOM is ready.
utils.ready this, -> notification.init() | 145534 | # [iOrder](http://neocotic.com/iOrder)
# (c) 2013 <NAME>
# Freely distributable under the MIT license.
# For all details and documentation:
# <http://neocotic.com/iOrder>
# Private variables
# -----------------
# Easily accessible reference to analytics, logging, storage, utilities, and the extension
# controller.
{analytics, ext, log, store, utils} = chrome.extension.getBackgroundPage()
# Private functions
# -----------------
# Build the HTML or create the elements to be displayed within the notification.
buildContents = ->
log.trace()
data = ext.notification ? {}
log.debug 'Building a desktop notfication for the following data...', data
if data.html
document.body.innerHTML = data.html
else
if data.icon
icon = document.createElement 'div'
icon.id = 'icon'
icon.style.cssText = data.iconStyle if data.iconStyle
image = document.createElement 'img'
image.src = data.icon
image.width = 32
image.height = 32
icon.appendChild image
document.body.appendChild icon
if data.title
title = document.createElement 'div'
title.id = 'title'
title.innerHTML = data.title
title.style.cssText = data.titleStyle if data.titleStyle
document.body.appendChild title
if data.description
description = document.createElement 'div'
description.id = 'description'
description.innerHTML = data.description
description.style.cssText = data.descriptionStyle if data.descriptionStyle
document.body.appendChild description
# Notification page setup
# -----------------------
notification = window.notification = new class Notification extends utils.Class
# Public functions
# ----------------
# Initialize the notification page.
init: ->
log.trace()
log.info 'Initializing a notification'
analytics.track 'Frames', 'Displayed', 'Notification'
# Populate the desktop notification.
# Afterwards, reset `ext` to avoid affecting other update processes. If the user has disabled
# the notifications option this should still be called for safety.
do buildContents
ext.reset()
# Set a timer to close the notification after a specified period of time, if the user enabled
# the corresponding option; otherwise it should stay open until it is closed manually by the
# user.
duration = store.get 'notifications.duration'
setTimeout (-> do close), duration if duration > 0
# Initialize `notification` when the DOM is ready.
utils.ready this, -> notification.init() | true | # [iOrder](http://neocotic.com/iOrder)
# (c) 2013 PI:NAME:<NAME>END_PI
# Freely distributable under the MIT license.
# For all details and documentation:
# <http://neocotic.com/iOrder>
# Private variables
# -----------------
# Easily accessible reference to analytics, logging, storage, utilities, and the extension
# controller.
{analytics, ext, log, store, utils} = chrome.extension.getBackgroundPage()
# Private functions
# -----------------
# Build the HTML or create the elements to be displayed within the notification.
buildContents = ->
log.trace()
data = ext.notification ? {}
log.debug 'Building a desktop notfication for the following data...', data
if data.html
document.body.innerHTML = data.html
else
if data.icon
icon = document.createElement 'div'
icon.id = 'icon'
icon.style.cssText = data.iconStyle if data.iconStyle
image = document.createElement 'img'
image.src = data.icon
image.width = 32
image.height = 32
icon.appendChild image
document.body.appendChild icon
if data.title
title = document.createElement 'div'
title.id = 'title'
title.innerHTML = data.title
title.style.cssText = data.titleStyle if data.titleStyle
document.body.appendChild title
if data.description
description = document.createElement 'div'
description.id = 'description'
description.innerHTML = data.description
description.style.cssText = data.descriptionStyle if data.descriptionStyle
document.body.appendChild description
# Notification page setup
# -----------------------
notification = window.notification = new class Notification extends utils.Class
# Public functions
# ----------------
# Initialize the notification page.
init: ->
log.trace()
log.info 'Initializing a notification'
analytics.track 'Frames', 'Displayed', 'Notification'
# Populate the desktop notification.
# Afterwards, reset `ext` to avoid affecting other update processes. If the user has disabled
# the notifications option this should still be called for safety.
do buildContents
ext.reset()
# Set a timer to close the notification after a specified period of time, if the user enabled
# the corresponding option; otherwise it should stay open until it is closed manually by the
# user.
duration = store.get 'notifications.duration'
setTimeout (-> do close), duration if duration > 0
# Initialize `notification` when the DOM is ready.
utils.ready this, -> notification.init() |
[
{
"context": " data =\n email: @state.email\n password: @state.password\n request.post '/login', data, (err, res) =>\n ",
"end": 570,
"score": 0.9566632509231567,
"start": 555,
"tag": "PASSWORD",
"value": "@state.password"
}
] | client/views/Login/index.coffee | stevelacy/portal | 0 | request = require 'superagent'
{view, DOM} = require 'fission'
NavbarView = require '../../components/NavBar'
{div, h1, button, a, img, br, form, input} = DOM
module.exports = view
displayName: 'Login'
init: ->
email: ''
password: ''
status: ''
updateEmail: (e) ->
@setState email: e.target.value
updatePassword: (e) ->
@setState password: e.target.value
login: (e) ->
e.preventDefault()
return unless @state.email?.length > 1 and @state.password?.length > 3
data =
email: @state.email
password: @state.password
request.post '/login', data, (err, res) =>
if res?.status == 200
window.localStorage.setItem 'token', res.body.token
window.location = '/'
else
@setState status: res.body.message
setTimeout =>
@setState status: ''
, 2000
mounted: ->
@refs.email.getDOMNode().focus()
render: ->
div className: 'login view',
NavbarView
color: 'light'
div className: 'page',
div className: 'box',
div className: 'logo white', 'Login'
form
method: 'post'
onSubmit: @login,
input
ref: 'email'
type: 'email'
name: 'email'
placeholder: 'Email'
value: @state.email
onChange: @updateEmail
input
type: 'password'
name: 'password'
value: @state.password
onChange: @updatePassword
placeholder: '****'
input
type: 'submit'
value: 'LOGIN'
className: 'button blue large wide center'
style: cursor: 'pointer'
div className: 'status',
@state.status
| 202351 | request = require 'superagent'
{view, DOM} = require 'fission'
NavbarView = require '../../components/NavBar'
{div, h1, button, a, img, br, form, input} = DOM
module.exports = view
displayName: 'Login'
init: ->
email: ''
password: ''
status: ''
updateEmail: (e) ->
@setState email: e.target.value
updatePassword: (e) ->
@setState password: e.target.value
login: (e) ->
e.preventDefault()
return unless @state.email?.length > 1 and @state.password?.length > 3
data =
email: @state.email
password: <PASSWORD>
request.post '/login', data, (err, res) =>
if res?.status == 200
window.localStorage.setItem 'token', res.body.token
window.location = '/'
else
@setState status: res.body.message
setTimeout =>
@setState status: ''
, 2000
mounted: ->
@refs.email.getDOMNode().focus()
render: ->
div className: 'login view',
NavbarView
color: 'light'
div className: 'page',
div className: 'box',
div className: 'logo white', 'Login'
form
method: 'post'
onSubmit: @login,
input
ref: 'email'
type: 'email'
name: 'email'
placeholder: 'Email'
value: @state.email
onChange: @updateEmail
input
type: 'password'
name: 'password'
value: @state.password
onChange: @updatePassword
placeholder: '****'
input
type: 'submit'
value: 'LOGIN'
className: 'button blue large wide center'
style: cursor: 'pointer'
div className: 'status',
@state.status
| true | request = require 'superagent'
{view, DOM} = require 'fission'
NavbarView = require '../../components/NavBar'
{div, h1, button, a, img, br, form, input} = DOM
module.exports = view
displayName: 'Login'
init: ->
email: ''
password: ''
status: ''
updateEmail: (e) ->
@setState email: e.target.value
updatePassword: (e) ->
@setState password: e.target.value
login: (e) ->
e.preventDefault()
return unless @state.email?.length > 1 and @state.password?.length > 3
data =
email: @state.email
password: PI:PASSWORD:<PASSWORD>END_PI
request.post '/login', data, (err, res) =>
if res?.status == 200
window.localStorage.setItem 'token', res.body.token
window.location = '/'
else
@setState status: res.body.message
setTimeout =>
@setState status: ''
, 2000
mounted: ->
@refs.email.getDOMNode().focus()
render: ->
div className: 'login view',
NavbarView
color: 'light'
div className: 'page',
div className: 'box',
div className: 'logo white', 'Login'
form
method: 'post'
onSubmit: @login,
input
ref: 'email'
type: 'email'
name: 'email'
placeholder: 'Email'
value: @state.email
onChange: @updateEmail
input
type: 'password'
name: 'password'
value: @state.password
onChange: @updatePassword
placeholder: '****'
input
type: 'submit'
value: 'LOGIN'
className: 'button blue large wide center'
style: cursor: 'pointer'
div className: 'status',
@state.status
|
[
{
"context": "\n\t\t\tsecret = $('#secret').val()\n\t\t\tusername = $('#username').val()\n\t\t\tpassword = $('#password').val()\n\t\t\trem",
"end": 352,
"score": 0.9928346872329712,
"start": 344,
"tag": "USERNAME",
"value": "username"
},
{
"context": "username = $('#username').val()\... | src/register.coffee | roobin/StaticSite | 1 |
require ['app/auth', 'util/admin-redirect'], (auth) ->
# view code
$ ->
$('body').fadeIn()
$('div.alert').hide()
$('#key').focus()
$('#registerform').submit (event) ->
event.preventDefault()
$('#alerts').slideUp('fast')
# process sign-in
key = $('#key').val()
secret = $('#secret').val()
username = $('#username').val()
password = $('#password').val()
rememberme = $('#rememberme').prop('checked')
auth.register(key, secret, username, password, rememberme).then ->
auth.login(username, password, rememberme).then ->
location.href = './'
, (err) ->
console.log(err)
$('#alerts').slideDown('fast').find('.msg').text(err.message)
, (err) ->
console.log(err)
$('#alerts').slideDown('fast').find('.msg').text(err.message)
$('div.alert .close').click (event) ->
event.preventDefault()
$(this).closest('.alert').slideUp('fast')
$('#key').focus()
# cancel by esc
$('#registerform').keyup (event) ->
if event.which is 27
location.href = $('a.cancel').attr('href')
| 1942 |
require ['app/auth', 'util/admin-redirect'], (auth) ->
# view code
$ ->
$('body').fadeIn()
$('div.alert').hide()
$('#key').focus()
$('#registerform').submit (event) ->
event.preventDefault()
$('#alerts').slideUp('fast')
# process sign-in
key = $('#key').val()
secret = $('#secret').val()
username = $('#username').val()
password = $('#<PASSWORD>').val()
rememberme = $('#rememberme').prop('checked')
auth.register(key, secret, username, password, rememberme).then ->
auth.login(username, password, rememberme).then ->
location.href = './'
, (err) ->
console.log(err)
$('#alerts').slideDown('fast').find('.msg').text(err.message)
, (err) ->
console.log(err)
$('#alerts').slideDown('fast').find('.msg').text(err.message)
$('div.alert .close').click (event) ->
event.preventDefault()
$(this).closest('.alert').slideUp('fast')
$('#key').focus()
# cancel by esc
$('#registerform').keyup (event) ->
if event.which is 27
location.href = $('a.cancel').attr('href')
| true |
require ['app/auth', 'util/admin-redirect'], (auth) ->
# view code
$ ->
$('body').fadeIn()
$('div.alert').hide()
$('#key').focus()
$('#registerform').submit (event) ->
event.preventDefault()
$('#alerts').slideUp('fast')
# process sign-in
key = $('#key').val()
secret = $('#secret').val()
username = $('#username').val()
password = $('#PI:PASSWORD:<PASSWORD>END_PI').val()
rememberme = $('#rememberme').prop('checked')
auth.register(key, secret, username, password, rememberme).then ->
auth.login(username, password, rememberme).then ->
location.href = './'
, (err) ->
console.log(err)
$('#alerts').slideDown('fast').find('.msg').text(err.message)
, (err) ->
console.log(err)
$('#alerts').slideDown('fast').find('.msg').text(err.message)
$('div.alert .close').click (event) ->
event.preventDefault()
$(this).closest('.alert').slideUp('fast')
$('#key').focus()
# cancel by esc
$('#registerform').keyup (event) ->
if event.which is 27
location.href = $('a.cancel').attr('href')
|
[
{
"context": "ple from rfc 2617\ntest1 = (success) ->\n realm = 'testrealm@host.com'\n nonce = 'dcd98b7102dd2f0e8b11d0f600bfb0c093'\n ",
"end": 211,
"score": 0.999916136264801,
"start": 193,
"tag": "EMAIL",
"value": "testrealm@host.com"
},
{
"context": "e = '0a4f113b'\n ha1 = dige... | test/digest.coffee | kelchy/sip.js | 221 | sip = require '../sip'
digest = require '../digest'
assert = require 'assert'
util = require 'util'
## testing digest calculation againts example from rfc 2617
test1 = (success) ->
realm = 'testrealm@host.com'
nonce = 'dcd98b7102dd2f0e8b11d0f600bfb0c093'
opaque= '5ccc069c403ebaf9f0171e9517f40e41'
cnonce = '0a4f113b'
ha1 = digest.calculateHA1({user:'Mufasa', realm: realm, password: 'Circle Of Life'});
assert.ok (digest.calculateDigest {
ha1:ha1,
method:'GET',
nonce:nonce,
nc:'00000001',
cnonce:cnonce,
qop:'auth',
uri:'/dir/index.html'
}) == '6629fae49393a05397450978507c4ef1'
success()
test2 = (success) ->
rq =
method: 'OPTIONS'
uri: 'sip:carol@chicago.com'
headers:
via: {host: 'pc33.atlanta.com', params: {branch: 'z9hG4bKhjhs8ass877'}}
to: {uri: 'sip:carol@chicago.com'}
from: {name: 'Alice', uri:'sip:alice@atlanta.com', params: {tag:'1928301774'}}
'call-id': 'a84b4c76e66710'
cseq: {seq: 63104, method: 'OPTIONS'}
contact: [{uri: 'sip:alice@pc33.atlanta.com'}]
accept: 'application/sdp'
'content-length': 0
server = {realm: 'test'}
rs = digest.challenge server, sip.makeResponse rq, 401, 'Authentication Required'
assert.ok rs.headers['www-authenticate'], "www-authenticate header not present"
client = {}
digest.signRequest client, rq, rs, {user:'carol', password: '1234'}
assert.ok digest.authenticateRequest server, rq, {user: 'carol', password: '1234'}
assert.ok digest.authenticateResponse client, digest.signResponse server, sip.makeResponse rq, 200
digest.signRequest client, rq
assert.ok digest.authenticateRequest server, rq
assert.ok digest.authenticateResponse client, digest.signResponse server, sip.makeResponse rq, 200
success()
test3 = (success) ->
rq =
method: 'OPTIONS'
uri: 'sip:carol@chicago.com'
headers:
via: {host: 'pc33.atlanta.com', params: {branch: 'z9hG4bKhjhs8ass877'}}
to: {uri: 'sip:carol@chicago.com'}
from: {name: 'Alice', uri:'sip:alice@atlanta.com', params: {tag:'1928301774'}}
'call-id': 'a84b4c76e66710'
cseq: {seq: 63104, method: 'OPTIONS'}
contact: [{uri: 'sip:alice@pc33.atlanta.com'}]
accept: 'application/sdp'
'content-length': 0
server = {realm: 'test'}
rs = digest.challenge server, sip.makeResponse rq, 407, 'Proxy Authentication Required'
assert.ok rs.headers['proxy-authenticate'], "proxy-authenticate header not present"
client = {}
digest.signRequest client, rq, rs, {user:'carol', password: '1234'}
assert.ok digest.authenticateRequest server, rq, {user: 'carol', password: '1234'}
digest.signRequest client, rq
assert.ok digest.authenticateRequest server, rq
success()
test4 = (success) ->
assert.ok (new Date() - digest.extractNonceTimestamp(digest.generateNonce('1234'), '1234')) < 1000, 'timestamped nonce fail'
success()
exports.tests = [test1, test2, test3, test4]
| 99475 | sip = require '../sip'
digest = require '../digest'
assert = require 'assert'
util = require 'util'
## testing digest calculation againts example from rfc 2617
test1 = (success) ->
realm = '<EMAIL>'
nonce = 'dcd98b7102dd2f0e8b11d0f600bfb0c093'
opaque= '5ccc069c403ebaf9f0171e9517f40e41'
cnonce = '0a4f113b'
ha1 = digest.calculateHA1({user:'Mufasa', realm: realm, password: '<PASSWORD>'});
assert.ok (digest.calculateDigest {
ha1:ha1,
method:'GET',
nonce:nonce,
nc:'00000001',
cnonce:cnonce,
qop:'auth',
uri:'/dir/index.html'
}) == '6629fae49393a05397450978507c4ef1'
success()
test2 = (success) ->
rq =
method: 'OPTIONS'
uri: 'sip:<EMAIL>'
headers:
via: {host: 'pc33.atlanta.com', params: {branch: 'z9hG4bKhjhs8ass877'}}
to: {uri: 'sip:<EMAIL>'}
from: {name: '<NAME>', uri:'sip:<EMAIL>', params: {tag:'1928301774'}}
'call-id': 'a84b4c76e66710'
cseq: {seq: 63104, method: 'OPTIONS'}
contact: [{uri: 'sip:<EMAIL>'}]
accept: 'application/sdp'
'content-length': 0
server = {realm: 'test'}
rs = digest.challenge server, sip.makeResponse rq, 401, 'Authentication Required'
assert.ok rs.headers['www-authenticate'], "www-authenticate header not present"
client = {}
digest.signRequest client, rq, rs, {user:'carol', password: '<PASSWORD>'}
assert.ok digest.authenticateRequest server, rq, {user: 'carol', password: '<PASSWORD>'}
assert.ok digest.authenticateResponse client, digest.signResponse server, sip.makeResponse rq, 200
digest.signRequest client, rq
assert.ok digest.authenticateRequest server, rq
assert.ok digest.authenticateResponse client, digest.signResponse server, sip.makeResponse rq, 200
success()
test3 = (success) ->
rq =
method: 'OPTIONS'
uri: 'sip:<EMAIL>'
headers:
via: {host: 'pc33.atlanta.com', params: {branch: 'z9hG4bKhjhs8ass877'}}
to: {uri: 'sip:<EMAIL>'}
from: {name: '<NAME>', uri:'sip:<EMAIL>', params: {tag:'1928301774'}}
'call-id': 'a84b4c76e66710'
cseq: {seq: 63104, method: 'OPTIONS'}
contact: [{uri: 'sip:<EMAIL>'}]
accept: 'application/sdp'
'content-length': 0
server = {realm: 'test'}
rs = digest.challenge server, sip.makeResponse rq, 407, 'Proxy Authentication Required'
assert.ok rs.headers['proxy-authenticate'], "proxy-authenticate header not present"
client = {}
digest.signRequest client, rq, rs, {user:'carol', password: '<PASSWORD>'}
assert.ok digest.authenticateRequest server, rq, {user: 'carol', password: '<PASSWORD>'}
digest.signRequest client, rq
assert.ok digest.authenticateRequest server, rq
success()
test4 = (success) ->
assert.ok (new Date() - digest.extractNonceTimestamp(digest.generateNonce('1234'), '1234')) < 1000, 'timestamped nonce fail'
success()
exports.tests = [test1, test2, test3, test4]
| true | sip = require '../sip'
digest = require '../digest'
assert = require 'assert'
util = require 'util'
## testing digest calculation againts example from rfc 2617
test1 = (success) ->
realm = 'PI:EMAIL:<EMAIL>END_PI'
nonce = 'dcd98b7102dd2f0e8b11d0f600bfb0c093'
opaque= '5ccc069c403ebaf9f0171e9517f40e41'
cnonce = '0a4f113b'
ha1 = digest.calculateHA1({user:'Mufasa', realm: realm, password: 'PI:PASSWORD:<PASSWORD>END_PI'});
assert.ok (digest.calculateDigest {
ha1:ha1,
method:'GET',
nonce:nonce,
nc:'00000001',
cnonce:cnonce,
qop:'auth',
uri:'/dir/index.html'
}) == '6629fae49393a05397450978507c4ef1'
success()
test2 = (success) ->
rq =
method: 'OPTIONS'
uri: 'sip:PI:EMAIL:<EMAIL>END_PI'
headers:
via: {host: 'pc33.atlanta.com', params: {branch: 'z9hG4bKhjhs8ass877'}}
to: {uri: 'sip:PI:EMAIL:<EMAIL>END_PI'}
from: {name: 'PI:NAME:<NAME>END_PI', uri:'sip:PI:EMAIL:<EMAIL>END_PI', params: {tag:'1928301774'}}
'call-id': 'a84b4c76e66710'
cseq: {seq: 63104, method: 'OPTIONS'}
contact: [{uri: 'sip:PI:EMAIL:<EMAIL>END_PI'}]
accept: 'application/sdp'
'content-length': 0
server = {realm: 'test'}
rs = digest.challenge server, sip.makeResponse rq, 401, 'Authentication Required'
assert.ok rs.headers['www-authenticate'], "www-authenticate header not present"
client = {}
digest.signRequest client, rq, rs, {user:'carol', password: 'PI:PASSWORD:<PASSWORD>END_PI'}
assert.ok digest.authenticateRequest server, rq, {user: 'carol', password: 'PI:PASSWORD:<PASSWORD>END_PI'}
assert.ok digest.authenticateResponse client, digest.signResponse server, sip.makeResponse rq, 200
digest.signRequest client, rq
assert.ok digest.authenticateRequest server, rq
assert.ok digest.authenticateResponse client, digest.signResponse server, sip.makeResponse rq, 200
success()
test3 = (success) ->
rq =
method: 'OPTIONS'
uri: 'sip:PI:EMAIL:<EMAIL>END_PI'
headers:
via: {host: 'pc33.atlanta.com', params: {branch: 'z9hG4bKhjhs8ass877'}}
to: {uri: 'sip:PI:EMAIL:<EMAIL>END_PI'}
from: {name: 'PI:NAME:<NAME>END_PI', uri:'sip:PI:EMAIL:<EMAIL>END_PI', params: {tag:'1928301774'}}
'call-id': 'a84b4c76e66710'
cseq: {seq: 63104, method: 'OPTIONS'}
contact: [{uri: 'sip:PI:EMAIL:<EMAIL>END_PI'}]
accept: 'application/sdp'
'content-length': 0
server = {realm: 'test'}
rs = digest.challenge server, sip.makeResponse rq, 407, 'Proxy Authentication Required'
assert.ok rs.headers['proxy-authenticate'], "proxy-authenticate header not present"
client = {}
digest.signRequest client, rq, rs, {user:'carol', password: 'PI:PASSWORD:<PASSWORD>END_PI'}
assert.ok digest.authenticateRequest server, rq, {user: 'carol', password: 'PI:PASSWORD:<PASSWORD>END_PI'}
digest.signRequest client, rq
assert.ok digest.authenticateRequest server, rq
success()
test4 = (success) ->
assert.ok (new Date() - digest.extractNonceTimestamp(digest.generateNonce('1234'), '1234')) < 1000, 'timestamped nonce fail'
success()
exports.tests = [test1, test2, test3, test4]
|
[
{
"context": "json')\n patient = collection.findWhere(first: 'GP_Peds', last: 'A')\n results = @measure.get('populati",
"end": 746,
"score": 0.8289687037467957,
"start": 739,
"tag": "NAME",
"value": "GP_Peds"
}
] | spec/javascripts/models/measure_spec.js.coffee | okeefm/bonnie | 0 | describe 'Measure', ->
beforeEach ->
@measure = bonnie.measures.get('40280381-3D61-56A7-013E-5D1EF9B76A48')
it 'has basic attributes available', ->
expect(@measure.id).toEqual '40280381-3D61-56A7-013E-5D1EF9B76A48'
expect(@measure.get('title')).toEqual 'Appropriate Testing for Children with Pharyngitis'
it 'has the expected number of populations', ->
expect(@measure.get('populations').length).toEqual 1
it 'has set itself as parent on measure_data_criteria', ->
expect(@measure.get("source_data_criteria").get('parent') == @measure)
it 'can calulate results for a patient', ->
collection = new Thorax.Collections.Patients getJSONFixture('patients.json')
patient = collection.findWhere(first: 'GP_Peds', last: 'A')
results = @measure.get('populations').at(0).calculate(patient)
waitsForAndRuns( -> results.isPopulated()
,
->
expect(results.get('DENEX')).toEqual 0
expect(results.get('DENEXCEP')).toEqual 0
expect(results.get('DENOM')).toEqual 1
expect(results.get('IPP')).toEqual 1
expect(results.get('NUMER')).toEqual 0
)
| 93501 | describe 'Measure', ->
beforeEach ->
@measure = bonnie.measures.get('40280381-3D61-56A7-013E-5D1EF9B76A48')
it 'has basic attributes available', ->
expect(@measure.id).toEqual '40280381-3D61-56A7-013E-5D1EF9B76A48'
expect(@measure.get('title')).toEqual 'Appropriate Testing for Children with Pharyngitis'
it 'has the expected number of populations', ->
expect(@measure.get('populations').length).toEqual 1
it 'has set itself as parent on measure_data_criteria', ->
expect(@measure.get("source_data_criteria").get('parent') == @measure)
it 'can calulate results for a patient', ->
collection = new Thorax.Collections.Patients getJSONFixture('patients.json')
patient = collection.findWhere(first: '<NAME>', last: 'A')
results = @measure.get('populations').at(0).calculate(patient)
waitsForAndRuns( -> results.isPopulated()
,
->
expect(results.get('DENEX')).toEqual 0
expect(results.get('DENEXCEP')).toEqual 0
expect(results.get('DENOM')).toEqual 1
expect(results.get('IPP')).toEqual 1
expect(results.get('NUMER')).toEqual 0
)
| true | describe 'Measure', ->
beforeEach ->
@measure = bonnie.measures.get('40280381-3D61-56A7-013E-5D1EF9B76A48')
it 'has basic attributes available', ->
expect(@measure.id).toEqual '40280381-3D61-56A7-013E-5D1EF9B76A48'
expect(@measure.get('title')).toEqual 'Appropriate Testing for Children with Pharyngitis'
it 'has the expected number of populations', ->
expect(@measure.get('populations').length).toEqual 1
it 'has set itself as parent on measure_data_criteria', ->
expect(@measure.get("source_data_criteria").get('parent') == @measure)
it 'can calulate results for a patient', ->
collection = new Thorax.Collections.Patients getJSONFixture('patients.json')
patient = collection.findWhere(first: 'PI:NAME:<NAME>END_PI', last: 'A')
results = @measure.get('populations').at(0).calculate(patient)
waitsForAndRuns( -> results.isPopulated()
,
->
expect(results.get('DENEX')).toEqual 0
expect(results.get('DENEXCEP')).toEqual 0
expect(results.get('DENOM')).toEqual 1
expect(results.get('IPP')).toEqual 1
expect(results.get('NUMER')).toEqual 0
)
|
[
{
"context": " - pixelir_sprite.coffee\n#\n# 2016.11.17 Created by PROJECT PROMINENCE\n#\n#***********************************",
"end": 320,
"score": 0.880776584148407,
"start": 313,
"tag": "NAME",
"value": "PROJECT"
},
{
"context": "ir_sprite.coffee\n#\n# 2016.11.17 Created by PROJE... | pixelir_sprite.coffee | digitarhythm/PixelirJS | 0 | #****************************************************************************
#****************************************************************************
#****************************************************************************
#
# PixelirJS Sprite Library - pixelir_sprite.coffee
#
# 2016.11.17 Created by PROJECT PROMINENCE
#
#****************************************************************************
#****************************************************************************
#****************************************************************************
class pixelir_sprite
# public variables
@x = undefined
@y = undefined
@z = undefined
@frameIndex = undefined
@hidden = undefined
@object = undefined
@width = undefined
@height = undefined
@scalew = undefined
@scaleh = undefined
@maxFrame = undefined
@rotate = undefined
@layer = undefined
@spriteID = undefined
@patternList = undefined
@patternNum = undefined
@animetime = undefined
@mesh = undefined
@objectwidth = undefined
@objectheight = undefined
@canvas = undefined
@context = undefined
@texture = undefined
@geometroy = undefined
# private variable
RAD = Math.PI / 180.0
constructor:(arr)->
@x = if (arr['x']?) then arr['x'] else 0
@y = if (arr['y']?) then arr['y'] else 0
@z = if (arr['z']?) then arr['z'] else 0
@xs = if (arr['xs']?) then arr['xs'] else 0
@ys = if (arr['ys']?) then arr['ys'] else 0
@zs = if (arr['zs']?) then arr['zs'] else 0
@gravity = if (arr['gravity']?) then arr['gravity'] else 0
@frameIndex = if (arr['frameIndex']?) then arr['frameIndex'] else 0
@hidden = if (arr['hidden']?) then arr['hidden'] else false
@object = if (arr['object']?) then arr['object'] else undefined
@width = if (arr['width']?) then arr['width'] else 32
@height = if (arr['height']?) then arr['height'] else 32
@orgscale = if (arr['orgscale']?) then arr['orgscale'] else 1.0
@xscale = if (arr['xscale']?) then arr['xscale'] else 1.0
@yscale = if (arr['yscale']?) then arr['yscale'] else 1.0
@zscale = if (arr['zscale']?) then arr['zscale'] else 1.0
@rotate = if (arr['rotate']?) then arr['rotate'] else 0.0
@xrotate = if (arr['xrotate']?) then arr['xrotate'] else undefined
@yrotate = if (arr['yrotate']?) then arr['yrotate'] else undefined
@zrotate = if (arr['zrotate']?) then arr['zrotate'] else undefined
@xsegments = if (arr['xsegments']?) then arr['xsegments'] else 1
@ysegments = if (arr['ysegments']?) then arr['ysegments'] else 1
@color = if (arr['color']?) then arr['color'] else '0x000000'
@patternList = if (arr['patternList']?) then arr['patternList'] else [[100, [0]]]
@patternNum = if (arr['patternNum']?) then arr['patternNum'] else 0
@spriteID = if (arr['spriteID']?) then arr['spriteID'] else undefined
switch (@object.type)
#========================================================================
# 画像
#========================================================================
when 'image'
image = @object.object
# texture width
@objectwidth = image.width
# texture height
@objectheight = image.height
@animetime = new Date().getTime()
spwnum = Math.floor(@objectwidth / @width)
sphnum = Math.floor(@objectheight / @height)
@maxFrame = (spwnum * sphnum) - 1
@canvas = document.createElement("canvas")
@canvas.width = @objectwidth
@canvas.height = @objectheight
pattern = @patternList[@patternNum][1]
index = pattern[@frameIndex]
wnum = Math.floor(@objectwidth / @width)
hnum = Math.floor(@objectheight / @height)
if (index > @maxFrame)
num = index % @maxFrame
startx = (index % wnum) * @width
starty = ((hnum - (Math.floor(index / wnum))) - 1) * @height
@context = @canvas.getContext("2d")
@context.drawImage(image, 0, 0, @objectwidth, @objectheight, 0, 0, @objectwidth, @objectheight)
@texture = new THREE.CanvasTexture(@canvas)
@texture.minFilter = THREE.LinearFilter
@texture.repeat.set(@width / @objectwidth, @height / @objectheight)
@texture.offset.x = startx / @objectwidth
@texture.offset.y = starty / @objectheight
@geometry = new THREE.PlaneGeometry(@width * @orgscale * @xscale, @height * @orgscale * @yscale)
material = new THREE.MeshBasicMaterial
map:@texture
transparent:true
@mesh = new THREE.Mesh(@geometry, material)
@mesh.rotation.z = @rotate * RAD
@mesh.castShadow = true
@texture.needsUpdate = true
#========================================================================
# primitive
#========================================================================
when 'primitive'
type = @object.object
switch (type)
when 'plane'
@geometry = new THREE.PlaneGeometry(@width * @orgscale * @xscale, @height * @orgscale * @yscale, @xsegments, @ysegments)
#material = new THREE.MeshPhongMaterial
#material = new THREE.MeshLambertMaterial
material = new THREE.MeshStandardMaterial
color: parseInt(@color, 16)
specular: 0x999999
shininess: 60
side: THREE.DoubleSide
transparent:true
@mesh = new THREE.Mesh(@geometry, material)
@mesh.castShadow = true
@mesh.receiveShadow = true
@mesh.position.set(@x, @y, @z)
@mesh.rotation.x = @xrotate * RAD if (@xrotate?)
@mesh.rotation.y = @yrotate * RAD if (@yrotate?)
@mesh.rotation.z = @zrotate * RAD if (@zrotate?)
when 'cube'
@geometry = new THREE.BoxGeometry(@orgscale * @xscale, @orgscale * @yscale, @orgscale * @zscale)
#material = new THREE.MeshLambertMaterial
material = new THREE.MeshStandardMaterial
color: parseInt(@color, 16)
transparent: true
@mesh = new THREE.Mesh(@geometry, material)
@mesh.castShadow = true
@mesh.receiveShadow = true
@mesh.position.set(@x, @y, @z)
@mesh.rotation.x = @xrotate * RAD if (@xrotate?)
@mesh.rotation.y = @yrotate * RAD if (@yrotate?)
@mesh.rotation.z = @zrotate * RAD if (@zrotate?)
#========================================================================
# Collada
#========================================================================
when 'collada'
@mesh = @object.object.clone()
@mesh.castShadow = true
@mesh.receiveShadow = true
@mesh.position.set(@x, @y, @z)
@mesh.scale.set(@xscale * @orgscale, @yscale * @orgscale, @zscale * @orgscale)
@mesh.rotation.x = @xrotate * RAD if (@xrotate?)
@mesh.rotation.y = @yrotate * RAD if (@yrotate?)
@mesh.rotation.z = @zrotate * RAD if (@zrotate?)
setCharacterPicture:(@frameIndex)->
pattern = @patternList[@patternNum][1]
index = pattern[@frameIndex]
wnum = Math.floor(@objectwidth / @width)
hnum = Math.floor(@objectheight / @height)
if (index > @maxFrame)
num = index % @maxFrame
startx = (index % wnum) * @width
starty = (((hnum - (Math.floor(index / wnum))) - 1) * @height) - 1
@texture.offset.x = (startx / @objectwidth)
@texture.offset.y = (starty / @objectheight)
| 213640 | #****************************************************************************
#****************************************************************************
#****************************************************************************
#
# PixelirJS Sprite Library - pixelir_sprite.coffee
#
# 2016.11.17 Created by <NAME> <NAME>
#
#****************************************************************************
#****************************************************************************
#****************************************************************************
class pixelir_sprite
# public variables
@x = undefined
@y = undefined
@z = undefined
@frameIndex = undefined
@hidden = undefined
@object = undefined
@width = undefined
@height = undefined
@scalew = undefined
@scaleh = undefined
@maxFrame = undefined
@rotate = undefined
@layer = undefined
@spriteID = undefined
@patternList = undefined
@patternNum = undefined
@animetime = undefined
@mesh = undefined
@objectwidth = undefined
@objectheight = undefined
@canvas = undefined
@context = undefined
@texture = undefined
@geometroy = undefined
# private variable
RAD = Math.PI / 180.0
constructor:(arr)->
@x = if (arr['x']?) then arr['x'] else 0
@y = if (arr['y']?) then arr['y'] else 0
@z = if (arr['z']?) then arr['z'] else 0
@xs = if (arr['xs']?) then arr['xs'] else 0
@ys = if (arr['ys']?) then arr['ys'] else 0
@zs = if (arr['zs']?) then arr['zs'] else 0
@gravity = if (arr['gravity']?) then arr['gravity'] else 0
@frameIndex = if (arr['frameIndex']?) then arr['frameIndex'] else 0
@hidden = if (arr['hidden']?) then arr['hidden'] else false
@object = if (arr['object']?) then arr['object'] else undefined
@width = if (arr['width']?) then arr['width'] else 32
@height = if (arr['height']?) then arr['height'] else 32
@orgscale = if (arr['orgscale']?) then arr['orgscale'] else 1.0
@xscale = if (arr['xscale']?) then arr['xscale'] else 1.0
@yscale = if (arr['yscale']?) then arr['yscale'] else 1.0
@zscale = if (arr['zscale']?) then arr['zscale'] else 1.0
@rotate = if (arr['rotate']?) then arr['rotate'] else 0.0
@xrotate = if (arr['xrotate']?) then arr['xrotate'] else undefined
@yrotate = if (arr['yrotate']?) then arr['yrotate'] else undefined
@zrotate = if (arr['zrotate']?) then arr['zrotate'] else undefined
@xsegments = if (arr['xsegments']?) then arr['xsegments'] else 1
@ysegments = if (arr['ysegments']?) then arr['ysegments'] else 1
@color = if (arr['color']?) then arr['color'] else '0x000000'
@patternList = if (arr['patternList']?) then arr['patternList'] else [[100, [0]]]
@patternNum = if (arr['patternNum']?) then arr['patternNum'] else 0
@spriteID = if (arr['spriteID']?) then arr['spriteID'] else undefined
switch (@object.type)
#========================================================================
# 画像
#========================================================================
when 'image'
image = @object.object
# texture width
@objectwidth = image.width
# texture height
@objectheight = image.height
@animetime = new Date().getTime()
spwnum = Math.floor(@objectwidth / @width)
sphnum = Math.floor(@objectheight / @height)
@maxFrame = (spwnum * sphnum) - 1
@canvas = document.createElement("canvas")
@canvas.width = @objectwidth
@canvas.height = @objectheight
pattern = @patternList[@patternNum][1]
index = pattern[@frameIndex]
wnum = Math.floor(@objectwidth / @width)
hnum = Math.floor(@objectheight / @height)
if (index > @maxFrame)
num = index % @maxFrame
startx = (index % wnum) * @width
starty = ((hnum - (Math.floor(index / wnum))) - 1) * @height
@context = @canvas.getContext("2d")
@context.drawImage(image, 0, 0, @objectwidth, @objectheight, 0, 0, @objectwidth, @objectheight)
@texture = new THREE.CanvasTexture(@canvas)
@texture.minFilter = THREE.LinearFilter
@texture.repeat.set(@width / @objectwidth, @height / @objectheight)
@texture.offset.x = startx / @objectwidth
@texture.offset.y = starty / @objectheight
@geometry = new THREE.PlaneGeometry(@width * @orgscale * @xscale, @height * @orgscale * @yscale)
material = new THREE.MeshBasicMaterial
map:@texture
transparent:true
@mesh = new THREE.Mesh(@geometry, material)
@mesh.rotation.z = @rotate * RAD
@mesh.castShadow = true
@texture.needsUpdate = true
#========================================================================
# primitive
#========================================================================
when 'primitive'
type = @object.object
switch (type)
when 'plane'
@geometry = new THREE.PlaneGeometry(@width * @orgscale * @xscale, @height * @orgscale * @yscale, @xsegments, @ysegments)
#material = new THREE.MeshPhongMaterial
#material = new THREE.MeshLambertMaterial
material = new THREE.MeshStandardMaterial
color: parseInt(@color, 16)
specular: 0x999999
shininess: 60
side: THREE.DoubleSide
transparent:true
@mesh = new THREE.Mesh(@geometry, material)
@mesh.castShadow = true
@mesh.receiveShadow = true
@mesh.position.set(@x, @y, @z)
@mesh.rotation.x = @xrotate * RAD if (@xrotate?)
@mesh.rotation.y = @yrotate * RAD if (@yrotate?)
@mesh.rotation.z = @zrotate * RAD if (@zrotate?)
when 'cube'
@geometry = new THREE.BoxGeometry(@orgscale * @xscale, @orgscale * @yscale, @orgscale * @zscale)
#material = new THREE.MeshLambertMaterial
material = new THREE.MeshStandardMaterial
color: parseInt(@color, 16)
transparent: true
@mesh = new THREE.Mesh(@geometry, material)
@mesh.castShadow = true
@mesh.receiveShadow = true
@mesh.position.set(@x, @y, @z)
@mesh.rotation.x = @xrotate * RAD if (@xrotate?)
@mesh.rotation.y = @yrotate * RAD if (@yrotate?)
@mesh.rotation.z = @zrotate * RAD if (@zrotate?)
#========================================================================
# Collada
#========================================================================
when 'collada'
@mesh = @object.object.clone()
@mesh.castShadow = true
@mesh.receiveShadow = true
@mesh.position.set(@x, @y, @z)
@mesh.scale.set(@xscale * @orgscale, @yscale * @orgscale, @zscale * @orgscale)
@mesh.rotation.x = @xrotate * RAD if (@xrotate?)
@mesh.rotation.y = @yrotate * RAD if (@yrotate?)
@mesh.rotation.z = @zrotate * RAD if (@zrotate?)
setCharacterPicture:(@frameIndex)->
pattern = @patternList[@patternNum][1]
index = pattern[@frameIndex]
wnum = Math.floor(@objectwidth / @width)
hnum = Math.floor(@objectheight / @height)
if (index > @maxFrame)
num = index % @maxFrame
startx = (index % wnum) * @width
starty = (((hnum - (Math.floor(index / wnum))) - 1) * @height) - 1
@texture.offset.x = (startx / @objectwidth)
@texture.offset.y = (starty / @objectheight)
| true | #****************************************************************************
#****************************************************************************
#****************************************************************************
#
# PixelirJS Sprite Library - pixelir_sprite.coffee
#
# 2016.11.17 Created by PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI
#
#****************************************************************************
#****************************************************************************
#****************************************************************************
class pixelir_sprite
# public variables
@x = undefined
@y = undefined
@z = undefined
@frameIndex = undefined
@hidden = undefined
@object = undefined
@width = undefined
@height = undefined
@scalew = undefined
@scaleh = undefined
@maxFrame = undefined
@rotate = undefined
@layer = undefined
@spriteID = undefined
@patternList = undefined
@patternNum = undefined
@animetime = undefined
@mesh = undefined
@objectwidth = undefined
@objectheight = undefined
@canvas = undefined
@context = undefined
@texture = undefined
@geometroy = undefined
# private variable
RAD = Math.PI / 180.0
constructor:(arr)->
@x = if (arr['x']?) then arr['x'] else 0
@y = if (arr['y']?) then arr['y'] else 0
@z = if (arr['z']?) then arr['z'] else 0
@xs = if (arr['xs']?) then arr['xs'] else 0
@ys = if (arr['ys']?) then arr['ys'] else 0
@zs = if (arr['zs']?) then arr['zs'] else 0
@gravity = if (arr['gravity']?) then arr['gravity'] else 0
@frameIndex = if (arr['frameIndex']?) then arr['frameIndex'] else 0
@hidden = if (arr['hidden']?) then arr['hidden'] else false
@object = if (arr['object']?) then arr['object'] else undefined
@width = if (arr['width']?) then arr['width'] else 32
@height = if (arr['height']?) then arr['height'] else 32
@orgscale = if (arr['orgscale']?) then arr['orgscale'] else 1.0
@xscale = if (arr['xscale']?) then arr['xscale'] else 1.0
@yscale = if (arr['yscale']?) then arr['yscale'] else 1.0
@zscale = if (arr['zscale']?) then arr['zscale'] else 1.0
@rotate = if (arr['rotate']?) then arr['rotate'] else 0.0
@xrotate = if (arr['xrotate']?) then arr['xrotate'] else undefined
@yrotate = if (arr['yrotate']?) then arr['yrotate'] else undefined
@zrotate = if (arr['zrotate']?) then arr['zrotate'] else undefined
@xsegments = if (arr['xsegments']?) then arr['xsegments'] else 1
@ysegments = if (arr['ysegments']?) then arr['ysegments'] else 1
@color = if (arr['color']?) then arr['color'] else '0x000000'
@patternList = if (arr['patternList']?) then arr['patternList'] else [[100, [0]]]
@patternNum = if (arr['patternNum']?) then arr['patternNum'] else 0
@spriteID = if (arr['spriteID']?) then arr['spriteID'] else undefined
switch (@object.type)
#========================================================================
# 画像
#========================================================================
when 'image'
image = @object.object
# texture width
@objectwidth = image.width
# texture height
@objectheight = image.height
@animetime = new Date().getTime()
spwnum = Math.floor(@objectwidth / @width)
sphnum = Math.floor(@objectheight / @height)
@maxFrame = (spwnum * sphnum) - 1
@canvas = document.createElement("canvas")
@canvas.width = @objectwidth
@canvas.height = @objectheight
pattern = @patternList[@patternNum][1]
index = pattern[@frameIndex]
wnum = Math.floor(@objectwidth / @width)
hnum = Math.floor(@objectheight / @height)
if (index > @maxFrame)
num = index % @maxFrame
startx = (index % wnum) * @width
starty = ((hnum - (Math.floor(index / wnum))) - 1) * @height
@context = @canvas.getContext("2d")
@context.drawImage(image, 0, 0, @objectwidth, @objectheight, 0, 0, @objectwidth, @objectheight)
@texture = new THREE.CanvasTexture(@canvas)
@texture.minFilter = THREE.LinearFilter
@texture.repeat.set(@width / @objectwidth, @height / @objectheight)
@texture.offset.x = startx / @objectwidth
@texture.offset.y = starty / @objectheight
@geometry = new THREE.PlaneGeometry(@width * @orgscale * @xscale, @height * @orgscale * @yscale)
material = new THREE.MeshBasicMaterial
map:@texture
transparent:true
@mesh = new THREE.Mesh(@geometry, material)
@mesh.rotation.z = @rotate * RAD
@mesh.castShadow = true
@texture.needsUpdate = true
#========================================================================
# primitive
#========================================================================
when 'primitive'
type = @object.object
switch (type)
when 'plane'
@geometry = new THREE.PlaneGeometry(@width * @orgscale * @xscale, @height * @orgscale * @yscale, @xsegments, @ysegments)
#material = new THREE.MeshPhongMaterial
#material = new THREE.MeshLambertMaterial
material = new THREE.MeshStandardMaterial
color: parseInt(@color, 16)
specular: 0x999999
shininess: 60
side: THREE.DoubleSide
transparent:true
@mesh = new THREE.Mesh(@geometry, material)
@mesh.castShadow = true
@mesh.receiveShadow = true
@mesh.position.set(@x, @y, @z)
@mesh.rotation.x = @xrotate * RAD if (@xrotate?)
@mesh.rotation.y = @yrotate * RAD if (@yrotate?)
@mesh.rotation.z = @zrotate * RAD if (@zrotate?)
when 'cube'
@geometry = new THREE.BoxGeometry(@orgscale * @xscale, @orgscale * @yscale, @orgscale * @zscale)
#material = new THREE.MeshLambertMaterial
material = new THREE.MeshStandardMaterial
color: parseInt(@color, 16)
transparent: true
@mesh = new THREE.Mesh(@geometry, material)
@mesh.castShadow = true
@mesh.receiveShadow = true
@mesh.position.set(@x, @y, @z)
@mesh.rotation.x = @xrotate * RAD if (@xrotate?)
@mesh.rotation.y = @yrotate * RAD if (@yrotate?)
@mesh.rotation.z = @zrotate * RAD if (@zrotate?)
#========================================================================
# Collada
#========================================================================
when 'collada'
@mesh = @object.object.clone()
@mesh.castShadow = true
@mesh.receiveShadow = true
@mesh.position.set(@x, @y, @z)
@mesh.scale.set(@xscale * @orgscale, @yscale * @orgscale, @zscale * @orgscale)
@mesh.rotation.x = @xrotate * RAD if (@xrotate?)
@mesh.rotation.y = @yrotate * RAD if (@yrotate?)
@mesh.rotation.z = @zrotate * RAD if (@zrotate?)
setCharacterPicture:(@frameIndex)->
pattern = @patternList[@patternNum][1]
index = pattern[@frameIndex]
wnum = Math.floor(@objectwidth / @width)
hnum = Math.floor(@objectheight / @height)
if (index > @maxFrame)
num = index % @maxFrame
startx = (index % wnum) * @width
starty = (((hnum - (Math.floor(index / wnum))) - 1) * @height) - 1
@texture.offset.x = (startx / @objectwidth)
@texture.offset.y = (starty / @objectheight)
|
[
{
"context": "#\n# @license Andrew Brown v1.0.0\n# (c) 2013 http://monsterboxpro.com\n# Lice",
"end": 25,
"score": 0.9998111724853516,
"start": 13,
"tag": "NAME",
"value": "Andrew Brown"
},
{
"context": " req = @$http\n method: 'POST'\n url: name\n data: form_da... | app/assets/javascripts/monster/api.coffee | monsterboxpro/monster-javascripts | 0 | #
# @license Andrew Brown v1.0.0
# (c) 2013 http://monsterboxpro.com
# License: MIT
#
# Passing Params
# @Api.users.index({student_id: 23})
#
# Add a tag to end of event string
# @Api.users.index({},tag: 'success') => 'users/index#success'
parameter_name = (root)->
name = root[0]
name += '[' + root.slice(1).join('][') + ']' if root.length > 1
name
has_attached_file = (value)->
result = false
if typeof value == 'object' && !(value instanceof File)
for own k,v of value
result |= has_attached_file v
else if typeof value == 'array'
for vv in v
result |= has_attached_file vv
else
result |= value instanceof File
return result
form_object_to_form_data = (value,fd=null,root=[]) ->
fd = new FormData() unless fd
if typeof value == 'object' && !(value instanceof File)
for own k,v of value
form_object_to_form_data v, fd, root.concat [k]
else if typeof value == 'array'
for i,vv in value
form_object_to_form_data vv, fd, root.concat [i]
else
return if _.last(root)[0] == '$' # Skip angular attributes like $$hashKey
fd.append parameter_name(root), value
fd
class window.ApiBase
_get:(table_name,action,name,params={},opts={})=>
params.socket_id = window.socket_id if window.socket_id
req = @$http
method : 'GET'
url : name
params : params
@_callback table_name, req, action, opts
_post:(table_name,action,name,params={},opts={})=>
params.socket_id = window.socket_id if window.socket_id
if has_attached_file(params)
form_data = form_object_to_form_data(params)
req = @$http
method: 'POST'
url: name
data: form_data
transformRequest: angular.identity
headers:
'Content-Type': undefined
else
req = @$http.post name, params
@_callback table_name, req, action, opts
_put:(table_name,action,name,params,opts={})=>
params.socket_id = window.socket_id if window.socket_id
if has_attached_file(params)
form_data = form_object_to_form_data(params)
req = @$http
method: 'PUT'
url: name
data: form_data
transformRequest: angular.identity
headers:
'Content-Type': undefined
else
req = @$http.put name, params
@_callback table_name, req, action, opts
_delete:(table_name,action,name,params={},opts={})=>
params.socket_id = window.socket_id if window.socket_id
req = @$http
method : 'DELETE'
url : name
params : params
@_callback table_name, req, action, opts
_callback:(table_name,req,action,opts)=>
msg = "#{table_name}/#{action}"
msg = "#{@_scope}/#{msg}" if @_scope
nmsg = if opts.tag
"#{msg}##{opts.tag}"
else
msg
req.success (data, status, headers, config)=> @$rootScope.$broadcast nmsg , data, opts, status, headers, config
req.error (data, status, headers, config)=> @$rootScope.$broadcast "#{msg}#err", data, opts, status, headers, config
req
_extract_id:(model)=>
if typeof model is 'string' || typeof model is 'number'
model
else
model.id
path:(args...)=>
path = []
path.push @namespace if @namespace
#path.push args.shift
path.push a for a in args
path = path.join '/'
"/#{path}"
constructor:(@$rootScope,@$http)->
_.each @resources, (options, table_name) =>
@[table_name] =
index : (params,opts)=> @_get table_name, 'index' , @path(table_name) , params, opts
new : (params,opts)=> @_get table_name, 'new' , @path(table_name,'new') , params, opts
create : (params,opts)=> @_post table_name, 'create' , @path(table_name) , params, opts
show : (model,params,opts)=> @_get table_name, 'show' , @path(table_name,@_extract_id(model)) , params, opts
edit : (model,params,opts)=> @_get table_name, 'edit' , @path(table_name,@_extract_id(model),'edit') , params, opts
update : (model,params,opts)=> @_put table_name, 'update' , @path(table_name,@_extract_id(model)) , params, opts
destroy : (model,params,opts)=> @_delete table_name, 'destroy', @path(table_name,@_extract_id(model)) , params, opts
_.each options.collection, (method, action) =>
name = @path table_name, action
fun = switch method
when 'get' then (params,opts)=> @_get table_name, action, name, params, opts
when 'post' then (params,opts)=> @_post table_name, action, name, params, opts
when 'put' then (params,opts)=> @_put table_name, action, name, params, opts
when 'destroy' then (params,opts)=> @_delete table_name, action, name, params, opts
@[table_name][action] = fun
_.each options.member, (method, action) =>
fun = switch method
when 'get' then (model,params,opts)=> @_get table_name, action, @path(table_name, model.id, action), params, opts
when 'post' then (model,params,opts)=> @_post table_name, action, @path(table_name, model.id, action), params, opts
when 'put' then (model,params,opts)=> @_put table_name, action, @path(table_name, model.id, action), params, opts
when 'destroy' then (model,params,opts)=> @_delete table_name, action, @path(table_name, model.id, action), params, opts
@[table_name][action] = fun
scope:(args...)=>
# Scopes the URL & events
# Task.scope('stories', story.id).create { name: 'New Task' }
# Event names will look like: stories/1/tasks/create
# URL will be: api/v1/stories/1/tasks
scope = args.join '/'
result = @prefix scope
result._scope ?= []
result._scope.push scope
result
prefix:(args...)=>
# Prefixes the URL
# Story.prefix('projects/1').create { name: 'New Story' }
# Event name will look like: stories/create
# URL will be: api/v1/projects/1/storyes
clone = new @constructor @$rootScope, @$http
namespace = args.join '/'
clone.namespace = "#{@namespace}/#{namespace}"
clone
| 216769 | #
# @license <NAME> v1.0.0
# (c) 2013 http://monsterboxpro.com
# License: MIT
#
# Passing Params
# @Api.users.index({student_id: 23})
#
# Add a tag to end of event string
# @Api.users.index({},tag: 'success') => 'users/index#success'
parameter_name = (root)->
name = root[0]
name += '[' + root.slice(1).join('][') + ']' if root.length > 1
name
has_attached_file = (value)->
result = false
if typeof value == 'object' && !(value instanceof File)
for own k,v of value
result |= has_attached_file v
else if typeof value == 'array'
for vv in v
result |= has_attached_file vv
else
result |= value instanceof File
return result
form_object_to_form_data = (value,fd=null,root=[]) ->
fd = new FormData() unless fd
if typeof value == 'object' && !(value instanceof File)
for own k,v of value
form_object_to_form_data v, fd, root.concat [k]
else if typeof value == 'array'
for i,vv in value
form_object_to_form_data vv, fd, root.concat [i]
else
return if _.last(root)[0] == '$' # Skip angular attributes like $$hashKey
fd.append parameter_name(root), value
fd
class window.ApiBase
_get:(table_name,action,name,params={},opts={})=>
params.socket_id = window.socket_id if window.socket_id
req = @$http
method : 'GET'
url : name
params : params
@_callback table_name, req, action, opts
_post:(table_name,action,name,params={},opts={})=>
params.socket_id = window.socket_id if window.socket_id
if has_attached_file(params)
form_data = form_object_to_form_data(params)
req = @$http
method: 'POST'
url: <NAME>
data: form_data
transformRequest: angular.identity
headers:
'Content-Type': undefined
else
req = @$http.post name, params
@_callback table_name, req, action, opts
_put:(table_name,action,name,params,opts={})=>
params.socket_id = window.socket_id if window.socket_id
if has_attached_file(params)
form_data = form_object_to_form_data(params)
req = @$http
method: 'PUT'
url: name
data: form_data
transformRequest: angular.identity
headers:
'Content-Type': undefined
else
req = @$http.put name, params
@_callback table_name, req, action, opts
_delete:(table_name,action,name,params={},opts={})=>
params.socket_id = window.socket_id if window.socket_id
req = @$http
method : 'DELETE'
url : name
params : params
@_callback table_name, req, action, opts
_callback:(table_name,req,action,opts)=>
msg = "#{table_name}/#{action}"
msg = "#{@_scope}/#{msg}" if @_scope
nmsg = if opts.tag
"#{msg}##{opts.tag}"
else
msg
req.success (data, status, headers, config)=> @$rootScope.$broadcast nmsg , data, opts, status, headers, config
req.error (data, status, headers, config)=> @$rootScope.$broadcast "#{msg}#err", data, opts, status, headers, config
req
_extract_id:(model)=>
if typeof model is 'string' || typeof model is 'number'
model
else
model.id
path:(args...)=>
path = []
path.push @namespace if @namespace
#path.push args.shift
path.push a for a in args
path = path.join '/'
"/#{path}"
constructor:(@$rootScope,@$http)->
_.each @resources, (options, table_name) =>
@[table_name] =
index : (params,opts)=> @_get table_name, 'index' , @path(table_name) , params, opts
new : (params,opts)=> @_get table_name, 'new' , @path(table_name,'new') , params, opts
create : (params,opts)=> @_post table_name, 'create' , @path(table_name) , params, opts
show : (model,params,opts)=> @_get table_name, 'show' , @path(table_name,@_extract_id(model)) , params, opts
edit : (model,params,opts)=> @_get table_name, 'edit' , @path(table_name,@_extract_id(model),'edit') , params, opts
update : (model,params,opts)=> @_put table_name, 'update' , @path(table_name,@_extract_id(model)) , params, opts
destroy : (model,params,opts)=> @_delete table_name, 'destroy', @path(table_name,@_extract_id(model)) , params, opts
_.each options.collection, (method, action) =>
name = @path table_name, action
fun = switch method
when 'get' then (params,opts)=> @_get table_name, action, name, params, opts
when 'post' then (params,opts)=> @_post table_name, action, name, params, opts
when 'put' then (params,opts)=> @_put table_name, action, name, params, opts
when 'destroy' then (params,opts)=> @_delete table_name, action, name, params, opts
@[table_name][action] = fun
_.each options.member, (method, action) =>
fun = switch method
when 'get' then (model,params,opts)=> @_get table_name, action, @path(table_name, model.id, action), params, opts
when 'post' then (model,params,opts)=> @_post table_name, action, @path(table_name, model.id, action), params, opts
when 'put' then (model,params,opts)=> @_put table_name, action, @path(table_name, model.id, action), params, opts
when 'destroy' then (model,params,opts)=> @_delete table_name, action, @path(table_name, model.id, action), params, opts
@[table_name][action] = fun
scope:(args...)=>
# Scopes the URL & events
# Task.scope('stories', story.id).create { name: '<NAME> Task' }
# Event names will look like: stories/1/tasks/create
# URL will be: api/v1/stories/1/tasks
scope = args.join '/'
result = @prefix scope
result._scope ?= []
result._scope.push scope
result
prefix:(args...)=>
# Prefixes the URL
# Story.prefix('projects/1').create { name: '<NAME> Story' }
# Event name will look like: stories/create
# URL will be: api/v1/projects/1/storyes
clone = new @constructor @$rootScope, @$http
namespace = args.join '/'
clone.namespace = "#{@namespace}/#{namespace}"
clone
| true | #
# @license PI:NAME:<NAME>END_PI v1.0.0
# (c) 2013 http://monsterboxpro.com
# License: MIT
#
# Passing Params
# @Api.users.index({student_id: 23})
#
# Add a tag to end of event string
# @Api.users.index({},tag: 'success') => 'users/index#success'
parameter_name = (root)->
name = root[0]
name += '[' + root.slice(1).join('][') + ']' if root.length > 1
name
has_attached_file = (value)->
result = false
if typeof value == 'object' && !(value instanceof File)
for own k,v of value
result |= has_attached_file v
else if typeof value == 'array'
for vv in v
result |= has_attached_file vv
else
result |= value instanceof File
return result
form_object_to_form_data = (value,fd=null,root=[]) ->
fd = new FormData() unless fd
if typeof value == 'object' && !(value instanceof File)
for own k,v of value
form_object_to_form_data v, fd, root.concat [k]
else if typeof value == 'array'
for i,vv in value
form_object_to_form_data vv, fd, root.concat [i]
else
return if _.last(root)[0] == '$' # Skip angular attributes like $$hashKey
fd.append parameter_name(root), value
fd
class window.ApiBase
_get:(table_name,action,name,params={},opts={})=>
params.socket_id = window.socket_id if window.socket_id
req = @$http
method : 'GET'
url : name
params : params
@_callback table_name, req, action, opts
_post:(table_name,action,name,params={},opts={})=>
params.socket_id = window.socket_id if window.socket_id
if has_attached_file(params)
form_data = form_object_to_form_data(params)
req = @$http
method: 'POST'
url: PI:NAME:<NAME>END_PI
data: form_data
transformRequest: angular.identity
headers:
'Content-Type': undefined
else
req = @$http.post name, params
@_callback table_name, req, action, opts
_put:(table_name,action,name,params,opts={})=>
params.socket_id = window.socket_id if window.socket_id
if has_attached_file(params)
form_data = form_object_to_form_data(params)
req = @$http
method: 'PUT'
url: name
data: form_data
transformRequest: angular.identity
headers:
'Content-Type': undefined
else
req = @$http.put name, params
@_callback table_name, req, action, opts
_delete:(table_name,action,name,params={},opts={})=>
params.socket_id = window.socket_id if window.socket_id
req = @$http
method : 'DELETE'
url : name
params : params
@_callback table_name, req, action, opts
_callback:(table_name,req,action,opts)=>
msg = "#{table_name}/#{action}"
msg = "#{@_scope}/#{msg}" if @_scope
nmsg = if opts.tag
"#{msg}##{opts.tag}"
else
msg
req.success (data, status, headers, config)=> @$rootScope.$broadcast nmsg , data, opts, status, headers, config
req.error (data, status, headers, config)=> @$rootScope.$broadcast "#{msg}#err", data, opts, status, headers, config
req
_extract_id:(model)=>
if typeof model is 'string' || typeof model is 'number'
model
else
model.id
path:(args...)=>
path = []
path.push @namespace if @namespace
#path.push args.shift
path.push a for a in args
path = path.join '/'
"/#{path}"
constructor:(@$rootScope,@$http)->
_.each @resources, (options, table_name) =>
@[table_name] =
index : (params,opts)=> @_get table_name, 'index' , @path(table_name) , params, opts
new : (params,opts)=> @_get table_name, 'new' , @path(table_name,'new') , params, opts
create : (params,opts)=> @_post table_name, 'create' , @path(table_name) , params, opts
show : (model,params,opts)=> @_get table_name, 'show' , @path(table_name,@_extract_id(model)) , params, opts
edit : (model,params,opts)=> @_get table_name, 'edit' , @path(table_name,@_extract_id(model),'edit') , params, opts
update : (model,params,opts)=> @_put table_name, 'update' , @path(table_name,@_extract_id(model)) , params, opts
destroy : (model,params,opts)=> @_delete table_name, 'destroy', @path(table_name,@_extract_id(model)) , params, opts
_.each options.collection, (method, action) =>
name = @path table_name, action
fun = switch method
when 'get' then (params,opts)=> @_get table_name, action, name, params, opts
when 'post' then (params,opts)=> @_post table_name, action, name, params, opts
when 'put' then (params,opts)=> @_put table_name, action, name, params, opts
when 'destroy' then (params,opts)=> @_delete table_name, action, name, params, opts
@[table_name][action] = fun
_.each options.member, (method, action) =>
fun = switch method
when 'get' then (model,params,opts)=> @_get table_name, action, @path(table_name, model.id, action), params, opts
when 'post' then (model,params,opts)=> @_post table_name, action, @path(table_name, model.id, action), params, opts
when 'put' then (model,params,opts)=> @_put table_name, action, @path(table_name, model.id, action), params, opts
when 'destroy' then (model,params,opts)=> @_delete table_name, action, @path(table_name, model.id, action), params, opts
@[table_name][action] = fun
scope:(args...)=>
# Scopes the URL & events
# Task.scope('stories', story.id).create { name: 'PI:NAME:<NAME>END_PI Task' }
# Event names will look like: stories/1/tasks/create
# URL will be: api/v1/stories/1/tasks
scope = args.join '/'
result = @prefix scope
result._scope ?= []
result._scope.push scope
result
prefix:(args...)=>
# Prefixes the URL
# Story.prefix('projects/1').create { name: 'PI:NAME:<NAME>END_PI Story' }
# Event name will look like: stories/create
# URL will be: api/v1/projects/1/storyes
clone = new @constructor @$rootScope, @$http
namespace = args.join '/'
clone.namespace = "#{@namespace}/#{namespace}"
clone
|
[
{
"context": "# Copyright (c) 2014. David M. Lee, II <leedm777@yahoo.com>\n'use strict'\n\nPromise = ",
"end": 34,
"score": 0.9998535513877869,
"start": 22,
"tag": "NAME",
"value": "David M. Lee"
},
{
"context": "# Copyright (c) 2014. David M. Lee, II <leedm777@yahoo.com>\n'use stric... | src/configure.coffee | building5/parfaitjs | 1 | # Copyright (c) 2014. David M. Lee, II <leedm777@yahoo.com>
'use strict'
Promise = (require 'es6-promise').Promise
appdirs = require 'appdirs'
confmerge = require './confmerge'
jsyaml = require 'js-yaml'
path = require 'path'
{readFile, stat, readdir} = require './fs-promise'
###
Process configuration files.
@param {String} environment Environment to select.
@param {String} directory Directory to process configuration files.
@param {*} preConfig Base configuration to start with.
@param {*} postConfig Config to merge on top of final result.
@return {Promise<Object>} Consolidated configuration object.
###
configure = ({environment, directory, preConfig, postConfig}) ->
environment ?= process.env.NODE_ENV || 'development'
directory ?= 'config'
preConfig ?= {}
postConfig ?= {}
envDirectory = (dir) ->
if dir
path.join dir, "#{environment}.env"
# Apply the base config to the hard coded preConfig
processDirectory directory, preConfig
.then (baseConfig) ->
# Now the environment specific base config
processDirectory envDirectory(directory), baseConfig
.then (baseEnvConfig) ->
app = baseEnvConfig.appdirs || {}
appName = app.appName
appAuthor = app.appAuthor
if appName
a = new appdirs.AppDirs(appName, appAuthor)
app.siteConfigDir ?= a.siteConfigDir()
app.siteDataDir ?= a.siteDataDir()
app.userCacheDir ?= a.userCacheDir()
app.userConfigDir ?= a.userConfigDir()
app.userDataDir ?= a.userDataDir()
app.userLogDir ?= a.userLogDir()
# Now the site config
processDirectory app.siteConfigDir, baseEnvConfig
.then (siteConfig) ->
# Now the environment specific site config
processDirectory envDirectory(app.siteConfigDir), siteConfig
.then (siteEnvConfig) ->
# Now the user config
processDirectory app.userConfigDir, siteEnvConfig
.then (userConfig) ->
# Now the environment specific user config
processDirectory envDirectory(app.userConfigDir), userConfig
.then (userEnvConfig) ->
confmerge userEnvConfig, postConfig
.then (finalConfig) ->
finalConfig.environment = environment
finalConfig
###
Process a directory for configuration files, merging with baseConfig.
Each file and subdirectory in `directory` is merged with the field in
`baseConfig` of the corresponding name.
@param {String} directory - Directory to parse configuration files from.
@param {Object} baseConfig - Base configuration to merge into.
@return {Promise<Object>} Resulting merged configuration.
@private
###
processDirectory = (directory, baseConfig) ->
if not directory
Promise.resolve baseConfig
else
readdir directory
.then (dir) ->
Promise.all dir.map (file) -> processPath(directory, file)
.then (res) ->
res.reduce confmerge, baseConfig
.catch (err) ->
# Missing directories are fine; just return the base config
if err.code != 'ENOENT'
console.error "Error reading directory: #{err.message}"
throw err
Promise.resolve baseConfig
###
Process a configuration file, or directory of files. A Promise of its
corresponding configuration object is returned.
@param basedir Directory `file` is in.
@param file Name of the file to parse.
@return {Promise<Object>} Resulting merged configuration.
@private
###
processPath = (basedir, file) ->
ext = path.extname file
basename = path.basename file, ext
file = path.join basedir, file
if ext == '.json'
readFile file
.then (contents) ->
res = {}
res[basename] = JSON.parse(contents)
res
else if ext == '.yaml'
readFile file
.then (contents) ->
res = {}
res[basename] = jsyaml.safeLoad(contents)
res
else if ext == '.env'
# Environment; skip
Promise.resolve {}
else
stat file
.then (stats) ->
if stats.isDirectory()
processDirectory file, {}
else
console.error "Unrecognized file type '#{file}'"
{}
.then (subConfig) ->
res = {}
res[basename] = subConfig
res
module.exports = configure
| 115399 | # Copyright (c) 2014. <NAME>, II <<EMAIL>>
'use strict'
Promise = (require 'es6-promise').Promise
appdirs = require 'appdirs'
confmerge = require './confmerge'
jsyaml = require 'js-yaml'
path = require 'path'
{readFile, stat, readdir} = require './fs-promise'
###
Process configuration files.
@param {String} environment Environment to select.
@param {String} directory Directory to process configuration files.
@param {*} preConfig Base configuration to start with.
@param {*} postConfig Config to merge on top of final result.
@return {Promise<Object>} Consolidated configuration object.
###
configure = ({environment, directory, preConfig, postConfig}) ->
environment ?= process.env.NODE_ENV || 'development'
directory ?= 'config'
preConfig ?= {}
postConfig ?= {}
envDirectory = (dir) ->
if dir
path.join dir, "#{environment}.env"
# Apply the base config to the hard coded preConfig
processDirectory directory, preConfig
.then (baseConfig) ->
# Now the environment specific base config
processDirectory envDirectory(directory), baseConfig
.then (baseEnvConfig) ->
app = baseEnvConfig.appdirs || {}
appName = app.appName
appAuthor = app.appAuthor
if appName
a = new appdirs.AppDirs(appName, appAuthor)
app.siteConfigDir ?= a.siteConfigDir()
app.siteDataDir ?= a.siteDataDir()
app.userCacheDir ?= a.userCacheDir()
app.userConfigDir ?= a.userConfigDir()
app.userDataDir ?= a.userDataDir()
app.userLogDir ?= a.userLogDir()
# Now the site config
processDirectory app.siteConfigDir, baseEnvConfig
.then (siteConfig) ->
# Now the environment specific site config
processDirectory envDirectory(app.siteConfigDir), siteConfig
.then (siteEnvConfig) ->
# Now the user config
processDirectory app.userConfigDir, siteEnvConfig
.then (userConfig) ->
# Now the environment specific user config
processDirectory envDirectory(app.userConfigDir), userConfig
.then (userEnvConfig) ->
confmerge userEnvConfig, postConfig
.then (finalConfig) ->
finalConfig.environment = environment
finalConfig
###
Process a directory for configuration files, merging with baseConfig.
Each file and subdirectory in `directory` is merged with the field in
`baseConfig` of the corresponding name.
@param {String} directory - Directory to parse configuration files from.
@param {Object} baseConfig - Base configuration to merge into.
@return {Promise<Object>} Resulting merged configuration.
@private
###
processDirectory = (directory, baseConfig) ->
if not directory
Promise.resolve baseConfig
else
readdir directory
.then (dir) ->
Promise.all dir.map (file) -> processPath(directory, file)
.then (res) ->
res.reduce confmerge, baseConfig
.catch (err) ->
# Missing directories are fine; just return the base config
if err.code != 'ENOENT'
console.error "Error reading directory: #{err.message}"
throw err
Promise.resolve baseConfig
###
Process a configuration file, or directory of files. A Promise of its
corresponding configuration object is returned.
@param basedir Directory `file` is in.
@param file Name of the file to parse.
@return {Promise<Object>} Resulting merged configuration.
@private
###
processPath = (basedir, file) ->
ext = path.extname file
basename = path.basename file, ext
file = path.join basedir, file
if ext == '.json'
readFile file
.then (contents) ->
res = {}
res[basename] = JSON.parse(contents)
res
else if ext == '.yaml'
readFile file
.then (contents) ->
res = {}
res[basename] = jsyaml.safeLoad(contents)
res
else if ext == '.env'
# Environment; skip
Promise.resolve {}
else
stat file
.then (stats) ->
if stats.isDirectory()
processDirectory file, {}
else
console.error "Unrecognized file type '#{file}'"
{}
.then (subConfig) ->
res = {}
res[basename] = subConfig
res
module.exports = configure
| true | # Copyright (c) 2014. PI:NAME:<NAME>END_PI, II <PI:EMAIL:<EMAIL>END_PI>
'use strict'
Promise = (require 'es6-promise').Promise
appdirs = require 'appdirs'
confmerge = require './confmerge'
jsyaml = require 'js-yaml'
path = require 'path'
{readFile, stat, readdir} = require './fs-promise'
###
Process configuration files.
@param {String} environment Environment to select.
@param {String} directory Directory to process configuration files.
@param {*} preConfig Base configuration to start with.
@param {*} postConfig Config to merge on top of final result.
@return {Promise<Object>} Consolidated configuration object.
###
configure = ({environment, directory, preConfig, postConfig}) ->
environment ?= process.env.NODE_ENV || 'development'
directory ?= 'config'
preConfig ?= {}
postConfig ?= {}
envDirectory = (dir) ->
if dir
path.join dir, "#{environment}.env"
# Apply the base config to the hard coded preConfig
processDirectory directory, preConfig
.then (baseConfig) ->
# Now the environment specific base config
processDirectory envDirectory(directory), baseConfig
.then (baseEnvConfig) ->
app = baseEnvConfig.appdirs || {}
appName = app.appName
appAuthor = app.appAuthor
if appName
a = new appdirs.AppDirs(appName, appAuthor)
app.siteConfigDir ?= a.siteConfigDir()
app.siteDataDir ?= a.siteDataDir()
app.userCacheDir ?= a.userCacheDir()
app.userConfigDir ?= a.userConfigDir()
app.userDataDir ?= a.userDataDir()
app.userLogDir ?= a.userLogDir()
# Now the site config
processDirectory app.siteConfigDir, baseEnvConfig
.then (siteConfig) ->
# Now the environment specific site config
processDirectory envDirectory(app.siteConfigDir), siteConfig
.then (siteEnvConfig) ->
# Now the user config
processDirectory app.userConfigDir, siteEnvConfig
.then (userConfig) ->
# Now the environment specific user config
processDirectory envDirectory(app.userConfigDir), userConfig
.then (userEnvConfig) ->
confmerge userEnvConfig, postConfig
.then (finalConfig) ->
finalConfig.environment = environment
finalConfig
###
Process a directory for configuration files, merging with baseConfig.
Each file and subdirectory in `directory` is merged with the field in
`baseConfig` of the corresponding name.
@param {String} directory - Directory to parse configuration files from.
@param {Object} baseConfig - Base configuration to merge into.
@return {Promise<Object>} Resulting merged configuration.
@private
###
processDirectory = (directory, baseConfig) ->
if not directory
Promise.resolve baseConfig
else
readdir directory
.then (dir) ->
Promise.all dir.map (file) -> processPath(directory, file)
.then (res) ->
res.reduce confmerge, baseConfig
.catch (err) ->
# Missing directories are fine; just return the base config
if err.code != 'ENOENT'
console.error "Error reading directory: #{err.message}"
throw err
Promise.resolve baseConfig
###
Process a configuration file, or directory of files. A Promise of its
corresponding configuration object is returned.
@param basedir Directory `file` is in.
@param file Name of the file to parse.
@return {Promise<Object>} Resulting merged configuration.
@private
###
processPath = (basedir, file) ->
ext = path.extname file
basename = path.basename file, ext
file = path.join basedir, file
if ext == '.json'
readFile file
.then (contents) ->
res = {}
res[basename] = JSON.parse(contents)
res
else if ext == '.yaml'
readFile file
.then (contents) ->
res = {}
res[basename] = jsyaml.safeLoad(contents)
res
else if ext == '.env'
# Environment; skip
Promise.resolve {}
else
stat file
.then (stats) ->
if stats.isDirectory()
processDirectory file, {}
else
console.error "Unrecognized file type '#{file}'"
{}
.then (subConfig) ->
res = {}
res[basename] = subConfig
res
module.exports = configure
|
[
{
"context": "# * https://github.com/jasoncypret/expressionUI\n# *\n# * Copyright (c) 2013 Jason Cyp",
"end": 34,
"score": 0.999526858329773,
"start": 23,
"tag": "USERNAME",
"value": "jasoncypret"
},
{
"context": "asoncypret/expressionUI\n# *\n# * Copyright (c) 2013 Jason Cypret (htt... | vendor/assets/javascripts/expressionui.modal.js.coffee | jasoncypret/expressionUI | 0 | # * https://github.com/jasoncypret/expressionUI
# *
# * Copyright (c) 2013 Jason Cypret (http://jasoncypret.com/)
# * Licensed under the MIT License
#
((factory) ->
"use strict"
if typeof define is "function" and define.amd
# Register as an anonymous AMD module:
define [
"jquery"
], factory
else
# Not using AMD
factory window.jQuery
return
) (jQuery) ->
((jQuery) ->
methods =
defaults:
title: "Alert"
id: "modal"
closeID: "closeDialog"
overlay: true
overlayMrk: "<div class='pageOverlay'></div>"
close_on_overlay: false
enter_to_submit: false
appendTo: "body"
animation: ""
threshold: 15
ajax: ""
ajaxTarget: ""
autoresize: true
notify: false
width: "auto"
height: "auto"
header: true
headerContent: ""
footer: true
removeContent: false
buttons: [
buttonText: "Ok"
callback: ->
return
defaultButton: true
]
closeCallback: jQuery.noop
beforeOpen: jQuery.noop
afterOpen: jQuery.noop
beforeClose: jQuery.noop
afterClose: jQuery.noop
afterAjax: jQuery.noop
init: (options) ->
options = jQuery.extend({}, methods.defaults, options)
jQuery(this).modal "_open", options
_open: (options) ->
options.beforeOpen()
overlay = ""
isMobile = ""
buildModal = undefined
buttons = ""
jQuery.each options.buttons, (i, btn) ->
defaultBtn = ""
defaultBtn = " defaultButton" unless typeof btn.defaultButton is "undefined"
className = ""
className = btn.className unless typeof btn.className is "undefined"
buttons += "<a href=\"javascript:;\" id=\"modalBtn" + i + "\" class=\"" + className + defaultBtn + "\"><span>" + btn.buttonText + "</span></a>"
options.width = options.width + "px" unless options.width is "auto"
overlay = options.overlayMrk if options.overlay
buildModal = "<div id=\"" + options.id + "\" class=\"modal_wrapper " + isMobile + " " + options.height + "\">" + overlay + "<div style=\"width:" + options.width + ";\" class=\"modal\">"
if options.header
buildModal += "<h1><span class=\"wrapper\"><span class=\"title\">" + options.title + "</span><a href=\"javascript:;\" id=\"" + options.closeID + "\" class=\"closeDialog " + options.closeClass + "\">x</a></span></h1>"
else
buttons += "<a href=\"javascript:;\" class=\"closeDialog\"><span>Cancel</span></a>"
buildModal += "<div class=\"modalBody\"><div class=\"wrapper\"><div class=\"d_content\"></div></div></div>"
buildModal += "<div class=\"modalFooter\"><span class=\"wrapper\">" + buttons + "</span></div>" if options.footer
buildModal += "</div></div>"
jQuery(options.appendTo).append buildModal
jQuery(options.headerContent).appendTo("#" + options.id + " h1 .wrapper").css "display", "block" if options.headerContent and options.header
jQuery(this).appendTo("#" + options.id + " .d_content").css "display", "block"
jQuery(this).modal "ajax", options if options.ajax
jQuery("#" + options.id + " .modal").modal "position", options
jQuery("#" + options.id).modal "_setupEvents", options
jQuery(options.appendTo).addClass "modal_open"
options.afterOpen.call()
ajax: (options) ->
_this = jQuery(this)
options.ajax = options.ajax + " " + options.ajaxTarget if options.ajaxTarget
if options.notify
jQuery(_this).parents(".modalBody").notify
style: "none"
position: "middle"
loading: "circles"
sticky: true
content: "hidden"
_this.load options.ajax, (response, status, xhr) =>
jQuery(_this).parents(".modalBody").find(".notify").notify "close"
setTimeout =>
options.afterAjax.call()
_this.parents(".modal:first").modal "position", options
, 100
if status is "error"
msg = "Sorry but there was an error: "
alert msg + xhr.status + " " + xhr.statusText
close: (options={}) ->
options = jQuery.extend({}, methods.defaults, options) # TODO: This is not extending!
modal_content = jQuery(this).find(".d_content").children()
parent = jQuery(this).parents(".modal_wrapper")
m = jQuery(".modal")
remove_and_clean = ->
unless options.removeContent
modal_content.appendTo(options.appendTo).css "display", "none"
jQuery(options.headerContent).appendTo(options.appendTo).css "display", "none" if options.headerContent and options.header
else
modal_content.remove()
unless m[1]
jQuery(options.appendTo).removeClass "modal_close"
jQuery(window).unbind "resize.modal"
parent.remove()
jQuery(this).unbind 'oanimationend animationend webkitAnimationEnd'
options.afterClose()
options.beforeClose()
# Checking for multiple open
if m[1]
jQuery(m[m.length - 1]).fadeOut "fast", =>
remove_and_clean()
else
jQuery(this).bind 'oanimationend animationend webkitAnimationEnd', =>
remove_and_clean()
jQuery(options.appendTo).removeClass("modal_open").addClass "modal_close"
_setupEvents: (options) ->
_this = jQuery(this)
if options.autoresize
jQuery(window).bind "resize.modal", ->
_this.find(".modal").modal "position", options
if options.enter_to_submit
form = jQuery(this).find 'form'
jQuery(document).bind('keypress', (e) =>
code = e.keyCode || e.which
if code == 13
form.submit()
options.submit_callback() if (typeof options.submit_callback is not "undefined")
)
if options.overlay and options.close_on_overlay
parser = new DOMParser()
over = parser.parseFromString(options.overlayMrk, "text/xml");
klass = jQuery(over.firstChild).attr('class')
jQuery("." + klass).click( =>
(if (options.closeCallback == $.noop) then jQuery("#" + options.id + " .modal").modal("close", options) else options.closeCallback())
)
jQuery(this).find(".modalFooter a:not(.closeDialog)").each( (i, e) =>
jQuery(e).click( =>
if (options.buttons[i].context)
options.buttons[i].callback.apply options.buttons[i].context
else
options.buttons[i].callback()
)
)
jQuery(this).find(".closeDialog").click ->
# TODO: Should prob make this a default for easier reading
(if (options.closeCallback == $.noop) then jQuery("#" + options.id + " .modal").modal("close", options) else options.closeCallback())
position: (options) ->
modal = jQuery(this)
modal.css "bottom", "auto"
modalHeight = modal.outerHeight()
modalPadding = modalHeight - modal.height()
win = jQuery(window).height()
threshold = options.threshold
if options.height is "auto"
if modalHeight > (win - (threshold * 2))
modal.css
top: threshold
bottom: threshold
"margin-left": -(modal.outerWidth() / 2)
"margin-top": 0
else
modal.css
top: "50%"
"margin-top": -(modal.outerHeight() / 2)
"margin-left": -(modal.outerWidth() / 2)
else
modal.css
top: threshold
bottom: threshold
"margin-left": -(modal.outerWidth() / 2)
jQuery.fn.modal = (method) ->
if methods[method]
methods[method].apply this, Array::slice.call(arguments, 1)
else if typeof method is "object" or not method
methods.init.apply this, arguments
else
jQuery.error "Method " + method + " does not exist on jQuery.Modal"
) jQuery | 201044 | # * https://github.com/jasoncypret/expressionUI
# *
# * Copyright (c) 2013 <NAME> (http://jasoncypret.com/)
# * Licensed under the MIT License
#
((factory) ->
"use strict"
if typeof define is "function" and define.amd
# Register as an anonymous AMD module:
define [
"jquery"
], factory
else
# Not using AMD
factory window.jQuery
return
) (jQuery) ->
((jQuery) ->
methods =
defaults:
title: "Alert"
id: "modal"
closeID: "closeDialog"
overlay: true
overlayMrk: "<div class='pageOverlay'></div>"
close_on_overlay: false
enter_to_submit: false
appendTo: "body"
animation: ""
threshold: 15
ajax: ""
ajaxTarget: ""
autoresize: true
notify: false
width: "auto"
height: "auto"
header: true
headerContent: ""
footer: true
removeContent: false
buttons: [
buttonText: "Ok"
callback: ->
return
defaultButton: true
]
closeCallback: jQuery.noop
beforeOpen: jQuery.noop
afterOpen: jQuery.noop
beforeClose: jQuery.noop
afterClose: jQuery.noop
afterAjax: jQuery.noop
init: (options) ->
options = jQuery.extend({}, methods.defaults, options)
jQuery(this).modal "_open", options
_open: (options) ->
options.beforeOpen()
overlay = ""
isMobile = ""
buildModal = undefined
buttons = ""
jQuery.each options.buttons, (i, btn) ->
defaultBtn = ""
defaultBtn = " defaultButton" unless typeof btn.defaultButton is "undefined"
className = ""
className = btn.className unless typeof btn.className is "undefined"
buttons += "<a href=\"javascript:;\" id=\"modalBtn" + i + "\" class=\"" + className + defaultBtn + "\"><span>" + btn.buttonText + "</span></a>"
options.width = options.width + "px" unless options.width is "auto"
overlay = options.overlayMrk if options.overlay
buildModal = "<div id=\"" + options.id + "\" class=\"modal_wrapper " + isMobile + " " + options.height + "\">" + overlay + "<div style=\"width:" + options.width + ";\" class=\"modal\">"
if options.header
buildModal += "<h1><span class=\"wrapper\"><span class=\"title\">" + options.title + "</span><a href=\"javascript:;\" id=\"" + options.closeID + "\" class=\"closeDialog " + options.closeClass + "\">x</a></span></h1>"
else
buttons += "<a href=\"javascript:;\" class=\"closeDialog\"><span>Cancel</span></a>"
buildModal += "<div class=\"modalBody\"><div class=\"wrapper\"><div class=\"d_content\"></div></div></div>"
buildModal += "<div class=\"modalFooter\"><span class=\"wrapper\">" + buttons + "</span></div>" if options.footer
buildModal += "</div></div>"
jQuery(options.appendTo).append buildModal
jQuery(options.headerContent).appendTo("#" + options.id + " h1 .wrapper").css "display", "block" if options.headerContent and options.header
jQuery(this).appendTo("#" + options.id + " .d_content").css "display", "block"
jQuery(this).modal "ajax", options if options.ajax
jQuery("#" + options.id + " .modal").modal "position", options
jQuery("#" + options.id).modal "_setupEvents", options
jQuery(options.appendTo).addClass "modal_open"
options.afterOpen.call()
ajax: (options) ->
_this = jQuery(this)
options.ajax = options.ajax + " " + options.ajaxTarget if options.ajaxTarget
if options.notify
jQuery(_this).parents(".modalBody").notify
style: "none"
position: "middle"
loading: "circles"
sticky: true
content: "hidden"
_this.load options.ajax, (response, status, xhr) =>
jQuery(_this).parents(".modalBody").find(".notify").notify "close"
setTimeout =>
options.afterAjax.call()
_this.parents(".modal:first").modal "position", options
, 100
if status is "error"
msg = "Sorry but there was an error: "
alert msg + xhr.status + " " + xhr.statusText
close: (options={}) ->
options = jQuery.extend({}, methods.defaults, options) # TODO: This is not extending!
modal_content = jQuery(this).find(".d_content").children()
parent = jQuery(this).parents(".modal_wrapper")
m = jQuery(".modal")
remove_and_clean = ->
unless options.removeContent
modal_content.appendTo(options.appendTo).css "display", "none"
jQuery(options.headerContent).appendTo(options.appendTo).css "display", "none" if options.headerContent and options.header
else
modal_content.remove()
unless m[1]
jQuery(options.appendTo).removeClass "modal_close"
jQuery(window).unbind "resize.modal"
parent.remove()
jQuery(this).unbind 'oanimationend animationend webkitAnimationEnd'
options.afterClose()
options.beforeClose()
# Checking for multiple open
if m[1]
jQuery(m[m.length - 1]).fadeOut "fast", =>
remove_and_clean()
else
jQuery(this).bind 'oanimationend animationend webkitAnimationEnd', =>
remove_and_clean()
jQuery(options.appendTo).removeClass("modal_open").addClass "modal_close"
_setupEvents: (options) ->
_this = jQuery(this)
if options.autoresize
jQuery(window).bind "resize.modal", ->
_this.find(".modal").modal "position", options
if options.enter_to_submit
form = jQuery(this).find 'form'
jQuery(document).bind('keypress', (e) =>
code = e.keyCode || e.which
if code == 13
form.submit()
options.submit_callback() if (typeof options.submit_callback is not "undefined")
)
if options.overlay and options.close_on_overlay
parser = new DOMParser()
over = parser.parseFromString(options.overlayMrk, "text/xml");
klass = jQuery(over.firstChild).attr('class')
jQuery("." + klass).click( =>
(if (options.closeCallback == $.noop) then jQuery("#" + options.id + " .modal").modal("close", options) else options.closeCallback())
)
jQuery(this).find(".modalFooter a:not(.closeDialog)").each( (i, e) =>
jQuery(e).click( =>
if (options.buttons[i].context)
options.buttons[i].callback.apply options.buttons[i].context
else
options.buttons[i].callback()
)
)
jQuery(this).find(".closeDialog").click ->
# TODO: Should prob make this a default for easier reading
(if (options.closeCallback == $.noop) then jQuery("#" + options.id + " .modal").modal("close", options) else options.closeCallback())
position: (options) ->
modal = jQuery(this)
modal.css "bottom", "auto"
modalHeight = modal.outerHeight()
modalPadding = modalHeight - modal.height()
win = jQuery(window).height()
threshold = options.threshold
if options.height is "auto"
if modalHeight > (win - (threshold * 2))
modal.css
top: threshold
bottom: threshold
"margin-left": -(modal.outerWidth() / 2)
"margin-top": 0
else
modal.css
top: "50%"
"margin-top": -(modal.outerHeight() / 2)
"margin-left": -(modal.outerWidth() / 2)
else
modal.css
top: threshold
bottom: threshold
"margin-left": -(modal.outerWidth() / 2)
jQuery.fn.modal = (method) ->
if methods[method]
methods[method].apply this, Array::slice.call(arguments, 1)
else if typeof method is "object" or not method
methods.init.apply this, arguments
else
jQuery.error "Method " + method + " does not exist on jQuery.Modal"
) jQuery | true | # * https://github.com/jasoncypret/expressionUI
# *
# * Copyright (c) 2013 PI:NAME:<NAME>END_PI (http://jasoncypret.com/)
# * Licensed under the MIT License
#
((factory) ->
"use strict"
if typeof define is "function" and define.amd
# Register as an anonymous AMD module:
define [
"jquery"
], factory
else
# Not using AMD
factory window.jQuery
return
) (jQuery) ->
((jQuery) ->
methods =
defaults:
title: "Alert"
id: "modal"
closeID: "closeDialog"
overlay: true
overlayMrk: "<div class='pageOverlay'></div>"
close_on_overlay: false
enter_to_submit: false
appendTo: "body"
animation: ""
threshold: 15
ajax: ""
ajaxTarget: ""
autoresize: true
notify: false
width: "auto"
height: "auto"
header: true
headerContent: ""
footer: true
removeContent: false
buttons: [
buttonText: "Ok"
callback: ->
return
defaultButton: true
]
closeCallback: jQuery.noop
beforeOpen: jQuery.noop
afterOpen: jQuery.noop
beforeClose: jQuery.noop
afterClose: jQuery.noop
afterAjax: jQuery.noop
init: (options) ->
options = jQuery.extend({}, methods.defaults, options)
jQuery(this).modal "_open", options
_open: (options) ->
options.beforeOpen()
overlay = ""
isMobile = ""
buildModal = undefined
buttons = ""
jQuery.each options.buttons, (i, btn) ->
defaultBtn = ""
defaultBtn = " defaultButton" unless typeof btn.defaultButton is "undefined"
className = ""
className = btn.className unless typeof btn.className is "undefined"
buttons += "<a href=\"javascript:;\" id=\"modalBtn" + i + "\" class=\"" + className + defaultBtn + "\"><span>" + btn.buttonText + "</span></a>"
options.width = options.width + "px" unless options.width is "auto"
overlay = options.overlayMrk if options.overlay
buildModal = "<div id=\"" + options.id + "\" class=\"modal_wrapper " + isMobile + " " + options.height + "\">" + overlay + "<div style=\"width:" + options.width + ";\" class=\"modal\">"
if options.header
buildModal += "<h1><span class=\"wrapper\"><span class=\"title\">" + options.title + "</span><a href=\"javascript:;\" id=\"" + options.closeID + "\" class=\"closeDialog " + options.closeClass + "\">x</a></span></h1>"
else
buttons += "<a href=\"javascript:;\" class=\"closeDialog\"><span>Cancel</span></a>"
buildModal += "<div class=\"modalBody\"><div class=\"wrapper\"><div class=\"d_content\"></div></div></div>"
buildModal += "<div class=\"modalFooter\"><span class=\"wrapper\">" + buttons + "</span></div>" if options.footer
buildModal += "</div></div>"
jQuery(options.appendTo).append buildModal
jQuery(options.headerContent).appendTo("#" + options.id + " h1 .wrapper").css "display", "block" if options.headerContent and options.header
jQuery(this).appendTo("#" + options.id + " .d_content").css "display", "block"
jQuery(this).modal "ajax", options if options.ajax
jQuery("#" + options.id + " .modal").modal "position", options
jQuery("#" + options.id).modal "_setupEvents", options
jQuery(options.appendTo).addClass "modal_open"
options.afterOpen.call()
ajax: (options) ->
_this = jQuery(this)
options.ajax = options.ajax + " " + options.ajaxTarget if options.ajaxTarget
if options.notify
jQuery(_this).parents(".modalBody").notify
style: "none"
position: "middle"
loading: "circles"
sticky: true
content: "hidden"
_this.load options.ajax, (response, status, xhr) =>
jQuery(_this).parents(".modalBody").find(".notify").notify "close"
setTimeout =>
options.afterAjax.call()
_this.parents(".modal:first").modal "position", options
, 100
if status is "error"
msg = "Sorry but there was an error: "
alert msg + xhr.status + " " + xhr.statusText
close: (options={}) ->
options = jQuery.extend({}, methods.defaults, options) # TODO: This is not extending!
modal_content = jQuery(this).find(".d_content").children()
parent = jQuery(this).parents(".modal_wrapper")
m = jQuery(".modal")
remove_and_clean = ->
unless options.removeContent
modal_content.appendTo(options.appendTo).css "display", "none"
jQuery(options.headerContent).appendTo(options.appendTo).css "display", "none" if options.headerContent and options.header
else
modal_content.remove()
unless m[1]
jQuery(options.appendTo).removeClass "modal_close"
jQuery(window).unbind "resize.modal"
parent.remove()
jQuery(this).unbind 'oanimationend animationend webkitAnimationEnd'
options.afterClose()
options.beforeClose()
# Checking for multiple open
if m[1]
jQuery(m[m.length - 1]).fadeOut "fast", =>
remove_and_clean()
else
jQuery(this).bind 'oanimationend animationend webkitAnimationEnd', =>
remove_and_clean()
jQuery(options.appendTo).removeClass("modal_open").addClass "modal_close"
_setupEvents: (options) ->
_this = jQuery(this)
if options.autoresize
jQuery(window).bind "resize.modal", ->
_this.find(".modal").modal "position", options
if options.enter_to_submit
form = jQuery(this).find 'form'
jQuery(document).bind('keypress', (e) =>
code = e.keyCode || e.which
if code == 13
form.submit()
options.submit_callback() if (typeof options.submit_callback is not "undefined")
)
if options.overlay and options.close_on_overlay
parser = new DOMParser()
over = parser.parseFromString(options.overlayMrk, "text/xml");
klass = jQuery(over.firstChild).attr('class')
jQuery("." + klass).click( =>
(if (options.closeCallback == $.noop) then jQuery("#" + options.id + " .modal").modal("close", options) else options.closeCallback())
)
jQuery(this).find(".modalFooter a:not(.closeDialog)").each( (i, e) =>
jQuery(e).click( =>
if (options.buttons[i].context)
options.buttons[i].callback.apply options.buttons[i].context
else
options.buttons[i].callback()
)
)
jQuery(this).find(".closeDialog").click ->
# TODO: Should prob make this a default for easier reading
(if (options.closeCallback == $.noop) then jQuery("#" + options.id + " .modal").modal("close", options) else options.closeCallback())
position: (options) ->
modal = jQuery(this)
modal.css "bottom", "auto"
modalHeight = modal.outerHeight()
modalPadding = modalHeight - modal.height()
win = jQuery(window).height()
threshold = options.threshold
if options.height is "auto"
if modalHeight > (win - (threshold * 2))
modal.css
top: threshold
bottom: threshold
"margin-left": -(modal.outerWidth() / 2)
"margin-top": 0
else
modal.css
top: "50%"
"margin-top": -(modal.outerHeight() / 2)
"margin-left": -(modal.outerWidth() / 2)
else
modal.css
top: threshold
bottom: threshold
"margin-left": -(modal.outerWidth() / 2)
jQuery.fn.modal = (method) ->
if methods[method]
methods[method].apply this, Array::slice.call(arguments, 1)
else if typeof method is "object" or not method
methods.init.apply this, arguments
else
jQuery.error "Method " + method + " does not exist on jQuery.Modal"
) jQuery |
[
{
"context": "lePath = __dirname + \"/../../src\"\n\nplayers = [\n 'Jombocom'\n 'Carple'\n 'Danret'\n 'Goop'\n 'Jeut'\n 'Axce'",
"end": 298,
"score": 0.8416978716850281,
"start": 290,
"tag": "NAME",
"value": "Jombocom"
},
{
"context": "rname + \"/../../src\"\n\nplayers = [\n '... | ext/local-test/LocalTest.coffee | sadbear-/IdleLands | 0 |
finder = require "fs-finder"
watch = require "node-watch"
colors = require "cli-color"
_ = require "lodash"
#### GAME CONSTANTS ####
# change this if you want the console game to go faster
DELAY_INTERVAL = 1
########################
idlePath = __dirname + "/../../src"
players = [
'Jombocom'
'Carple'
'Danret'
'Goop'
'Jeut'
'Axce'
'Groat'
'Jack'
'Xefe'
'Ooola'
'Getry'
'Seripity'
'Tence'
'Rawgle'
'Plez'
'Zep'
'Shet'
'Lord Sirpy'
'Sir Pipe'
'Pleb'
'Rekter'
'Pilu'
]
hashes = []
playerHash = {}
## utility functions ##
uniqueId = (playerName) ->
"local-server/#{playerName}"
buildHashes = ->
hashes = _.each players, (player) ->
playerHash[uniqueId player] = player
.map (player) -> uniqueId player
broadcast = (message) ->
console.log message
broadcastHandler = (message) ->
broadcast message
## ## ## ## ## ## ## ##
interval = null
IdleWrapper = require(idlePath+"/system/ExternalWrapper")()
w = getWrapper = -> IdleWrapper
api = -> w().api
inst = -> api().gameInstance
pm = -> inst().playerManager
player = -> api().player
game = -> api().game
gm = -> api().gm
colorMap =
"player.name": colors.bold
"event.partyName": colors.bold
"event.partyMembers": colors.bold
"event.player": colors.bold
"event.damage": colors.red
"event.gold": colors.yellowBright
"event.realGold": colors.yellowBright
"event.shopGold": colors.yellowBright
"event.xp": colors.green
"event.realXp": colors.green
"event.percentXp": colors.green
"event.item.newbie": colors.whiteBright
"event.item.Normal": colors.whiteBright
"event.item.basic": colors.whiteBright
"event.item.pro": colors.white
"event.item.idle": colors.cyan
"event.item.godly": colors.cyanBright
"event.item.custom": colors.cyanBright
"event.item.guardian": colors.cyan
"event.finditem.scoreboost": colors.bold
"event.finditem.perceived": colors.bold
"event.finditem.real": colors.bold
"event.blessItem.stat": colors.bold
"event.blessItem.value": colors.bold
"event.flip.stat": colors.bold
"event.flip.value": colors.bold
"event.enchant.boost": colors.bold
"event.enchant.stat": colors.bold
"event.tinker.boost": colors.bold
"event.tinker.stat": colors.bold
"event.transfer.destination": colors.bold
"event.transfer.from": colors.bold
"player.class": colors.bold
"player.level": colors.bold
"stats.hp": colors.red
"stats.mp": colors.blue
"stats.sp": colors.yellow
"damage.hp": colors.red
"damage.mp": colors.blue
"spell.turns": colors.bold
"spell.spellName": colors.bold.underline
"event.casterName": colors.bold
"event.spellName": colors.bold.underline
"event.targetName": colors.bold
"event.achievement": colors.bold
"event.guildName": colors.bold.underline
## API call functions ##
loadIdle = ->
try
IdleWrapper.load()
IdleWrapper.api.game.handlers.colorMap colorMap
IdleWrapper.api.game.handlers.broadcastHandler broadcastHandler, null
do loadAllPlayers
catch e
console.error e
registerAllPlayers = ->
_.each hashes, (playerHashInList) ->
IdleWrapper.api.player.auth.register {identifier: playerHashInList, name: playerHash[playerHashInList]}, null
loadAllPlayers = ->
_.each hashes, (playerHash) ->
IdleWrapper.api.player.auth.login playerHash
adjustSpeed = ->
clearInterval IdleWrapper.api.gameInstance.playerManager.interval
IdleWrapper.api.gameInstance.playerManager.DELAY_INTERVAL = DELAY_INTERVAL
IdleWrapper.api.gameInstance.playerManager.beginGameLoop()
gameLoop = ->
doActionPerMember = (arr, action) ->
for i in [0...arr.length]
setTimeout (player, i) ->
action player
, DELAY_INTERVAL/arr.length*i, arr[i]
interval = setInterval ->
doActionPerMember hashes, IdleWrapper.api.player.takeTurn
, DELAY_INTERVAL
interactiveSession = ->
readline = require 'readline'
cli = readline.createInterface process.stdin, process.stdout, null
cli.on 'line', (line) ->
clearInterval IdleWrapper.api.gameInstance.playerManager.interval
clearInterval interval
cli.setPrompt "halted: c to continue> "
if line is ""
cli.prompt()
else if line is "c"
do IdleWrapper.api.gameInstance.playerManager.beginGameLoop
do gameLoop
else
try
broadcast "Evaluating `#{line}`"
result = eval line
broadcast result
result?.then? (res) -> broadcast res.message
catch error
console.error error.name, error.message, error.stack
cli.prompt()
## ## ## ## ## ## ## ##
## other functions ##
watchIdleFiles = ->
loadFunction = _.debounce loadIdle, 100
watch idlePath, {}, ->
files = finder.from(idlePath).findFiles "*.coffee"
_.each files, (file) ->
delete require.cache[file]
clearInterval IdleWrapper.api.gameInstance.playerManager.interval
clearInterval interval
loadFunction()
#####################
## ## initial load ## ##
do buildHashes
do loadIdle
do registerAllPlayers
do loadAllPlayers
do watchIdleFiles
do adjustSpeed
do gameLoop
do interactiveSession
| 160886 |
finder = require "fs-finder"
watch = require "node-watch"
colors = require "cli-color"
_ = require "lodash"
#### GAME CONSTANTS ####
# change this if you want the console game to go faster
DELAY_INTERVAL = 1
########################
idlePath = __dirname + "/../../src"
players = [
'<NAME>'
'<NAME>'
'<NAME>'
'<NAME>'
'<NAME>'
'<NAME>'
'<NAME>'
'<NAME>'
'<NAME>'
'<NAME>'
'<NAME>'
'<NAME>'
'<NAME>'
'<NAME>'
'<NAME>'
'<NAME>'
'<NAME>'
'<NAME>'
'<NAME>'
'<NAME>'
'<NAME>'
'<NAME>'
]
hashes = []
playerHash = {}
## utility functions ##
uniqueId = (playerName) ->
"local-server/#{playerName}"
buildHashes = ->
hashes = _.each players, (player) ->
playerHash[uniqueId player] = player
.map (player) -> uniqueId player
broadcast = (message) ->
console.log message
broadcastHandler = (message) ->
broadcast message
## ## ## ## ## ## ## ##
interval = null
IdleWrapper = require(idlePath+"/system/ExternalWrapper")()
w = getWrapper = -> IdleWrapper
api = -> w().api
inst = -> api().gameInstance
pm = -> inst().playerManager
player = -> api().player
game = -> api().game
gm = -> api().gm
colorMap =
"player.name": colors.bold
"event.partyName": colors.bold
"event.partyMembers": colors.bold
"event.player": colors.bold
"event.damage": colors.red
"event.gold": colors.yellowBright
"event.realGold": colors.yellowBright
"event.shopGold": colors.yellowBright
"event.xp": colors.green
"event.realXp": colors.green
"event.percentXp": colors.green
"event.item.newbie": colors.whiteBright
"event.item.Normal": colors.whiteBright
"event.item.basic": colors.whiteBright
"event.item.pro": colors.white
"event.item.idle": colors.cyan
"event.item.godly": colors.cyanBright
"event.item.custom": colors.cyanBright
"event.item.guardian": colors.cyan
"event.finditem.scoreboost": colors.bold
"event.finditem.perceived": colors.bold
"event.finditem.real": colors.bold
"event.blessItem.stat": colors.bold
"event.blessItem.value": colors.bold
"event.flip.stat": colors.bold
"event.flip.value": colors.bold
"event.enchant.boost": colors.bold
"event.enchant.stat": colors.bold
"event.tinker.boost": colors.bold
"event.tinker.stat": colors.bold
"event.transfer.destination": colors.bold
"event.transfer.from": colors.bold
"player.class": colors.bold
"player.level": colors.bold
"stats.hp": colors.red
"stats.mp": colors.blue
"stats.sp": colors.yellow
"damage.hp": colors.red
"damage.mp": colors.blue
"spell.turns": colors.bold
"spell.spellName": colors.bold.underline
"event.casterName": colors.bold
"event.spellName": colors.bold.underline
"event.targetName": colors.bold
"event.achievement": colors.bold
"event.guildName": colors.bold.underline
## API call functions ##
loadIdle = ->
try
IdleWrapper.load()
IdleWrapper.api.game.handlers.colorMap colorMap
IdleWrapper.api.game.handlers.broadcastHandler broadcastHandler, null
do loadAllPlayers
catch e
console.error e
registerAllPlayers = ->
_.each hashes, (playerHashInList) ->
IdleWrapper.api.player.auth.register {identifier: playerHashInList, name: playerHash[playerHashInList]}, null
loadAllPlayers = ->
_.each hashes, (playerHash) ->
IdleWrapper.api.player.auth.login playerHash
adjustSpeed = ->
clearInterval IdleWrapper.api.gameInstance.playerManager.interval
IdleWrapper.api.gameInstance.playerManager.DELAY_INTERVAL = DELAY_INTERVAL
IdleWrapper.api.gameInstance.playerManager.beginGameLoop()
gameLoop = ->
doActionPerMember = (arr, action) ->
for i in [0...arr.length]
setTimeout (player, i) ->
action player
, DELAY_INTERVAL/arr.length*i, arr[i]
interval = setInterval ->
doActionPerMember hashes, IdleWrapper.api.player.takeTurn
, DELAY_INTERVAL
interactiveSession = ->
readline = require 'readline'
cli = readline.createInterface process.stdin, process.stdout, null
cli.on 'line', (line) ->
clearInterval IdleWrapper.api.gameInstance.playerManager.interval
clearInterval interval
cli.setPrompt "halted: c to continue> "
if line is ""
cli.prompt()
else if line is "c"
do IdleWrapper.api.gameInstance.playerManager.beginGameLoop
do gameLoop
else
try
broadcast "Evaluating `#{line}`"
result = eval line
broadcast result
result?.then? (res) -> broadcast res.message
catch error
console.error error.name, error.message, error.stack
cli.prompt()
## ## ## ## ## ## ## ##
## other functions ##
watchIdleFiles = ->
loadFunction = _.debounce loadIdle, 100
watch idlePath, {}, ->
files = finder.from(idlePath).findFiles "*.coffee"
_.each files, (file) ->
delete require.cache[file]
clearInterval IdleWrapper.api.gameInstance.playerManager.interval
clearInterval interval
loadFunction()
#####################
## ## initial load ## ##
do buildHashes
do loadIdle
do registerAllPlayers
do loadAllPlayers
do watchIdleFiles
do adjustSpeed
do gameLoop
do interactiveSession
| true |
finder = require "fs-finder"
watch = require "node-watch"
colors = require "cli-color"
_ = require "lodash"
#### GAME CONSTANTS ####
# change this if you want the console game to go faster
DELAY_INTERVAL = 1
########################
idlePath = __dirname + "/../../src"
players = [
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
]
hashes = []
playerHash = {}
## utility functions ##
uniqueId = (playerName) ->
"local-server/#{playerName}"
buildHashes = ->
hashes = _.each players, (player) ->
playerHash[uniqueId player] = player
.map (player) -> uniqueId player
broadcast = (message) ->
console.log message
broadcastHandler = (message) ->
broadcast message
## ## ## ## ## ## ## ##
interval = null
IdleWrapper = require(idlePath+"/system/ExternalWrapper")()
w = getWrapper = -> IdleWrapper
api = -> w().api
inst = -> api().gameInstance
pm = -> inst().playerManager
player = -> api().player
game = -> api().game
gm = -> api().gm
colorMap =
"player.name": colors.bold
"event.partyName": colors.bold
"event.partyMembers": colors.bold
"event.player": colors.bold
"event.damage": colors.red
"event.gold": colors.yellowBright
"event.realGold": colors.yellowBright
"event.shopGold": colors.yellowBright
"event.xp": colors.green
"event.realXp": colors.green
"event.percentXp": colors.green
"event.item.newbie": colors.whiteBright
"event.item.Normal": colors.whiteBright
"event.item.basic": colors.whiteBright
"event.item.pro": colors.white
"event.item.idle": colors.cyan
"event.item.godly": colors.cyanBright
"event.item.custom": colors.cyanBright
"event.item.guardian": colors.cyan
"event.finditem.scoreboost": colors.bold
"event.finditem.perceived": colors.bold
"event.finditem.real": colors.bold
"event.blessItem.stat": colors.bold
"event.blessItem.value": colors.bold
"event.flip.stat": colors.bold
"event.flip.value": colors.bold
"event.enchant.boost": colors.bold
"event.enchant.stat": colors.bold
"event.tinker.boost": colors.bold
"event.tinker.stat": colors.bold
"event.transfer.destination": colors.bold
"event.transfer.from": colors.bold
"player.class": colors.bold
"player.level": colors.bold
"stats.hp": colors.red
"stats.mp": colors.blue
"stats.sp": colors.yellow
"damage.hp": colors.red
"damage.mp": colors.blue
"spell.turns": colors.bold
"spell.spellName": colors.bold.underline
"event.casterName": colors.bold
"event.spellName": colors.bold.underline
"event.targetName": colors.bold
"event.achievement": colors.bold
"event.guildName": colors.bold.underline
## API call functions ##
loadIdle = ->
try
IdleWrapper.load()
IdleWrapper.api.game.handlers.colorMap colorMap
IdleWrapper.api.game.handlers.broadcastHandler broadcastHandler, null
do loadAllPlayers
catch e
console.error e
registerAllPlayers = ->
_.each hashes, (playerHashInList) ->
IdleWrapper.api.player.auth.register {identifier: playerHashInList, name: playerHash[playerHashInList]}, null
loadAllPlayers = ->
_.each hashes, (playerHash) ->
IdleWrapper.api.player.auth.login playerHash
adjustSpeed = ->
clearInterval IdleWrapper.api.gameInstance.playerManager.interval
IdleWrapper.api.gameInstance.playerManager.DELAY_INTERVAL = DELAY_INTERVAL
IdleWrapper.api.gameInstance.playerManager.beginGameLoop()
gameLoop = ->
doActionPerMember = (arr, action) ->
for i in [0...arr.length]
setTimeout (player, i) ->
action player
, DELAY_INTERVAL/arr.length*i, arr[i]
interval = setInterval ->
doActionPerMember hashes, IdleWrapper.api.player.takeTurn
, DELAY_INTERVAL
interactiveSession = ->
readline = require 'readline'
cli = readline.createInterface process.stdin, process.stdout, null
cli.on 'line', (line) ->
clearInterval IdleWrapper.api.gameInstance.playerManager.interval
clearInterval interval
cli.setPrompt "halted: c to continue> "
if line is ""
cli.prompt()
else if line is "c"
do IdleWrapper.api.gameInstance.playerManager.beginGameLoop
do gameLoop
else
try
broadcast "Evaluating `#{line}`"
result = eval line
broadcast result
result?.then? (res) -> broadcast res.message
catch error
console.error error.name, error.message, error.stack
cli.prompt()
## ## ## ## ## ## ## ##
## other functions ##
watchIdleFiles = ->
loadFunction = _.debounce loadIdle, 100
watch idlePath, {}, ->
files = finder.from(idlePath).findFiles "*.coffee"
_.each files, (file) ->
delete require.cache[file]
clearInterval IdleWrapper.api.gameInstance.playerManager.interval
clearInterval interval
loadFunction()
#####################
## ## initial load ## ##
do buildHashes
do loadIdle
do registerAllPlayers
do loadAllPlayers
do watchIdleFiles
do adjustSpeed
do gameLoop
do interactiveSession
|
[
{
"context": "n for duelyst-dev.firebaseio.com\nfirebaseToken = 'AxTA1RfsIzL2hDUOmYyFXQ9VAjnc86EqZ4n8LvxJ'\nfbRef = new Firebase('https://duelyst-alpha.fire",
"end": 289,
"score": 0.9940870404243469,
"start": 249,
"tag": "KEY",
"value": "AxTA1RfsIzL2hDUOmYyFXQ9VAjnc86EqZ4n8LvxJ"
},
{
... | scripts/add_index.coffee | willroberts/duelyst | 5 | Firebase = require 'firebase'
_ = require 'underscore'
fbUtil = require '../app/common/utils/utils_firebase.js'
# Configuration object
config = require '../config/config.js'
# Firebase secure token for duelyst-dev.firebaseio.com
firebaseToken = 'AxTA1RfsIzL2hDUOmYyFXQ9VAjnc86EqZ4n8LvxJ'
fbRef = new Firebase('https://duelyst-alpha.firebaseio.com/')
#fbRef = new Firebase(config.get('firebase'))
fbRef.auth firebaseToken, (error) ->
if error
# Failed to connect to our secure user database
console.log("Error authenticating against our database.")
process.exit(1)
fbAuthRef = new Firebase('https://duelyst-alpha-auth.firebaseio.com/')
authToken = '3UyCSPCLvTBR7zSzUL4Z0hkJB1YcrXK86SNcB3pE'
# Our Firebase with auth data is read-only by admin so we authenticate
# auth is cached by Firebase for future requests
fbAuthRef.auth authToken, (error) ->
if error
# Failed to connect to our secure user database
console.log("Error authenticating against our user database.")
process.exit(1)
getAllEmails = (cb) ->
fbAuthRef.child('user').once 'value', (snapshot) ->
data = snapshot.val()
emails = {}
for user of data
email = data[user].email
emails[email] = user
cb(emails)
createIndex = (email, id) ->
escapedEmail = fbUtil.escapeEmail(email)
fbRef.child('email-index').child(escapedEmail).set id, (error) ->
if error
console("Failed to set index for: " + email)
else
console.log("Index created for: " + email)
getAllEmails (result) ->
_.map result, (id, email) ->
createIndex(email, id)
| 41288 | Firebase = require 'firebase'
_ = require 'underscore'
fbUtil = require '../app/common/utils/utils_firebase.js'
# Configuration object
config = require '../config/config.js'
# Firebase secure token for duelyst-dev.firebaseio.com
firebaseToken = '<KEY>'
fbRef = new Firebase('https://duelyst-alpha.firebaseio.com/')
#fbRef = new Firebase(config.get('firebase'))
fbRef.auth firebaseToken, (error) ->
if error
# Failed to connect to our secure user database
console.log("Error authenticating against our database.")
process.exit(1)
fbAuthRef = new Firebase('https://duelyst-alpha-auth.firebaseio.com/')
authToken = '<KEY>'
# Our Firebase with auth data is read-only by admin so we authenticate
# auth is cached by Firebase for future requests
fbAuthRef.auth authToken, (error) ->
if error
# Failed to connect to our secure user database
console.log("Error authenticating against our user database.")
process.exit(1)
getAllEmails = (cb) ->
fbAuthRef.child('user').once 'value', (snapshot) ->
data = snapshot.val()
emails = {}
for user of data
email = data[user].email
emails[email] = user
cb(emails)
createIndex = (email, id) ->
escapedEmail = fbUtil.escapeEmail(email)
fbRef.child('email-index').child(escapedEmail).set id, (error) ->
if error
console("Failed to set index for: " + email)
else
console.log("Index created for: " + email)
getAllEmails (result) ->
_.map result, (id, email) ->
createIndex(email, id)
| true | Firebase = require 'firebase'
_ = require 'underscore'
fbUtil = require '../app/common/utils/utils_firebase.js'
# Configuration object
config = require '../config/config.js'
# Firebase secure token for duelyst-dev.firebaseio.com
firebaseToken = 'PI:KEY:<KEY>END_PI'
fbRef = new Firebase('https://duelyst-alpha.firebaseio.com/')
#fbRef = new Firebase(config.get('firebase'))
fbRef.auth firebaseToken, (error) ->
if error
# Failed to connect to our secure user database
console.log("Error authenticating against our database.")
process.exit(1)
fbAuthRef = new Firebase('https://duelyst-alpha-auth.firebaseio.com/')
authToken = 'PI:KEY:<KEY>END_PI'
# Our Firebase with auth data is read-only by admin so we authenticate
# auth is cached by Firebase for future requests
fbAuthRef.auth authToken, (error) ->
if error
# Failed to connect to our secure user database
console.log("Error authenticating against our user database.")
process.exit(1)
getAllEmails = (cb) ->
fbAuthRef.child('user').once 'value', (snapshot) ->
data = snapshot.val()
emails = {}
for user of data
email = data[user].email
emails[email] = user
cb(emails)
createIndex = (email, id) ->
escapedEmail = fbUtil.escapeEmail(email)
fbRef.child('email-index').child(escapedEmail).set id, (error) ->
if error
console("Failed to set index for: " + email)
else
console.log("Index created for: " + email)
getAllEmails (result) ->
_.map result, (id, email) ->
createIndex(email, id)
|
[
{
"context": " uuid: 'some-uuid'\n token: 'some-token'\n rawData: '{}'\n\n @sut.do request",
"end": 935,
"score": 0.8297185301780701,
"start": 925,
"tag": "PASSWORD",
"value": "some-token"
},
{
"context": "nManager.verifyToken { uuid: 'some-uuid'... | test/migrate-root-token-spec.coffee | octoblu/meshblu-core-task-migrate-root-token | 0 | Datastore = require 'meshblu-core-datastore'
TokenManager = require 'meshblu-core-manager-token'
mongojs = require 'mongojs'
MigrateRootToken = require '../'
describe 'MigrateRootToken', ->
beforeEach (done) ->
@uuidAliasResolver = resolve: (uuid, callback) => callback null, uuid
database = mongojs 'meshblu-core-task-check-token', ['tokens']
@datastore = new Datastore
database: database
collection: 'tokens'
database.tokens.remove done
beforeEach ->
pepper = 'totally-a-secret'
@sut = new MigrateRootToken { @datastore, pepper, @uuidAliasResolver }
@tokenManager = new TokenManager { @datastore, pepper, @uuidAliasResolver }
describe '->do', ->
describe 'when called', ->
beforeEach (done) ->
request =
metadata:
responseId: 'its-electric'
auth:
uuid: 'some-uuid'
token: 'some-token'
rawData: '{}'
@sut.do request, (error, @response) => done error
it 'should return a 204', ->
expectedResponse =
metadata:
responseId: 'its-electric'
code: 204
status: 'No Content'
expect(@response).to.deep.equal expectedResponse
it 'should be a valid token', (done) ->
@tokenManager.verifyToken { uuid: 'some-uuid', token: 'some-token' }, (error, valid) =>
return done error if error?
expect(valid).to.be.true
done()
it 'should have root: true in the database', (done) ->
@datastore.findOne { uuid: 'some-uuid' }, (error, record) =>
return done error if error?
expect(record.root).to.be.true
done()
| 184685 | Datastore = require 'meshblu-core-datastore'
TokenManager = require 'meshblu-core-manager-token'
mongojs = require 'mongojs'
MigrateRootToken = require '../'
describe 'MigrateRootToken', ->
beforeEach (done) ->
@uuidAliasResolver = resolve: (uuid, callback) => callback null, uuid
database = mongojs 'meshblu-core-task-check-token', ['tokens']
@datastore = new Datastore
database: database
collection: 'tokens'
database.tokens.remove done
beforeEach ->
pepper = 'totally-a-secret'
@sut = new MigrateRootToken { @datastore, pepper, @uuidAliasResolver }
@tokenManager = new TokenManager { @datastore, pepper, @uuidAliasResolver }
describe '->do', ->
describe 'when called', ->
beforeEach (done) ->
request =
metadata:
responseId: 'its-electric'
auth:
uuid: 'some-uuid'
token: '<PASSWORD>'
rawData: '{}'
@sut.do request, (error, @response) => done error
it 'should return a 204', ->
expectedResponse =
metadata:
responseId: 'its-electric'
code: 204
status: 'No Content'
expect(@response).to.deep.equal expectedResponse
it 'should be a valid token', (done) ->
@tokenManager.verifyToken { uuid: 'some-uuid', token: '<PASSWORD>' }, (error, valid) =>
return done error if error?
expect(valid).to.be.true
done()
it 'should have root: true in the database', (done) ->
@datastore.findOne { uuid: 'some-uuid' }, (error, record) =>
return done error if error?
expect(record.root).to.be.true
done()
| true | Datastore = require 'meshblu-core-datastore'
TokenManager = require 'meshblu-core-manager-token'
mongojs = require 'mongojs'
MigrateRootToken = require '../'
describe 'MigrateRootToken', ->
beforeEach (done) ->
@uuidAliasResolver = resolve: (uuid, callback) => callback null, uuid
database = mongojs 'meshblu-core-task-check-token', ['tokens']
@datastore = new Datastore
database: database
collection: 'tokens'
database.tokens.remove done
beforeEach ->
pepper = 'totally-a-secret'
@sut = new MigrateRootToken { @datastore, pepper, @uuidAliasResolver }
@tokenManager = new TokenManager { @datastore, pepper, @uuidAliasResolver }
describe '->do', ->
describe 'when called', ->
beforeEach (done) ->
request =
metadata:
responseId: 'its-electric'
auth:
uuid: 'some-uuid'
token: 'PI:PASSWORD:<PASSWORD>END_PI'
rawData: '{}'
@sut.do request, (error, @response) => done error
it 'should return a 204', ->
expectedResponse =
metadata:
responseId: 'its-electric'
code: 204
status: 'No Content'
expect(@response).to.deep.equal expectedResponse
it 'should be a valid token', (done) ->
@tokenManager.verifyToken { uuid: 'some-uuid', token: 'PI:PASSWORD:<PASSWORD>END_PI' }, (error, valid) =>
return done error if error?
expect(valid).to.be.true
done()
it 'should have root: true in the database', (done) ->
@datastore.findOne { uuid: 'some-uuid' }, (error, record) =>
return done error if error?
expect(record.root).to.be.true
done()
|
[
{
"context": "ntroller extends Batman.Controller\n routingKey: 'game'\n\n constructor: ->\n super\n @iceServers = [",
"end": 73,
"score": 0.9770106077194214,
"start": 69,
"tag": "KEY",
"value": "game"
}
] | coffee/controllers/game_controller.coffee | frustra/tetrus | 0 | class Tetrus.GameController extends Batman.Controller
routingKey: 'game'
constructor: ->
super
@iceServers = ['stun:stun.l.google.com:19302']
play: ->
@peer = new Tetrus.Peer(Tetrus.get('peer'))
@game = new Tetrus.Game
@connectionParams = [
{ DtlsSrtpKeyAgreement: true }
]
if @peer.get('session').type == 'rtp'
@connectionParams.push { RtpDataChannels: true }
else if @peer.get('session').type != 'sctp'
console.error "Invalid connection type", @peer.get('session').type
Tetrus.Flash.error("Invalid connection type")
return
@set('isServer', @peer.get('isServer'))
@_negotiate()
start: ->
@pollForTimeout()
@game.create()
@game.player.on 'piece:change', =>
piece = @game.player.piece
storage = piece.storageWithAlpha(Tetrus.Piece.peerAlpha)
@send(type: 'piece:update', piece: { storage: storage, position: piece.position, width: piece.width, height: piece.height })
@game.player.on 'piece:move', =>
@send(type: 'piece:move', piece: { position: @game.player.piece.position })
@game.on 'piece:place', =>
if @isServer
@send(type: 'board:update', board: { storage: @game.board.storage })
else
piece = @game.player.piece
@send(type: 'piece:place', piece: { storage: piece.storage, position: piece.position, width: piece.width, height: piece.height })
@game.on 'game:over', =>
@send(type: 'game:lose')
@stop()
setTimeout(@disconnect, 5000)
$(document).on('keydown.game', @keydown).on('keyup.game', @keyup)
@game.player.setNextPiece()
@game.fire('game:ready')
@game.start()
stop: ->
@game.stop()
$(document).off('keydown.game').off('keyup.game')
disconnect: =>
@set('connecting', false)
@set('connected', false)
@peerChannel?.close()
@peerConnection?.close()
delete @peerChannel
delete @peerConnection
if @_onServerMessage
Tetrus.off 'socket:message', @_onServerMessage
delete @_onServerMessage
Tetrus.conn.sendJSON(command: 'game:end')
Batman.redirect('/lobby')
_onMessage: (event) ->
@lastResponse = new Date().getTime()
message = JSON.parse(event.data)
switch message.type
when "ping"
@send(type: 'pong', timeStamp: event.timeStamp)
when "pong"
@set('rtt', event.timeStamp - message.timeStamp)
when "board:update"
@game.board.apply(message.board)
when "piece:update"
@game.peer.piece.apply(message.piece)
when "piece:move"
@game.peer.piece.position = message.piece.position
when "piece:place"
piece = new Tetrus.Piece
piece.apply(message.piece)
@game.placePiece(piece)
when "score"
@game.speed += message.deltaSpeed
@game.score += message.deltaScore
@game.board.removeLine(line) for line in message.lines
when "game:lose"
@game.lose()
else
console.error(message)
Tetrus.Flash.error("Communication Error")
@disconnect()
return
send: (message) ->
try
@peerChannel.send(JSON.stringify(message))
catch
Tetrus.Flash.error("Communication Error")
@stop()
@disconnect()
pollForTimeout: ->
lastCheck = 0
@lastResponse = new Date().getTime()
check = =>
return unless @connected
if @lastResponse < lastCheck - 500
Tetrus.Flash.error("Connection timed out")
@disconnect()
else
lastCheck = new Date().getTime()
setTimeout check, 2000
@send(type: 'ping')
check()
keydown: (event) => event.preventDefault() unless @game.keys.keyEvent(event.keyCode, true)
keyup: (event) => event.preventDefault() unless @game.keys.keyEvent(event.keyCode, false)
_bindPeerChannel: (channel) ->
@peerChannel = channel
channel.onmessage = (event) => @_onMessage(event)
channel.onopen = =>
Batman.developer.log("peer channel opened with protocol", channel.protocol)
@set('connecting', false)
@set('connected', true)
@start()
channel.onclose = =>
Batman.developer.log("peer channel closed")
@disconnect()
channel.onerror = (error) =>
Batman.developer.log("peer channel errored:", error)
@disconnect()
_negotiate: ->
@set('connecting', true)
@set('connected', false)
iceServers = ({ url: x } for x in @iceServers)
@peerConnection = new RTCPeerConnection({iceServers}, {optional: @connectionParams})
candidates = []
@peerConnection.onicecandidate = (event) =>
if candidate = event.candidate
Batman.developer.log("local candidate", candidate.candidate)
candidates.push(candidate)
@peerConnection.ondatachannel = (event) => @_bindPeerChannel(event.channel)
if @isServer
@_bindPeerChannel(@peerConnection.createDataChannel('RTCDataChannel'))
@peerConnection.createOffer (description) =>
@peerConnection.setLocalDescription(description)
Batman.developer.log("local sdp", description.sdp)
Tetrus.conn.sendJSON(command: 'peer:offer', description: description, username: @peer.get('username'))
, (err) =>
Tetrus.Flash.error("Failed to negotiate connection")
@disconnect()
Tetrus.on 'socket:message', @_onServerMessage = (message) =>
setRemoteDescription = =>
description = new RTCSessionDescription(message.description)
@peerConnection.setRemoteDescription(description)
Batman.developer.log("remote sdp", description.sdp)
if @connecting
switch message.type
when "peer:offer"
setRemoteDescription()
@peerConnection.createAnswer (description) =>
@peerConnection.setLocalDescription(description)
Batman.developer.log("local sdp", description.sdp)
Tetrus.conn.sendJSON(command: 'peer:answer', description: description)
, (err) =>
Tetrus.Flash.error("Failed to negotiate connection")
@disconnect()
when "peer:answer"
setRemoteDescription()
Tetrus.conn.sendJSON(command: 'peer:handshake')
when "peer:handshake:complete"
candidates.push = (candidate) ->
Tetrus.conn.sendJSON(command: 'peer:candidate', candidate: candidate)
candidates.push(candidate) for candidate in candidates
candidates.length = 0
when "peer:candidate"
candidate = new RTCIceCandidate(message.candidate)
@peerConnection.addIceCandidate(candidate)
Batman.developer.log("remote candidate", candidate.candidate)
switch message.type
when "game:ended"
if message.reason
Tetrus.Flash.message(message.reason)
@disconnect()
| 144132 | class Tetrus.GameController extends Batman.Controller
routingKey: '<KEY>'
constructor: ->
super
@iceServers = ['stun:stun.l.google.com:19302']
play: ->
@peer = new Tetrus.Peer(Tetrus.get('peer'))
@game = new Tetrus.Game
@connectionParams = [
{ DtlsSrtpKeyAgreement: true }
]
if @peer.get('session').type == 'rtp'
@connectionParams.push { RtpDataChannels: true }
else if @peer.get('session').type != 'sctp'
console.error "Invalid connection type", @peer.get('session').type
Tetrus.Flash.error("Invalid connection type")
return
@set('isServer', @peer.get('isServer'))
@_negotiate()
start: ->
@pollForTimeout()
@game.create()
@game.player.on 'piece:change', =>
piece = @game.player.piece
storage = piece.storageWithAlpha(Tetrus.Piece.peerAlpha)
@send(type: 'piece:update', piece: { storage: storage, position: piece.position, width: piece.width, height: piece.height })
@game.player.on 'piece:move', =>
@send(type: 'piece:move', piece: { position: @game.player.piece.position })
@game.on 'piece:place', =>
if @isServer
@send(type: 'board:update', board: { storage: @game.board.storage })
else
piece = @game.player.piece
@send(type: 'piece:place', piece: { storage: piece.storage, position: piece.position, width: piece.width, height: piece.height })
@game.on 'game:over', =>
@send(type: 'game:lose')
@stop()
setTimeout(@disconnect, 5000)
$(document).on('keydown.game', @keydown).on('keyup.game', @keyup)
@game.player.setNextPiece()
@game.fire('game:ready')
@game.start()
stop: ->
@game.stop()
$(document).off('keydown.game').off('keyup.game')
disconnect: =>
@set('connecting', false)
@set('connected', false)
@peerChannel?.close()
@peerConnection?.close()
delete @peerChannel
delete @peerConnection
if @_onServerMessage
Tetrus.off 'socket:message', @_onServerMessage
delete @_onServerMessage
Tetrus.conn.sendJSON(command: 'game:end')
Batman.redirect('/lobby')
_onMessage: (event) ->
@lastResponse = new Date().getTime()
message = JSON.parse(event.data)
switch message.type
when "ping"
@send(type: 'pong', timeStamp: event.timeStamp)
when "pong"
@set('rtt', event.timeStamp - message.timeStamp)
when "board:update"
@game.board.apply(message.board)
when "piece:update"
@game.peer.piece.apply(message.piece)
when "piece:move"
@game.peer.piece.position = message.piece.position
when "piece:place"
piece = new Tetrus.Piece
piece.apply(message.piece)
@game.placePiece(piece)
when "score"
@game.speed += message.deltaSpeed
@game.score += message.deltaScore
@game.board.removeLine(line) for line in message.lines
when "game:lose"
@game.lose()
else
console.error(message)
Tetrus.Flash.error("Communication Error")
@disconnect()
return
send: (message) ->
try
@peerChannel.send(JSON.stringify(message))
catch
Tetrus.Flash.error("Communication Error")
@stop()
@disconnect()
pollForTimeout: ->
lastCheck = 0
@lastResponse = new Date().getTime()
check = =>
return unless @connected
if @lastResponse < lastCheck - 500
Tetrus.Flash.error("Connection timed out")
@disconnect()
else
lastCheck = new Date().getTime()
setTimeout check, 2000
@send(type: 'ping')
check()
keydown: (event) => event.preventDefault() unless @game.keys.keyEvent(event.keyCode, true)
keyup: (event) => event.preventDefault() unless @game.keys.keyEvent(event.keyCode, false)
_bindPeerChannel: (channel) ->
@peerChannel = channel
channel.onmessage = (event) => @_onMessage(event)
channel.onopen = =>
Batman.developer.log("peer channel opened with protocol", channel.protocol)
@set('connecting', false)
@set('connected', true)
@start()
channel.onclose = =>
Batman.developer.log("peer channel closed")
@disconnect()
channel.onerror = (error) =>
Batman.developer.log("peer channel errored:", error)
@disconnect()
_negotiate: ->
@set('connecting', true)
@set('connected', false)
iceServers = ({ url: x } for x in @iceServers)
@peerConnection = new RTCPeerConnection({iceServers}, {optional: @connectionParams})
candidates = []
@peerConnection.onicecandidate = (event) =>
if candidate = event.candidate
Batman.developer.log("local candidate", candidate.candidate)
candidates.push(candidate)
@peerConnection.ondatachannel = (event) => @_bindPeerChannel(event.channel)
if @isServer
@_bindPeerChannel(@peerConnection.createDataChannel('RTCDataChannel'))
@peerConnection.createOffer (description) =>
@peerConnection.setLocalDescription(description)
Batman.developer.log("local sdp", description.sdp)
Tetrus.conn.sendJSON(command: 'peer:offer', description: description, username: @peer.get('username'))
, (err) =>
Tetrus.Flash.error("Failed to negotiate connection")
@disconnect()
Tetrus.on 'socket:message', @_onServerMessage = (message) =>
setRemoteDescription = =>
description = new RTCSessionDescription(message.description)
@peerConnection.setRemoteDescription(description)
Batman.developer.log("remote sdp", description.sdp)
if @connecting
switch message.type
when "peer:offer"
setRemoteDescription()
@peerConnection.createAnswer (description) =>
@peerConnection.setLocalDescription(description)
Batman.developer.log("local sdp", description.sdp)
Tetrus.conn.sendJSON(command: 'peer:answer', description: description)
, (err) =>
Tetrus.Flash.error("Failed to negotiate connection")
@disconnect()
when "peer:answer"
setRemoteDescription()
Tetrus.conn.sendJSON(command: 'peer:handshake')
when "peer:handshake:complete"
candidates.push = (candidate) ->
Tetrus.conn.sendJSON(command: 'peer:candidate', candidate: candidate)
candidates.push(candidate) for candidate in candidates
candidates.length = 0
when "peer:candidate"
candidate = new RTCIceCandidate(message.candidate)
@peerConnection.addIceCandidate(candidate)
Batman.developer.log("remote candidate", candidate.candidate)
switch message.type
when "game:ended"
if message.reason
Tetrus.Flash.message(message.reason)
@disconnect()
| true | class Tetrus.GameController extends Batman.Controller
routingKey: 'PI:KEY:<KEY>END_PI'
constructor: ->
super
@iceServers = ['stun:stun.l.google.com:19302']
play: ->
@peer = new Tetrus.Peer(Tetrus.get('peer'))
@game = new Tetrus.Game
@connectionParams = [
{ DtlsSrtpKeyAgreement: true }
]
if @peer.get('session').type == 'rtp'
@connectionParams.push { RtpDataChannels: true }
else if @peer.get('session').type != 'sctp'
console.error "Invalid connection type", @peer.get('session').type
Tetrus.Flash.error("Invalid connection type")
return
@set('isServer', @peer.get('isServer'))
@_negotiate()
start: ->
@pollForTimeout()
@game.create()
@game.player.on 'piece:change', =>
piece = @game.player.piece
storage = piece.storageWithAlpha(Tetrus.Piece.peerAlpha)
@send(type: 'piece:update', piece: { storage: storage, position: piece.position, width: piece.width, height: piece.height })
@game.player.on 'piece:move', =>
@send(type: 'piece:move', piece: { position: @game.player.piece.position })
@game.on 'piece:place', =>
if @isServer
@send(type: 'board:update', board: { storage: @game.board.storage })
else
piece = @game.player.piece
@send(type: 'piece:place', piece: { storage: piece.storage, position: piece.position, width: piece.width, height: piece.height })
@game.on 'game:over', =>
@send(type: 'game:lose')
@stop()
setTimeout(@disconnect, 5000)
$(document).on('keydown.game', @keydown).on('keyup.game', @keyup)
@game.player.setNextPiece()
@game.fire('game:ready')
@game.start()
stop: ->
@game.stop()
$(document).off('keydown.game').off('keyup.game')
disconnect: =>
@set('connecting', false)
@set('connected', false)
@peerChannel?.close()
@peerConnection?.close()
delete @peerChannel
delete @peerConnection
if @_onServerMessage
Tetrus.off 'socket:message', @_onServerMessage
delete @_onServerMessage
Tetrus.conn.sendJSON(command: 'game:end')
Batman.redirect('/lobby')
_onMessage: (event) ->
@lastResponse = new Date().getTime()
message = JSON.parse(event.data)
switch message.type
when "ping"
@send(type: 'pong', timeStamp: event.timeStamp)
when "pong"
@set('rtt', event.timeStamp - message.timeStamp)
when "board:update"
@game.board.apply(message.board)
when "piece:update"
@game.peer.piece.apply(message.piece)
when "piece:move"
@game.peer.piece.position = message.piece.position
when "piece:place"
piece = new Tetrus.Piece
piece.apply(message.piece)
@game.placePiece(piece)
when "score"
@game.speed += message.deltaSpeed
@game.score += message.deltaScore
@game.board.removeLine(line) for line in message.lines
when "game:lose"
@game.lose()
else
console.error(message)
Tetrus.Flash.error("Communication Error")
@disconnect()
return
send: (message) ->
try
@peerChannel.send(JSON.stringify(message))
catch
Tetrus.Flash.error("Communication Error")
@stop()
@disconnect()
pollForTimeout: ->
lastCheck = 0
@lastResponse = new Date().getTime()
check = =>
return unless @connected
if @lastResponse < lastCheck - 500
Tetrus.Flash.error("Connection timed out")
@disconnect()
else
lastCheck = new Date().getTime()
setTimeout check, 2000
@send(type: 'ping')
check()
keydown: (event) => event.preventDefault() unless @game.keys.keyEvent(event.keyCode, true)
keyup: (event) => event.preventDefault() unless @game.keys.keyEvent(event.keyCode, false)
_bindPeerChannel: (channel) ->
@peerChannel = channel
channel.onmessage = (event) => @_onMessage(event)
channel.onopen = =>
Batman.developer.log("peer channel opened with protocol", channel.protocol)
@set('connecting', false)
@set('connected', true)
@start()
channel.onclose = =>
Batman.developer.log("peer channel closed")
@disconnect()
channel.onerror = (error) =>
Batman.developer.log("peer channel errored:", error)
@disconnect()
_negotiate: ->
@set('connecting', true)
@set('connected', false)
iceServers = ({ url: x } for x in @iceServers)
@peerConnection = new RTCPeerConnection({iceServers}, {optional: @connectionParams})
candidates = []
@peerConnection.onicecandidate = (event) =>
if candidate = event.candidate
Batman.developer.log("local candidate", candidate.candidate)
candidates.push(candidate)
@peerConnection.ondatachannel = (event) => @_bindPeerChannel(event.channel)
if @isServer
@_bindPeerChannel(@peerConnection.createDataChannel('RTCDataChannel'))
@peerConnection.createOffer (description) =>
@peerConnection.setLocalDescription(description)
Batman.developer.log("local sdp", description.sdp)
Tetrus.conn.sendJSON(command: 'peer:offer', description: description, username: @peer.get('username'))
, (err) =>
Tetrus.Flash.error("Failed to negotiate connection")
@disconnect()
Tetrus.on 'socket:message', @_onServerMessage = (message) =>
setRemoteDescription = =>
description = new RTCSessionDescription(message.description)
@peerConnection.setRemoteDescription(description)
Batman.developer.log("remote sdp", description.sdp)
if @connecting
switch message.type
when "peer:offer"
setRemoteDescription()
@peerConnection.createAnswer (description) =>
@peerConnection.setLocalDescription(description)
Batman.developer.log("local sdp", description.sdp)
Tetrus.conn.sendJSON(command: 'peer:answer', description: description)
, (err) =>
Tetrus.Flash.error("Failed to negotiate connection")
@disconnect()
when "peer:answer"
setRemoteDescription()
Tetrus.conn.sendJSON(command: 'peer:handshake')
when "peer:handshake:complete"
candidates.push = (candidate) ->
Tetrus.conn.sendJSON(command: 'peer:candidate', candidate: candidate)
candidates.push(candidate) for candidate in candidates
candidates.length = 0
when "peer:candidate"
candidate = new RTCIceCandidate(message.candidate)
@peerConnection.addIceCandidate(candidate)
Batman.developer.log("remote candidate", candidate.candidate)
switch message.type
when "game:ended"
if message.reason
Tetrus.Flash.message(message.reason)
@disconnect()
|
[
{
"context": "class App.User extends Tower.Model\n @field 'firstName'\n @field 'lastName'\n @field 'createdAt', type: ",
"end": 54,
"score": 0.9765043258666992,
"start": 45,
"tag": "NAME",
"value": "firstName"
},
{
"context": "xtends Tower.Model\n @field 'firstName'\n @field 'la... | test/example/app/models/user.coffee | jivagoalves/tower | 1 | class App.User extends Tower.Model
@field 'firstName'
@field 'lastName'
@field 'createdAt', type: 'Time', default: -> new Date()
@field 'likes', type: 'Integer', default: 0
@field 'tags', type: ['Array'], default: []
@field 'admin', type: 'Boolean', default: false
@field 'rating', type: 'Float', default: 2.5
@field 'meta', type: 'Object'
@scope 'byBaldwin', firstName: '=~': 'Baldwin'
@scope 'thisWeek', @where createdAt: '>=': -> require('moment')().subtract('days', 7)
# need to change this...
@hasMany 'posts', type: 'Page', idCache: true # postIds
@hasMany 'articles', type: 'Post', idCache: true
@hasMany 'comments', source: 'commenter'
@hasOne 'address'
@hasMany 'memberships'
@hasMany 'groups', through: 'memberships'
@hasMany 'polymorphicMemberships', as: 'joinable', type: 'Membership'
@hasMany 'cachedMemberships', type: 'Membership', idCache: true
#@hasMany 'dependentMemberships', type: 'DependentMembership', dependent: true
@validates 'firstName', presence: true
@timestamps()
class App.Admin extends App.User
@scope 'subclassNamedScope', likes: '>': 0 | 188522 | class App.User extends Tower.Model
@field '<NAME>'
@field '<NAME>'
@field 'createdAt', type: 'Time', default: -> new Date()
@field 'likes', type: 'Integer', default: 0
@field 'tags', type: ['Array'], default: []
@field 'admin', type: 'Boolean', default: false
@field 'rating', type: 'Float', default: 2.5
@field 'meta', type: 'Object'
@scope 'byBaldwin', firstName: '=~': '<NAME>'
@scope 'thisWeek', @where createdAt: '>=': -> require('moment')().subtract('days', 7)
# need to change this...
@hasMany 'posts', type: 'Page', idCache: true # postIds
@hasMany 'articles', type: 'Post', idCache: true
@hasMany 'comments', source: 'commenter'
@hasOne 'address'
@hasMany 'memberships'
@hasMany 'groups', through: 'memberships'
@hasMany 'polymorphicMemberships', as: 'joinable', type: 'Membership'
@hasMany 'cachedMemberships', type: 'Membership', idCache: true
#@hasMany 'dependentMemberships', type: 'DependentMembership', dependent: true
@validates 'firstName', presence: true
@timestamps()
class App.Admin extends App.User
@scope 'subclassNamedScope', likes: '>': 0 | true | class App.User extends Tower.Model
@field 'PI:NAME:<NAME>END_PI'
@field 'PI:NAME:<NAME>END_PI'
@field 'createdAt', type: 'Time', default: -> new Date()
@field 'likes', type: 'Integer', default: 0
@field 'tags', type: ['Array'], default: []
@field 'admin', type: 'Boolean', default: false
@field 'rating', type: 'Float', default: 2.5
@field 'meta', type: 'Object'
@scope 'byBaldwin', firstName: '=~': 'PI:NAME:<NAME>END_PI'
@scope 'thisWeek', @where createdAt: '>=': -> require('moment')().subtract('days', 7)
# need to change this...
@hasMany 'posts', type: 'Page', idCache: true # postIds
@hasMany 'articles', type: 'Post', idCache: true
@hasMany 'comments', source: 'commenter'
@hasOne 'address'
@hasMany 'memberships'
@hasMany 'groups', through: 'memberships'
@hasMany 'polymorphicMemberships', as: 'joinable', type: 'Membership'
@hasMany 'cachedMemberships', type: 'Membership', idCache: true
#@hasMany 'dependentMemberships', type: 'DependentMembership', dependent: true
@validates 'firstName', presence: true
@timestamps()
class App.Admin extends App.User
@scope 'subclassNamedScope', likes: '>': 0 |
[
{
"context": "body:\n essid: 'Testing'\n password: 'Testing'\n fetch '/ap', data\n .then (res) ->\n ",
"end": 213,
"score": 0.9994117617607117,
"start": 206,
"tag": "PASSWORD",
"value": "Testing"
}
] | device/test/20-ap.coffee | twhtanghk/docker.esp8266 | 1 | describe 'ap', ->
it 'GET /ap', ->
fetch '/ap'
.then ok
it 'PUT /ap check min password length', ->
data = opts
method: 'PUT'
body:
essid: 'Testing'
password: 'Testing'
fetch '/ap', data
.then (res) ->
res.json()
.then (res) ->
expect res
.to.equal 'password min length 8'
| 9071 | describe 'ap', ->
it 'GET /ap', ->
fetch '/ap'
.then ok
it 'PUT /ap check min password length', ->
data = opts
method: 'PUT'
body:
essid: 'Testing'
password: '<PASSWORD>'
fetch '/ap', data
.then (res) ->
res.json()
.then (res) ->
expect res
.to.equal 'password min length 8'
| true | describe 'ap', ->
it 'GET /ap', ->
fetch '/ap'
.then ok
it 'PUT /ap check min password length', ->
data = opts
method: 'PUT'
body:
essid: 'Testing'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
fetch '/ap', data
.then (res) ->
res.json()
.then (res) ->
expect res
.to.equal 'password min length 8'
|
[
{
"context": "n')\n\n repo: Ember.belongsTo('Travis.Repo', key: 'repository_id')\n\n isPropertyLoaded: (key) ->\n if key == 'va",
"end": 210,
"score": 0.8652591705322266,
"start": 197,
"tag": "KEY",
"value": "repository_id"
}
] | assets/scripts/app/models/env_var.coffee | Acidburn0zzz/travis-web | 0 | require 'travis/model'
Travis.EnvVar = Travis.Model.extend
name: Ember.attr('string')
value: Ember.attr('string')
public: Ember.attr('boolean')
repo: Ember.belongsTo('Travis.Repo', key: 'repository_id')
isPropertyLoaded: (key) ->
if key == 'value'
return true
else
@_super(key)
| 46125 | require 'travis/model'
Travis.EnvVar = Travis.Model.extend
name: Ember.attr('string')
value: Ember.attr('string')
public: Ember.attr('boolean')
repo: Ember.belongsTo('Travis.Repo', key: '<KEY>')
isPropertyLoaded: (key) ->
if key == 'value'
return true
else
@_super(key)
| true | require 'travis/model'
Travis.EnvVar = Travis.Model.extend
name: Ember.attr('string')
value: Ember.attr('string')
public: Ember.attr('boolean')
repo: Ember.belongsTo('Travis.Repo', key: 'PI:KEY:<KEY>END_PI')
isPropertyLoaded: (key) ->
if key == 'value'
return true
else
@_super(key)
|
[
{
"context": ",\"fmq\":\"1457507365044_R\",\"ist\":\"\",\"is\":\"\",\"word\":\"途安\",\"sme\":\"\",\"fr\":\"\",\"cs\":\"2464434574,440997798\",\"ct",
"end": 1812,
"score": 0.9287292957305908,
"start": 1810,
"tag": "NAME",
"value": "途安"
}
] | test.coffee | react-china/router-as-view | 3 |
assert = require 'assert'
Immutable = require 'immutable'
pathUtil = require './src/path'
o = Immutable.Map()
fromJS = Immutable.fromJS
testTrimSlash = ->
console.log "* test on trim slash"
assert.equal pathUtil.trimSlash('/a/b/c'), 'a/b/c'
assert.equal pathUtil.trimSlash('/a/b/'), 'a/b'
testQueryParse = ->
console.log '* test on query parser'
result = pathUtil.parseQuery(o, fromJS('a=1&b=2'.split('&')))
expected = {a: '1', b: '2'}
assert.deepEqual result.toJS(), expected
testChineseQueryParse = ->
console.log '* test on Chinese query parser'
text = encodeURIComponent '中文'
result = pathUtil.parseQuery(o, fromJS("#{text}=#{text}".split('&')))
expected = {'中文': '中文'}
assert.deepEqual result.toJS(), expected
testLongQueryParse = ->
console.log '* test on long query parser'
longPath = 'ct=503316480&z=0&ipn=d&word=%E9%80%94%E5%AE%89&step_word=&pn=0&spn=0&di=1712569540&pi=&rn=1&tn=baiduimagedetail&is=&istype=2&ie=utf-8&oe=utf-8&in=&cl=2&lm=-1&st=-1&cs=2464434574%2C440997798&os=1619671487%2C69261469&simid=3496201219%2C355884747&adpicid=0&ln=1000&fr=&fmq=1457507365044_R&fm=&ic=0&s=undefined&se=&sme=&tab=0&width=&height=&face=undefined&ist=&jit=&cg=&bdtype=0&oriquery=&objurl=http%3A%2F%2Fphotocdn.sohu.com%2F20111102%2FImg324265977.jpg&fromurl=ippr_z2C%24qAzdH3FAzdH3F65ss_z%26e3Bf5i7_z%26e3Bv54AzdH3Fda8888adAzdH3Fgnd9dmcl0m_z%26e3Bfip4s&gsm=0'
result = pathUtil.parseQuery(o, fromJS(longPath.split('&')))
expected = {"gsm":"0","lm":"-1","st":"-1","ln":"1000","oriquery":"","adpicid":"0","cg":"","os":"1619671487,69261469","istype":"2","di":"1712569540","in":"","fromurl":"ippr_z2C$qAzdH3FAzdH3F65ss_z&e3Bf5i7_z&e3Bv54AzdH3Fda8888adAzdH3Fgnd9dmcl0m_z&e3Bfip4s","width":"","ipn":"d","fm":"","height":"","cl":"2","fmq":"1457507365044_R","ist":"","is":"","word":"途安","sme":"","fr":"","cs":"2464434574,440997798","ct":"503316480","spn":"0","simid":"3496201219,355884747","se":"","s":"undefined","jit":"","tab":"0","oe":"utf-8","objurl":"http://photocdn.sohu.com/20111102/Img324265977.jpg","pi":"","z":"0","ic":"0","tn":"baiduimagedetail","ie":"utf-8","rn":"1","bdtype":"0","step_word":"","face":"undefined","pn":"0"}
assert.deepEqual result.toJS(), expected
testMakeAddress = ->
console.log '* test make address'
routes = fromJS a: [], b: ['c']
route = fromJS
name: 'b'
data:
c: '1'
query:
a: 'x'
result = pathUtil.makeAddress route, routes
expected = '/b/1?a=x'
assert.equal result, expected
testMakeChineseAddress = ->
console.log '* test make chinese address'
routes = fromJS a: [], '中文': ['name']
route = fromJS
name: '中文'
data:
name: '中文'
query:
'中文': '中文'
result = pathUtil.makeAddress route, routes
expected = '/中文/中文?%E4%B8%AD%E6%96%87=%E4%B8%AD%E6%96%87'
assert.equal result, expected
# Run
exports.run = ->
testTrimSlash()
testQueryParse()
testChineseQueryParse()
testLongQueryParse()
testMakeAddress()
testMakeChineseAddress()
exports.run()
| 7299 |
assert = require 'assert'
Immutable = require 'immutable'
pathUtil = require './src/path'
o = Immutable.Map()
fromJS = Immutable.fromJS
testTrimSlash = ->
console.log "* test on trim slash"
assert.equal pathUtil.trimSlash('/a/b/c'), 'a/b/c'
assert.equal pathUtil.trimSlash('/a/b/'), 'a/b'
testQueryParse = ->
console.log '* test on query parser'
result = pathUtil.parseQuery(o, fromJS('a=1&b=2'.split('&')))
expected = {a: '1', b: '2'}
assert.deepEqual result.toJS(), expected
testChineseQueryParse = ->
console.log '* test on Chinese query parser'
text = encodeURIComponent '中文'
result = pathUtil.parseQuery(o, fromJS("#{text}=#{text}".split('&')))
expected = {'中文': '中文'}
assert.deepEqual result.toJS(), expected
testLongQueryParse = ->
console.log '* test on long query parser'
longPath = 'ct=503316480&z=0&ipn=d&word=%E9%80%94%E5%AE%89&step_word=&pn=0&spn=0&di=1712569540&pi=&rn=1&tn=baiduimagedetail&is=&istype=2&ie=utf-8&oe=utf-8&in=&cl=2&lm=-1&st=-1&cs=2464434574%2C440997798&os=1619671487%2C69261469&simid=3496201219%2C355884747&adpicid=0&ln=1000&fr=&fmq=1457507365044_R&fm=&ic=0&s=undefined&se=&sme=&tab=0&width=&height=&face=undefined&ist=&jit=&cg=&bdtype=0&oriquery=&objurl=http%3A%2F%2Fphotocdn.sohu.com%2F20111102%2FImg324265977.jpg&fromurl=ippr_z2C%24qAzdH3FAzdH3F65ss_z%26e3Bf5i7_z%26e3Bv54AzdH3Fda8888adAzdH3Fgnd9dmcl0m_z%26e3Bfip4s&gsm=0'
result = pathUtil.parseQuery(o, fromJS(longPath.split('&')))
expected = {"gsm":"0","lm":"-1","st":"-1","ln":"1000","oriquery":"","adpicid":"0","cg":"","os":"1619671487,69261469","istype":"2","di":"1712569540","in":"","fromurl":"ippr_z2C$qAzdH3FAzdH3F65ss_z&e3Bf5i7_z&e3Bv54AzdH3Fda8888adAzdH3Fgnd9dmcl0m_z&e3Bfip4s","width":"","ipn":"d","fm":"","height":"","cl":"2","fmq":"1457507365044_R","ist":"","is":"","word":"<NAME>","sme":"","fr":"","cs":"2464434574,440997798","ct":"503316480","spn":"0","simid":"3496201219,355884747","se":"","s":"undefined","jit":"","tab":"0","oe":"utf-8","objurl":"http://photocdn.sohu.com/20111102/Img324265977.jpg","pi":"","z":"0","ic":"0","tn":"baiduimagedetail","ie":"utf-8","rn":"1","bdtype":"0","step_word":"","face":"undefined","pn":"0"}
assert.deepEqual result.toJS(), expected
testMakeAddress = ->
console.log '* test make address'
routes = fromJS a: [], b: ['c']
route = fromJS
name: 'b'
data:
c: '1'
query:
a: 'x'
result = pathUtil.makeAddress route, routes
expected = '/b/1?a=x'
assert.equal result, expected
testMakeChineseAddress = ->
console.log '* test make chinese address'
routes = fromJS a: [], '中文': ['name']
route = fromJS
name: '中文'
data:
name: '中文'
query:
'中文': '中文'
result = pathUtil.makeAddress route, routes
expected = '/中文/中文?%E4%B8%AD%E6%96%87=%E4%B8%AD%E6%96%87'
assert.equal result, expected
# Run
exports.run = ->
testTrimSlash()
testQueryParse()
testChineseQueryParse()
testLongQueryParse()
testMakeAddress()
testMakeChineseAddress()
exports.run()
| true |
assert = require 'assert'
Immutable = require 'immutable'
pathUtil = require './src/path'
o = Immutable.Map()
fromJS = Immutable.fromJS
testTrimSlash = ->
console.log "* test on trim slash"
assert.equal pathUtil.trimSlash('/a/b/c'), 'a/b/c'
assert.equal pathUtil.trimSlash('/a/b/'), 'a/b'
testQueryParse = ->
console.log '* test on query parser'
result = pathUtil.parseQuery(o, fromJS('a=1&b=2'.split('&')))
expected = {a: '1', b: '2'}
assert.deepEqual result.toJS(), expected
testChineseQueryParse = ->
console.log '* test on Chinese query parser'
text = encodeURIComponent '中文'
result = pathUtil.parseQuery(o, fromJS("#{text}=#{text}".split('&')))
expected = {'中文': '中文'}
assert.deepEqual result.toJS(), expected
testLongQueryParse = ->
console.log '* test on long query parser'
longPath = 'ct=503316480&z=0&ipn=d&word=%E9%80%94%E5%AE%89&step_word=&pn=0&spn=0&di=1712569540&pi=&rn=1&tn=baiduimagedetail&is=&istype=2&ie=utf-8&oe=utf-8&in=&cl=2&lm=-1&st=-1&cs=2464434574%2C440997798&os=1619671487%2C69261469&simid=3496201219%2C355884747&adpicid=0&ln=1000&fr=&fmq=1457507365044_R&fm=&ic=0&s=undefined&se=&sme=&tab=0&width=&height=&face=undefined&ist=&jit=&cg=&bdtype=0&oriquery=&objurl=http%3A%2F%2Fphotocdn.sohu.com%2F20111102%2FImg324265977.jpg&fromurl=ippr_z2C%24qAzdH3FAzdH3F65ss_z%26e3Bf5i7_z%26e3Bv54AzdH3Fda8888adAzdH3Fgnd9dmcl0m_z%26e3Bfip4s&gsm=0'
result = pathUtil.parseQuery(o, fromJS(longPath.split('&')))
expected = {"gsm":"0","lm":"-1","st":"-1","ln":"1000","oriquery":"","adpicid":"0","cg":"","os":"1619671487,69261469","istype":"2","di":"1712569540","in":"","fromurl":"ippr_z2C$qAzdH3FAzdH3F65ss_z&e3Bf5i7_z&e3Bv54AzdH3Fda8888adAzdH3Fgnd9dmcl0m_z&e3Bfip4s","width":"","ipn":"d","fm":"","height":"","cl":"2","fmq":"1457507365044_R","ist":"","is":"","word":"PI:NAME:<NAME>END_PI","sme":"","fr":"","cs":"2464434574,440997798","ct":"503316480","spn":"0","simid":"3496201219,355884747","se":"","s":"undefined","jit":"","tab":"0","oe":"utf-8","objurl":"http://photocdn.sohu.com/20111102/Img324265977.jpg","pi":"","z":"0","ic":"0","tn":"baiduimagedetail","ie":"utf-8","rn":"1","bdtype":"0","step_word":"","face":"undefined","pn":"0"}
assert.deepEqual result.toJS(), expected
testMakeAddress = ->
console.log '* test make address'
routes = fromJS a: [], b: ['c']
route = fromJS
name: 'b'
data:
c: '1'
query:
a: 'x'
result = pathUtil.makeAddress route, routes
expected = '/b/1?a=x'
assert.equal result, expected
testMakeChineseAddress = ->
console.log '* test make chinese address'
routes = fromJS a: [], '中文': ['name']
route = fromJS
name: '中文'
data:
name: '中文'
query:
'中文': '中文'
result = pathUtil.makeAddress route, routes
expected = '/中文/中文?%E4%B8%AD%E6%96%87=%E4%B8%AD%E6%96%87'
assert.equal result, expected
# Run
exports.run = ->
testTrimSlash()
testQueryParse()
testChineseQueryParse()
testLongQueryParse()
testMakeAddress()
testMakeChineseAddress()
exports.run()
|
[
{
"context": "elper.defaultGateway.customer.create {firstName: 'John', lastName: 'Smith'}, (err, response) ->\n ",
"end": 422,
"score": 0.9998196959495544,
"start": 418,
"tag": "NAME",
"value": "John"
},
{
"context": "ay.customer.create {firstName: 'John', lastName: 'Smith'}, ... | spec/integration/braintree/customer_gateway_spec.coffee | StreamCo/braintree_node | 0 | require('../../spec_helper')
{_} = require('underscore')
{VenmoSdk} = require('../../../lib/braintree/test/venmo_sdk')
{Nonces} = require('../../../lib/braintree/test/nonces')
{Config} = require('../../../lib/braintree/config')
braintree = specHelper.braintree
describe "CustomerGateway", ->
describe "create", ->
it "creates a customer", (done) ->
specHelper.defaultGateway.customer.create {firstName: 'John', lastName: 'Smith'}, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'John')
assert.equal(response.customer.lastName, 'Smith')
done()
it "handles uft8 characters", (done) ->
specHelper.defaultGateway.customer.create {firstName: 'Jöhn', lastName: 'Smith'}, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'Jöhn')
assert.equal(response.customer.lastName, 'Smith')
done()
it "creates blank customers", (done) ->
specHelper.defaultGateway.customer.create {}, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
done()
it "stores custom fields", (done) ->
customerParams =
customFields:
storeMe: 'custom value'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.customFields.storeMe, 'custom value')
done()
context "and vaults a payment method", ->
it "creates customers with credit cards", (done) ->
customerParams =
firstName: 'John'
lastName: 'Smith'
creditCard:
number: '5105105105105100'
expirationDate: '05/2012'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'John')
assert.equal(response.customer.lastName, 'Smith')
assert.equal(response.customer.creditCards.length, 1)
assert.equal(response.customer.creditCards[0].expirationMonth, '05')
assert.equal(response.customer.creditCards[0].expirationYear, '2012')
assert.equal(response.customer.creditCards[0].maskedNumber, '510510******5100')
assert.isTrue(/^\w{32}$/.test(response.customer.creditCards[0].uniqueNumberIdentifier))
done()
it "creates a customer with a payment method nonce backed by a credit card", (done) ->
myHttp = new specHelper.clientApiHttp(new Config(specHelper.defaultConfig))
specHelper.defaultGateway.clientToken.generate({}, (err, result) ->
clientToken = JSON.parse(specHelper.decodeClientToken(result.clientToken))
authorizationFingerprint = clientToken.authorizationFingerprint
params = {
authorizationFingerprint: authorizationFingerprint,
sharedCustomerIdentifierType: "testing",
sharedCustomerIdentifier: "testing-identifier",
share: true,
credit_card: {
number: "4111111111111111",
expiration_month: "11",
expiration_year: "2099"
}
}
myHttp.post("/client_api/v1/payment_methods/credit_cards.json", params, (statusCode, body) ->
nonce = JSON.parse(body).creditCards[0].nonce
customerParams =
creditCard:
paymentMethodNonce: nonce
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.creditCards[0].bin, "411111")
done()
)
)
it "creates a customer with an Apple Pay payment method nonce", (done) ->
customerParams =
paymentMethodNonce: Nonces.ApplePayAmEx
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.isNotNull(response.customer.applePayCards[0])
done()
it "creates a customer with a paypal account payment method nonce", (done) ->
customerParams =
paymentMethodNonce: Nonces.PayPalFuturePayment
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.isString(response.customer.paypalAccounts[0].email)
done()
it "does not vault a paypal account only authorized for one-time use", (done) ->
customerParams =
paymentMethodNonce: Nonces.PayPalOneTimePayment
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(
response.errors.for('customer').for('paypalAccount').on('base')[0].code,
'82902'
)
done()
it "fails on duplicate payment methods when provided the option to do so", (done) ->
customerParams =
firstName: 'John',
lastName: 'Smith'
creditCard:
number: '5555555555554444'
expirationDate: '05/2012'
options:
failOnDuplicatePaymentMethod: true
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(
response.errors.for('customer').for('creditCard').on('number')[0].code,
'81724'
)
done()
it "allows verifying cards", (done) ->
customerParams =
firstName: 'John'
lastName: 'Smith'
creditCard:
number: '5555555555554444'
expirationDate: '05/2012'
options:
verifyCard: true
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
done()
it "handles unsuccessful verifications", (done) ->
customerParams =
firstName: 'John'
lastName: 'Smith'
creditCard:
number: '6011000990139424'
expirationDate: '05/2012'
options:
verifyCard: true
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(response.verification.status, 'processor_declined')
assert.equal(response.verification.processorResponseCode, '2000')
assert.equal(response.verification.processorResponseText, 'Do Not Honor')
done()
it "handles validation errors", (done) ->
customerParams =
creditCard:
number: 'invalid card number'
expirationDate: '05/2012'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(response.message, 'Credit card number is invalid.')
assert.equal(
response.errors.for('customer').for('creditCard').on('number')[0].code,
'81715'
)
errorCodes = (error.code for error in response.errors.deepErrors())
assert.equal(errorCodes.length, 1)
assert.include(errorCodes, '81715')
done()
it "allows creating a customer with a billing addres", (done) ->
customerParams =
firstName: 'John'
lastName: 'Smith'
creditCard:
number: '5105105105105100'
expirationDate: '05/2012'
billingAddress:
streetAddress: '123 Fake St'
extendedAddress: 'Suite 403'
locality: 'Chicago'
region: 'IL'
postalCode: '60607'
countryName: 'United States of America'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'John')
assert.equal(response.customer.lastName, 'Smith')
assert.equal(response.customer.creditCards.length, 1)
assert.equal(response.customer.creditCards[0].expirationMonth, '05')
assert.equal(response.customer.creditCards[0].expirationYear, '2012')
assert.equal(response.customer.creditCards[0].maskedNumber, '510510******5100')
assert.equal(response.customer.creditCards.length, 1)
billingAddress = response.customer.creditCards[0].billingAddress
assert.equal(billingAddress.streetAddress, '123 Fake St')
assert.equal(billingAddress.extendedAddress, 'Suite 403')
assert.equal(billingAddress.locality, 'Chicago')
assert.equal(billingAddress.region, 'IL')
assert.equal(billingAddress.postalCode, '60607')
assert.equal(billingAddress.countryName, 'United States of America')
done()
it "handles validation errors on nested billing addresses", (done) ->
customerParams =
creditCard:
number: 'invalid card number'
expirationDate: '05/2012'
billingAddress:
countryName: 'invalid country'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(response.message, 'Credit card number is invalid.\nCountry name is not an accepted country.')
assert.equal(
response.errors.for('customer').for('creditCard').on('number')[0].code,
'81715'
)
assert.equal(
response.errors.for('customer').for('creditCard').for('billingAddress').on('countryName')[0].code,
'91803'
)
errorCodes = (error.code for error in response.errors.deepErrors())
assert.equal(errorCodes.length, 2)
assert.include(errorCodes, '81715')
assert.include(errorCodes, '91803')
assert.equal(response.params.customer.creditCard.expirationDate, '05/2012')
assert.equal(response.params.customer.creditCard.billingAddress.countryName, 'invalid country')
done()
it "creates a customer with venmo sdk payment method code", (done) ->
customerParams =
creditCard:
venmoSdkPaymentMethodCode: VenmoSdk.VisaPaymentMethodCode
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.creditCards[0].bin, "411111")
done()
it "creates a customer with venmo sdk session", (done) ->
customerParams =
creditCard:
number: '5105105105105100'
expirationDate: '05/2012'
options:
venmoSdkSession: VenmoSdk.Session
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.isTrue(response.customer.creditCards[0].venmoSdk)
done()
it "creates a customer with a params nonce", (done) ->
paymentMethodParams =
creditCard:
number: "4111111111111111"
expirationMonth: "12"
expirationYear: "2099"
specHelper.generateNonceForNewPaymentMethod(paymentMethodParams, null, (nonce) ->
customerParams =
firstName: "Bob"
lastName: "Fisher"
paymentMethodNonce: nonce
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.creditCards[0].bin, "411111")
done()
)
describe "find", ->
it "finds a custoemr", (done) ->
customerParams =
firstName: 'John'
lastName: 'Smith'
creditCard:
number: '5105105105105100'
expirationDate: '05/2014'
billingAddress:
company: ''
streetAddress: '123 E Fake St'
locality: 'Chicago'
region: 'IL'
postalCode: '60607'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
specHelper.defaultGateway.customer.find response.customer.id, (err, customer) ->
assert.isNull(err)
assert.equal(customer.firstName, 'John')
assert.equal(customer.lastName, 'Smith')
billingAddress = customer.creditCards[0].billingAddress
assert.equal(billingAddress.streetAddress, '123 E Fake St')
assert.equal(billingAddress.company, '')
done()
it "returns both credit cards and paypal accounts for a given customer", (done) ->
customerParams =
firstName: 'John'
lastName: 'Smith'
creditCard:
number: '5105105105105100'
expirationDate: '05/2014'
billingAddress:
company: ''
streetAddress: '123 E Fake St'
locality: 'Chicago'
region: 'IL'
postalCode: '60607'
specHelper.defaultGateway.customer.create customerParams, (err, customerResponse) ->
assert.isNull(err)
assert.equal(customerResponse.success, true)
paypalAccountParams =
customerId: customerResponse.customer.id,
paymentMethodNonce: Nonces.PayPalFuturePayment
specHelper.defaultGateway.paymentMethod.create paypalAccountParams, (err, paypalResponse) ->
assert.isNull(err)
assert.equal(paypalResponse.success, true)
specHelper.defaultGateway.customer.find customerResponse.customer.id, (err, customer) ->
assert.isNull(err)
assert.equal(customer.firstName, 'John')
assert.equal(customer.lastName, 'Smith')
assert.equal(customer.creditCards.length, 1)
assert.equal(customer.paypalAccounts.length, 1)
done()
it "returns an error if unable to find the customer", (done) ->
specHelper.defaultGateway.customer.find 'nonexistent_customer', (err, customer) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
it "handles whitespace ids", (done) ->
specHelper.defaultGateway.customer.find ' ', (err, customer) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
describe "update", ->
customerId = null
beforeEach (done) ->
customerParams =
firstName: 'Old First Name'
lastName: 'Old Last Name'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
customerId = response.customer.id
done()
it "updates a customer", (done) ->
customerParams =
firstName: 'New First Name'
lastName: 'New Last Name'
specHelper.defaultGateway.customer.update customerId, customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'New First Name')
assert.equal(response.customer.lastName, 'New Last Name')
done()
it "can add a new card to a customer", (done) ->
customerParams =
firstName: 'New First Name'
lastName: 'New Last Name'
creditCard:
number: '5105105105105100'
expirationDate: '05/2014'
specHelper.defaultGateway.customer.update customerId, customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'New First Name')
assert.equal(response.customer.lastName, 'New Last Name')
assert.equal(response.customer.creditCards[0].maskedNumber, '510510******5100')
done()
context "vaulting a payment method", ->
it "can add a new card and billing address", (done) ->
customerParams =
firstName: 'New First Name'
lastName: 'New Last Name'
creditCard:
number: '5105105105105100'
expirationDate: '05/2014'
billingAddress:
streetAddress: '123 E Fake St'
locality: 'Chicago'
region: 'IL'
postalCode: '60607'
specHelper.defaultGateway.customer.update customerId, customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'New First Name')
assert.equal(response.customer.lastName, 'New Last Name')
assert.equal(response.customer.creditCards[0].maskedNumber, '510510******5100')
billingAddress = response.customer.creditCards[0].billingAddress
assert.equal(billingAddress.streetAddress, '123 E Fake St')
assert.equal(billingAddress.locality, 'Chicago')
assert.equal(billingAddress.region, 'IL')
assert.equal(billingAddress.postalCode, '60607')
assert.equal(response.customer.addresses[0].streetAddress, '123 E Fake St')
assert.equal(response.customer.addresses[0].locality, 'Chicago')
assert.equal(response.customer.addresses[0].region, 'IL')
assert.equal(response.customer.addresses[0].postalCode, '60607')
done()
it "vaults a paypal account", (done) ->
paymentMethodToken = specHelper.randomId()
specHelper.defaultGateway.customer.create {}, (err, response) ->
paypalCustomerId = response.customer.id
customerParams =
firstName: 'New First Name'
lastName: 'New Last Name'
paypalAccount:
consentCode: 'PAYPAL_CONSENT_CODE'
token: paymentMethodToken
specHelper.defaultGateway.customer.update paypalCustomerId, customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'New First Name')
assert.equal(response.customer.lastName, 'New Last Name')
assert.isString(response.customer.paypalAccounts[0].email)
assert.equal(response.customer.paypalAccounts[0].token, paymentMethodToken)
done()
it "does not vault a one-time use paypal account", (done) ->
paymentMethodToken = specHelper.randomId()
specHelper.defaultGateway.customer.create {}, (err, response) ->
paypalCustomerId = response.customer.id
customerParams =
firstName: 'New First Name'
lastName: 'New Last Name'
paypalAccount:
accessToken: 'PAYPAL_ACCESS_TOKEN'
token: paymentMethodToken
specHelper.defaultGateway.customer.update paypalCustomerId, customerParams, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(
response.errors.for('customer').for('paypalAccount').on('base')[0].code,
'82902'
)
specHelper.defaultGateway.paymentMethod.find paymentMethodToken, (err, paypalAccount) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
it "returns an error when not found", (done) ->
specHelper.defaultGateway.customer.update 'nonexistent_customer', {}, (err, response) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
it "handles validation errors", (done) ->
specHelper.defaultGateway.customer.update customerId, {email: 'invalid_email_address'}, (err, response) ->
assert.isFalse(response.success)
assert.equal(response.message, 'Email is an invalid format.')
assert.equal(
response.errors.for('customer').on('email')[0].code,
'81604'
)
assert.equal(
response.errors.for('customer').on('email')[0].attribute,
'email'
)
done()
context "with existing card and billing address", ->
creditCardToken = null
beforeEach (done) ->
customerParams =
firstName: 'Old First Name'
lastName: 'Old Last Name'
creditCard:
cardholderName: 'Old Cardholder Name'
number: '4111111111111111'
expirationDate: '04/2014'
billingAddress:
streetAddress: '123 Old St'
locality: 'Old City'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
customerId = response.customer.id
creditCardToken = response.customer.creditCards[0].token
done()
it "udpates an existing card", (done) ->
customerParams =
firstName: 'New First Name'
lastName: 'New Last Name'
creditCard:
cardholderName: 'New Cardholder Name'
number: '5105105105105100'
expirationDate: '05/2014'
options:
updateExistingToken: creditCardToken
specHelper.defaultGateway.customer.update customerId, customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'New First Name')
assert.equal(response.customer.lastName, 'New Last Name')
assert.equal(response.customer.creditCards[0].maskedNumber, '510510******5100')
assert.equal(response.customer.creditCards[0].cardholderName, 'New Cardholder Name')
assert.equal(response.customer.creditCards[0].expirationDate, '05/2014')
done()
it "updates an existing card and billing address", (done) ->
customerParams =
firstName: 'New First Name'
lastName: 'New Last Name'
creditCard:
cardholderName: 'New Cardholder Name'
number: '5105105105105100'
expirationDate: '05/2014'
options:
updateExistingToken: creditCardToken
billingAddress:
streetAddress: '123 New St'
locality: 'New City'
options:
updateExisting: true
specHelper.defaultGateway.customer.update customerId, customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'New First Name')
assert.equal(response.customer.lastName, 'New Last Name')
assert.equal(response.customer.creditCards[0].maskedNumber, '510510******5100')
assert.equal(response.customer.creditCards[0].cardholderName, 'New Cardholder Name')
assert.equal(response.customer.creditCards[0].expirationDate, '05/2014')
assert.equal(response.customer.addresses.length, 1)
billingAddress = response.customer.creditCards[0].billingAddress
assert.equal(billingAddress.streetAddress, '123 New St')
assert.equal(billingAddress.locality, 'New City')
done()
it "doesn't serialize nulls as empty objects", (done) ->
customerParams =
creditCard:
number: '4111111111111111'
expirationDate: '05/2014'
billingAddress:
streetAddress: null
extendedAddress: "asd"
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
billingAddress = response.customer.creditCards[0].billingAddress
assert.equal(billingAddress.streetAddress, null)
done()
describe "delete", ->
it "deletes a customer", (done) ->
specHelper.defaultGateway.customer.create {}, (err, response) ->
specHelper.defaultGateway.customer.delete response.customer.id, (err) ->
assert.isNull(err)
specHelper.defaultGateway.customer.find response.customer.id, (err, customer) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
it "handles invalid customer ids", (done) ->
specHelper.defaultGateway.customer.delete 'nonexistent_customer', (err) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
| 201515 | require('../../spec_helper')
{_} = require('underscore')
{VenmoSdk} = require('../../../lib/braintree/test/venmo_sdk')
{Nonces} = require('../../../lib/braintree/test/nonces')
{Config} = require('../../../lib/braintree/config')
braintree = specHelper.braintree
describe "CustomerGateway", ->
describe "create", ->
it "creates a customer", (done) ->
specHelper.defaultGateway.customer.create {firstName: '<NAME>', lastName: '<NAME>'}, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, '<NAME>')
assert.equal(response.customer.lastName, '<NAME>')
done()
it "handles uft8 characters", (done) ->
specHelper.defaultGateway.customer.create {firstName: '<NAME>', lastName: '<NAME>'}, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, '<NAME>')
assert.equal(response.customer.lastName, '<NAME>')
done()
it "creates blank customers", (done) ->
specHelper.defaultGateway.customer.create {}, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
done()
it "stores custom fields", (done) ->
customerParams =
customFields:
storeMe: 'custom value'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.customFields.storeMe, 'custom value')
done()
context "and vaults a payment method", ->
it "creates customers with credit cards", (done) ->
customerParams =
firstName: '<NAME>'
lastName: '<NAME>'
creditCard:
number: '5105105105105100'
expirationDate: '05/2012'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, '<NAME>')
assert.equal(response.customer.lastName, '<NAME>')
assert.equal(response.customer.creditCards.length, 1)
assert.equal(response.customer.creditCards[0].expirationMonth, '05')
assert.equal(response.customer.creditCards[0].expirationYear, '2012')
assert.equal(response.customer.creditCards[0].maskedNumber, '510510******5100')
assert.isTrue(/^\w{32}$/.test(response.customer.creditCards[0].uniqueNumberIdentifier))
done()
it "creates a customer with a payment method nonce backed by a credit card", (done) ->
myHttp = new specHelper.clientApiHttp(new Config(specHelper.defaultConfig))
specHelper.defaultGateway.clientToken.generate({}, (err, result) ->
clientToken = JSON.parse(specHelper.decodeClientToken(result.clientToken))
authorizationFingerprint = clientToken.authorizationFingerprint
params = {
authorizationFingerprint: authorizationFingerprint,
sharedCustomerIdentifierType: "testing",
sharedCustomerIdentifier: "testing-identifier",
share: true,
credit_card: {
number: "4111111111111111",
expiration_month: "11",
expiration_year: "2099"
}
}
myHttp.post("/client_api/v1/payment_methods/credit_cards.json", params, (statusCode, body) ->
nonce = JSON.parse(body).creditCards[0].nonce
customerParams =
creditCard:
paymentMethodNonce: nonce
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.creditCards[0].bin, "411111")
done()
)
)
it "creates a customer with an Apple Pay payment method nonce", (done) ->
customerParams =
paymentMethodNonce: Nonces.ApplePayAmEx
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.isNotNull(response.customer.applePayCards[0])
done()
it "creates a customer with a paypal account payment method nonce", (done) ->
customerParams =
paymentMethodNonce: Nonces.PayPalFuturePayment
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.isString(response.customer.paypalAccounts[0].email)
done()
it "does not vault a paypal account only authorized for one-time use", (done) ->
customerParams =
paymentMethodNonce: Nonces.PayPalOneTimePayment
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(
response.errors.for('customer').for('paypalAccount').on('base')[0].code,
'82902'
)
done()
it "fails on duplicate payment methods when provided the option to do so", (done) ->
customerParams =
firstName: '<NAME>',
lastName: '<NAME>'
creditCard:
number: '5555555555554444'
expirationDate: '05/2012'
options:
failOnDuplicatePaymentMethod: true
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(
response.errors.for('customer').for('creditCard').on('number')[0].code,
'81724'
)
done()
it "allows verifying cards", (done) ->
customerParams =
firstName: '<NAME>'
lastName: '<NAME>'
creditCard:
number: '5555555555554444'
expirationDate: '05/2012'
options:
verifyCard: true
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
done()
it "handles unsuccessful verifications", (done) ->
customerParams =
firstName: '<NAME>'
lastName: '<NAME>'
creditCard:
number: '6011000990139424'
expirationDate: '05/2012'
options:
verifyCard: true
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(response.verification.status, 'processor_declined')
assert.equal(response.verification.processorResponseCode, '2000')
assert.equal(response.verification.processorResponseText, 'Do Not Honor')
done()
it "handles validation errors", (done) ->
customerParams =
creditCard:
number: 'invalid card number'
expirationDate: '05/2012'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(response.message, 'Credit card number is invalid.')
assert.equal(
response.errors.for('customer').for('creditCard').on('number')[0].code,
'81715'
)
errorCodes = (error.code for error in response.errors.deepErrors())
assert.equal(errorCodes.length, 1)
assert.include(errorCodes, '81715')
done()
it "allows creating a customer with a billing addres", (done) ->
customerParams =
firstName: '<NAME>'
lastName: '<NAME>'
creditCard:
number: '5105105105105100'
expirationDate: '05/2012'
billingAddress:
streetAddress: '123 Fake St'
extendedAddress: 'Suite 403'
locality: 'Chicago'
region: 'IL'
postalCode: '60607'
countryName: 'United States of America'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, '<NAME>')
assert.equal(response.customer.lastName, '<NAME>')
assert.equal(response.customer.creditCards.length, 1)
assert.equal(response.customer.creditCards[0].expirationMonth, '05')
assert.equal(response.customer.creditCards[0].expirationYear, '2012')
assert.equal(response.customer.creditCards[0].maskedNumber, '510510******5100')
assert.equal(response.customer.creditCards.length, 1)
billingAddress = response.customer.creditCards[0].billingAddress
assert.equal(billingAddress.streetAddress, '123 Fake St')
assert.equal(billingAddress.extendedAddress, 'Suite 403')
assert.equal(billingAddress.locality, 'Chicago')
assert.equal(billingAddress.region, 'IL')
assert.equal(billingAddress.postalCode, '60607')
assert.equal(billingAddress.countryName, 'United States of America')
done()
it "handles validation errors on nested billing addresses", (done) ->
customerParams =
creditCard:
number: 'invalid card number'
expirationDate: '05/2012'
billingAddress:
countryName: 'invalid country'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(response.message, 'Credit card number is invalid.\nCountry name is not an accepted country.')
assert.equal(
response.errors.for('customer').for('creditCard').on('number')[0].code,
'81715'
)
assert.equal(
response.errors.for('customer').for('creditCard').for('billingAddress').on('countryName')[0].code,
'91803'
)
errorCodes = (error.code for error in response.errors.deepErrors())
assert.equal(errorCodes.length, 2)
assert.include(errorCodes, '81715')
assert.include(errorCodes, '91803')
assert.equal(response.params.customer.creditCard.expirationDate, '05/2012')
assert.equal(response.params.customer.creditCard.billingAddress.countryName, 'invalid country')
done()
it "creates a customer with venmo sdk payment method code", (done) ->
customerParams =
creditCard:
venmoSdkPaymentMethodCode: VenmoSdk.VisaPaymentMethodCode
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.creditCards[0].bin, "411111")
done()
it "creates a customer with venmo sdk session", (done) ->
customerParams =
creditCard:
number: '5105105105105100'
expirationDate: '05/2012'
options:
venmoSdkSession: VenmoSdk.Session
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.isTrue(response.customer.creditCards[0].venmoSdk)
done()
it "creates a customer with a params nonce", (done) ->
paymentMethodParams =
creditCard:
number: "4111111111111111"
expirationMonth: "12"
expirationYear: "2099"
specHelper.generateNonceForNewPaymentMethod(paymentMethodParams, null, (nonce) ->
customerParams =
firstName: "<NAME>"
lastName: "<NAME>"
paymentMethodNonce: nonce
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.creditCards[0].bin, "411111")
done()
)
describe "find", ->
it "finds a custoemr", (done) ->
customerParams =
firstName: '<NAME>'
lastName: '<NAME>'
creditCard:
number: '5105105105105100'
expirationDate: '05/2014'
billingAddress:
company: ''
streetAddress: '123 E Fake St'
locality: 'Chicago'
region: 'IL'
postalCode: '60607'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
specHelper.defaultGateway.customer.find response.customer.id, (err, customer) ->
assert.isNull(err)
assert.equal(customer.firstName, '<NAME>')
assert.equal(customer.lastName, '<NAME>')
billingAddress = customer.creditCards[0].billingAddress
assert.equal(billingAddress.streetAddress, '123 E Fake St')
assert.equal(billingAddress.company, '')
done()
it "returns both credit cards and paypal accounts for a given customer", (done) ->
customerParams =
firstName: '<NAME>'
lastName: '<NAME>'
creditCard:
number: '5105105105105100'
expirationDate: '05/2014'
billingAddress:
company: ''
streetAddress: '123 E Fake St'
locality: 'Chicago'
region: 'IL'
postalCode: '60607'
specHelper.defaultGateway.customer.create customerParams, (err, customerResponse) ->
assert.isNull(err)
assert.equal(customerResponse.success, true)
paypalAccountParams =
customerId: customerResponse.customer.id,
paymentMethodNonce: Nonces.PayPalFuturePayment
specHelper.defaultGateway.paymentMethod.create paypalAccountParams, (err, paypalResponse) ->
assert.isNull(err)
assert.equal(paypalResponse.success, true)
specHelper.defaultGateway.customer.find customerResponse.customer.id, (err, customer) ->
assert.isNull(err)
assert.equal(customer.firstName, '<NAME>')
assert.equal(customer.lastName, '<NAME>')
assert.equal(customer.creditCards.length, 1)
assert.equal(customer.paypalAccounts.length, 1)
done()
it "returns an error if unable to find the customer", (done) ->
specHelper.defaultGateway.customer.find 'nonexistent_customer', (err, customer) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
it "handles whitespace ids", (done) ->
specHelper.defaultGateway.customer.find ' ', (err, customer) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
describe "update", ->
customerId = null
beforeEach (done) ->
customerParams =
firstName: '<NAME>'
lastName: 'Old <NAME>'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
customerId = response.customer.id
done()
it "updates a customer", (done) ->
customerParams =
firstName: '<NAME>'
lastName: '<NAME>'
specHelper.defaultGateway.customer.update customerId, customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'New First Name')
assert.equal(response.customer.lastName, 'New Last Name')
done()
it "can add a new card to a customer", (done) ->
customerParams =
firstName: '<NAME>'
lastName: '<NAME>'
creditCard:
number: '5105105105105100'
expirationDate: '05/2014'
specHelper.defaultGateway.customer.update customerId, customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, '<NAME>')
assert.equal(response.customer.lastName, 'New Last Name')
assert.equal(response.customer.creditCards[0].maskedNumber, '510510******5100')
done()
context "vaulting a payment method", ->
it "can add a new card and billing address", (done) ->
customerParams =
firstName: '<NAME>'
lastName: '<NAME>'
creditCard:
number: '5105105105105100'
expirationDate: '05/2014'
billingAddress:
streetAddress: '123 E Fake St'
locality: 'Chicago'
region: 'IL'
postalCode: '60607'
specHelper.defaultGateway.customer.update customerId, customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, '<NAME>')
assert.equal(response.customer.lastName, '<NAME>')
assert.equal(response.customer.creditCards[0].maskedNumber, '510510******5100')
billingAddress = response.customer.creditCards[0].billingAddress
assert.equal(billingAddress.streetAddress, '123 E Fake St')
assert.equal(billingAddress.locality, 'Chicago')
assert.equal(billingAddress.region, 'IL')
assert.equal(billingAddress.postalCode, '60607')
assert.equal(response.customer.addresses[0].streetAddress, '123 E Fake St')
assert.equal(response.customer.addresses[0].locality, 'Chicago')
assert.equal(response.customer.addresses[0].region, 'IL')
assert.equal(response.customer.addresses[0].postalCode, '60607')
done()
it "vaults a paypal account", (done) ->
paymentMethodToken = specHelper.randomId()
specHelper.defaultGateway.customer.create {}, (err, response) ->
paypalCustomerId = response.customer.id
customerParams =
firstName: '<NAME>'
lastName: '<NAME>'
paypalAccount:
consentCode: 'PAYPAL_CONSENT_CODE'
token: paymentMethodToken
specHelper.defaultGateway.customer.update paypalCustomerId, customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, '<NAME>')
assert.equal(response.customer.lastName, '<NAME> Name')
assert.isString(response.customer.paypalAccounts[0].email)
assert.equal(response.customer.paypalAccounts[0].token, paymentMethodToken)
done()
it "does not vault a one-time use paypal account", (done) ->
paymentMethodToken = specHelper.randomId()
specHelper.defaultGateway.customer.create {}, (err, response) ->
paypalCustomerId = response.customer.id
customerParams =
firstName: '<NAME>'
lastName: '<NAME>'
paypalAccount:
accessToken: 'PAYPAL_ACCESS_TOKEN'
token: paymentMethodToken
specHelper.defaultGateway.customer.update paypalCustomerId, customerParams, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(
response.errors.for('customer').for('paypalAccount').on('base')[0].code,
'82902'
)
specHelper.defaultGateway.paymentMethod.find paymentMethodToken, (err, paypalAccount) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
it "returns an error when not found", (done) ->
specHelper.defaultGateway.customer.update 'nonexistent_customer', {}, (err, response) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
it "handles validation errors", (done) ->
specHelper.defaultGateway.customer.update customerId, {email: 'invalid_email_address'}, (err, response) ->
assert.isFalse(response.success)
assert.equal(response.message, 'Email is an invalid format.')
assert.equal(
response.errors.for('customer').on('email')[0].code,
'81604'
)
assert.equal(
response.errors.for('customer').on('email')[0].attribute,
'email'
)
done()
context "with existing card and billing address", ->
creditCardToken = null
beforeEach (done) ->
customerParams =
firstName: '<NAME>'
lastName: '<NAME>'
creditCard:
cardholderName: '<NAME>'
number: '4111111111111111'
expirationDate: '04/2014'
billingAddress:
streetAddress: '123 Old St'
locality: 'Old City'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
customerId = response.customer.id
creditCardToken = response.customer.creditCards[0].token
done()
it "udpates an existing card", (done) ->
customerParams =
firstName: '<NAME>'
lastName: '<NAME>'
creditCard:
cardholderName: '<NAME>'
number: '5105105105105100'
expirationDate: '05/2014'
options:
updateExistingToken: creditCardToken
specHelper.defaultGateway.customer.update customerId, customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, '<NAME>')
assert.equal(response.customer.lastName, '<NAME> Last Name')
assert.equal(response.customer.creditCards[0].maskedNumber, '510510******5100')
assert.equal(response.customer.creditCards[0].cardholderName, 'New Cardholder Name')
assert.equal(response.customer.creditCards[0].expirationDate, '05/2014')
done()
it "updates an existing card and billing address", (done) ->
customerParams =
firstName: '<NAME>'
lastName: '<NAME>'
creditCard:
cardholderName: '<NAME>'
number: '5105105105105100'
expirationDate: '05/2014'
options:
updateExistingToken: creditCardToken
billingAddress:
streetAddress: '123 New St'
locality: 'New City'
options:
updateExisting: true
specHelper.defaultGateway.customer.update customerId, customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'New First Name')
assert.equal(response.customer.lastName, 'New Last Name')
assert.equal(response.customer.creditCards[0].maskedNumber, '510510******5100')
assert.equal(response.customer.creditCards[0].cardholderName, 'New <NAME>holder <NAME>')
assert.equal(response.customer.creditCards[0].expirationDate, '05/2014')
assert.equal(response.customer.addresses.length, 1)
billingAddress = response.customer.creditCards[0].billingAddress
assert.equal(billingAddress.streetAddress, '123 New St')
assert.equal(billingAddress.locality, 'New City')
done()
it "doesn't serialize nulls as empty objects", (done) ->
customerParams =
creditCard:
number: '4111111111111111'
expirationDate: '05/2014'
billingAddress:
streetAddress: null
extendedAddress: "asd"
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
billingAddress = response.customer.creditCards[0].billingAddress
assert.equal(billingAddress.streetAddress, null)
done()
describe "delete", ->
it "deletes a customer", (done) ->
specHelper.defaultGateway.customer.create {}, (err, response) ->
specHelper.defaultGateway.customer.delete response.customer.id, (err) ->
assert.isNull(err)
specHelper.defaultGateway.customer.find response.customer.id, (err, customer) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
it "handles invalid customer ids", (done) ->
specHelper.defaultGateway.customer.delete 'nonexistent_customer', (err) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
| true | require('../../spec_helper')
{_} = require('underscore')
{VenmoSdk} = require('../../../lib/braintree/test/venmo_sdk')
{Nonces} = require('../../../lib/braintree/test/nonces')
{Config} = require('../../../lib/braintree/config')
braintree = specHelper.braintree
describe "CustomerGateway", ->
describe "create", ->
it "creates a customer", (done) ->
specHelper.defaultGateway.customer.create {firstName: 'PI:NAME:<NAME>END_PI', lastName: 'PI:NAME:<NAME>END_PI'}, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.customer.lastName, 'PI:NAME:<NAME>END_PI')
done()
it "handles uft8 characters", (done) ->
specHelper.defaultGateway.customer.create {firstName: 'PI:NAME:<NAME>END_PI', lastName: 'PI:NAME:<NAME>END_PI'}, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.customer.lastName, 'PI:NAME:<NAME>END_PI')
done()
it "creates blank customers", (done) ->
specHelper.defaultGateway.customer.create {}, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
done()
it "stores custom fields", (done) ->
customerParams =
customFields:
storeMe: 'custom value'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.customFields.storeMe, 'custom value')
done()
context "and vaults a payment method", ->
it "creates customers with credit cards", (done) ->
customerParams =
firstName: 'PI:NAME:<NAME>END_PI'
lastName: 'PI:NAME:<NAME>END_PI'
creditCard:
number: '5105105105105100'
expirationDate: '05/2012'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.customer.lastName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.customer.creditCards.length, 1)
assert.equal(response.customer.creditCards[0].expirationMonth, '05')
assert.equal(response.customer.creditCards[0].expirationYear, '2012')
assert.equal(response.customer.creditCards[0].maskedNumber, '510510******5100')
assert.isTrue(/^\w{32}$/.test(response.customer.creditCards[0].uniqueNumberIdentifier))
done()
it "creates a customer with a payment method nonce backed by a credit card", (done) ->
myHttp = new specHelper.clientApiHttp(new Config(specHelper.defaultConfig))
specHelper.defaultGateway.clientToken.generate({}, (err, result) ->
clientToken = JSON.parse(specHelper.decodeClientToken(result.clientToken))
authorizationFingerprint = clientToken.authorizationFingerprint
params = {
authorizationFingerprint: authorizationFingerprint,
sharedCustomerIdentifierType: "testing",
sharedCustomerIdentifier: "testing-identifier",
share: true,
credit_card: {
number: "4111111111111111",
expiration_month: "11",
expiration_year: "2099"
}
}
myHttp.post("/client_api/v1/payment_methods/credit_cards.json", params, (statusCode, body) ->
nonce = JSON.parse(body).creditCards[0].nonce
customerParams =
creditCard:
paymentMethodNonce: nonce
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.creditCards[0].bin, "411111")
done()
)
)
it "creates a customer with an Apple Pay payment method nonce", (done) ->
customerParams =
paymentMethodNonce: Nonces.ApplePayAmEx
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.isNotNull(response.customer.applePayCards[0])
done()
it "creates a customer with a paypal account payment method nonce", (done) ->
customerParams =
paymentMethodNonce: Nonces.PayPalFuturePayment
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.isString(response.customer.paypalAccounts[0].email)
done()
it "does not vault a paypal account only authorized for one-time use", (done) ->
customerParams =
paymentMethodNonce: Nonces.PayPalOneTimePayment
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(
response.errors.for('customer').for('paypalAccount').on('base')[0].code,
'82902'
)
done()
it "fails on duplicate payment methods when provided the option to do so", (done) ->
customerParams =
firstName: 'PI:NAME:<NAME>END_PI',
lastName: 'PI:NAME:<NAME>END_PI'
creditCard:
number: '5555555555554444'
expirationDate: '05/2012'
options:
failOnDuplicatePaymentMethod: true
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(
response.errors.for('customer').for('creditCard').on('number')[0].code,
'81724'
)
done()
it "allows verifying cards", (done) ->
customerParams =
firstName: 'PI:NAME:<NAME>END_PI'
lastName: 'PI:NAME:<NAME>END_PI'
creditCard:
number: '5555555555554444'
expirationDate: '05/2012'
options:
verifyCard: true
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
done()
it "handles unsuccessful verifications", (done) ->
customerParams =
firstName: 'PI:NAME:<NAME>END_PI'
lastName: 'PI:NAME:<NAME>END_PI'
creditCard:
number: '6011000990139424'
expirationDate: '05/2012'
options:
verifyCard: true
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(response.verification.status, 'processor_declined')
assert.equal(response.verification.processorResponseCode, '2000')
assert.equal(response.verification.processorResponseText, 'Do Not Honor')
done()
it "handles validation errors", (done) ->
customerParams =
creditCard:
number: 'invalid card number'
expirationDate: '05/2012'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(response.message, 'Credit card number is invalid.')
assert.equal(
response.errors.for('customer').for('creditCard').on('number')[0].code,
'81715'
)
errorCodes = (error.code for error in response.errors.deepErrors())
assert.equal(errorCodes.length, 1)
assert.include(errorCodes, '81715')
done()
it "allows creating a customer with a billing addres", (done) ->
customerParams =
firstName: 'PI:NAME:<NAME>END_PI'
lastName: 'PI:NAME:<NAME>END_PI'
creditCard:
number: '5105105105105100'
expirationDate: '05/2012'
billingAddress:
streetAddress: '123 Fake St'
extendedAddress: 'Suite 403'
locality: 'Chicago'
region: 'IL'
postalCode: '60607'
countryName: 'United States of America'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.customer.lastName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.customer.creditCards.length, 1)
assert.equal(response.customer.creditCards[0].expirationMonth, '05')
assert.equal(response.customer.creditCards[0].expirationYear, '2012')
assert.equal(response.customer.creditCards[0].maskedNumber, '510510******5100')
assert.equal(response.customer.creditCards.length, 1)
billingAddress = response.customer.creditCards[0].billingAddress
assert.equal(billingAddress.streetAddress, '123 Fake St')
assert.equal(billingAddress.extendedAddress, 'Suite 403')
assert.equal(billingAddress.locality, 'Chicago')
assert.equal(billingAddress.region, 'IL')
assert.equal(billingAddress.postalCode, '60607')
assert.equal(billingAddress.countryName, 'United States of America')
done()
it "handles validation errors on nested billing addresses", (done) ->
customerParams =
creditCard:
number: 'invalid card number'
expirationDate: '05/2012'
billingAddress:
countryName: 'invalid country'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(response.message, 'Credit card number is invalid.\nCountry name is not an accepted country.')
assert.equal(
response.errors.for('customer').for('creditCard').on('number')[0].code,
'81715'
)
assert.equal(
response.errors.for('customer').for('creditCard').for('billingAddress').on('countryName')[0].code,
'91803'
)
errorCodes = (error.code for error in response.errors.deepErrors())
assert.equal(errorCodes.length, 2)
assert.include(errorCodes, '81715')
assert.include(errorCodes, '91803')
assert.equal(response.params.customer.creditCard.expirationDate, '05/2012')
assert.equal(response.params.customer.creditCard.billingAddress.countryName, 'invalid country')
done()
it "creates a customer with venmo sdk payment method code", (done) ->
customerParams =
creditCard:
venmoSdkPaymentMethodCode: VenmoSdk.VisaPaymentMethodCode
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.creditCards[0].bin, "411111")
done()
it "creates a customer with venmo sdk session", (done) ->
customerParams =
creditCard:
number: '5105105105105100'
expirationDate: '05/2012'
options:
venmoSdkSession: VenmoSdk.Session
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.isTrue(response.customer.creditCards[0].venmoSdk)
done()
it "creates a customer with a params nonce", (done) ->
paymentMethodParams =
creditCard:
number: "4111111111111111"
expirationMonth: "12"
expirationYear: "2099"
specHelper.generateNonceForNewPaymentMethod(paymentMethodParams, null, (nonce) ->
customerParams =
firstName: "PI:NAME:<NAME>END_PI"
lastName: "PI:NAME:<NAME>END_PI"
paymentMethodNonce: nonce
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.creditCards[0].bin, "411111")
done()
)
describe "find", ->
it "finds a custoemr", (done) ->
customerParams =
firstName: 'PI:NAME:<NAME>END_PI'
lastName: 'PI:NAME:<NAME>END_PI'
creditCard:
number: '5105105105105100'
expirationDate: '05/2014'
billingAddress:
company: ''
streetAddress: '123 E Fake St'
locality: 'Chicago'
region: 'IL'
postalCode: '60607'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
specHelper.defaultGateway.customer.find response.customer.id, (err, customer) ->
assert.isNull(err)
assert.equal(customer.firstName, 'PI:NAME:<NAME>END_PI')
assert.equal(customer.lastName, 'PI:NAME:<NAME>END_PI')
billingAddress = customer.creditCards[0].billingAddress
assert.equal(billingAddress.streetAddress, '123 E Fake St')
assert.equal(billingAddress.company, '')
done()
it "returns both credit cards and paypal accounts for a given customer", (done) ->
customerParams =
firstName: 'PI:NAME:<NAME>END_PI'
lastName: 'PI:NAME:<NAME>END_PI'
creditCard:
number: '5105105105105100'
expirationDate: '05/2014'
billingAddress:
company: ''
streetAddress: '123 E Fake St'
locality: 'Chicago'
region: 'IL'
postalCode: '60607'
specHelper.defaultGateway.customer.create customerParams, (err, customerResponse) ->
assert.isNull(err)
assert.equal(customerResponse.success, true)
paypalAccountParams =
customerId: customerResponse.customer.id,
paymentMethodNonce: Nonces.PayPalFuturePayment
specHelper.defaultGateway.paymentMethod.create paypalAccountParams, (err, paypalResponse) ->
assert.isNull(err)
assert.equal(paypalResponse.success, true)
specHelper.defaultGateway.customer.find customerResponse.customer.id, (err, customer) ->
assert.isNull(err)
assert.equal(customer.firstName, 'PI:NAME:<NAME>END_PI')
assert.equal(customer.lastName, 'PI:NAME:<NAME>END_PI')
assert.equal(customer.creditCards.length, 1)
assert.equal(customer.paypalAccounts.length, 1)
done()
it "returns an error if unable to find the customer", (done) ->
specHelper.defaultGateway.customer.find 'nonexistent_customer', (err, customer) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
it "handles whitespace ids", (done) ->
specHelper.defaultGateway.customer.find ' ', (err, customer) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
describe "update", ->
customerId = null
beforeEach (done) ->
customerParams =
firstName: 'PI:NAME:<NAME>END_PI'
lastName: 'Old PI:NAME:<NAME>END_PI'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
customerId = response.customer.id
done()
it "updates a customer", (done) ->
customerParams =
firstName: 'PI:NAME:<NAME>END_PI'
lastName: 'PI:NAME:<NAME>END_PI'
specHelper.defaultGateway.customer.update customerId, customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'New First Name')
assert.equal(response.customer.lastName, 'New Last Name')
done()
it "can add a new card to a customer", (done) ->
customerParams =
firstName: 'PI:NAME:<NAME>END_PI'
lastName: 'PI:NAME:<NAME>END_PI'
creditCard:
number: '5105105105105100'
expirationDate: '05/2014'
specHelper.defaultGateway.customer.update customerId, customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.customer.lastName, 'New Last Name')
assert.equal(response.customer.creditCards[0].maskedNumber, '510510******5100')
done()
context "vaulting a payment method", ->
it "can add a new card and billing address", (done) ->
customerParams =
firstName: 'PI:NAME:<NAME>END_PI'
lastName: 'PI:NAME:<NAME>END_PI'
creditCard:
number: '5105105105105100'
expirationDate: '05/2014'
billingAddress:
streetAddress: '123 E Fake St'
locality: 'Chicago'
region: 'IL'
postalCode: '60607'
specHelper.defaultGateway.customer.update customerId, customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.customer.lastName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.customer.creditCards[0].maskedNumber, '510510******5100')
billingAddress = response.customer.creditCards[0].billingAddress
assert.equal(billingAddress.streetAddress, '123 E Fake St')
assert.equal(billingAddress.locality, 'Chicago')
assert.equal(billingAddress.region, 'IL')
assert.equal(billingAddress.postalCode, '60607')
assert.equal(response.customer.addresses[0].streetAddress, '123 E Fake St')
assert.equal(response.customer.addresses[0].locality, 'Chicago')
assert.equal(response.customer.addresses[0].region, 'IL')
assert.equal(response.customer.addresses[0].postalCode, '60607')
done()
it "vaults a paypal account", (done) ->
paymentMethodToken = specHelper.randomId()
specHelper.defaultGateway.customer.create {}, (err, response) ->
paypalCustomerId = response.customer.id
customerParams =
firstName: 'PI:NAME:<NAME>END_PI'
lastName: 'PI:NAME:<NAME>END_PI'
paypalAccount:
consentCode: 'PAYPAL_CONSENT_CODE'
token: paymentMethodToken
specHelper.defaultGateway.customer.update paypalCustomerId, customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.customer.lastName, 'PI:NAME:<NAME>END_PI Name')
assert.isString(response.customer.paypalAccounts[0].email)
assert.equal(response.customer.paypalAccounts[0].token, paymentMethodToken)
done()
it "does not vault a one-time use paypal account", (done) ->
paymentMethodToken = specHelper.randomId()
specHelper.defaultGateway.customer.create {}, (err, response) ->
paypalCustomerId = response.customer.id
customerParams =
firstName: 'PI:NAME:<NAME>END_PI'
lastName: 'PI:NAME:<NAME>END_PI'
paypalAccount:
accessToken: 'PAYPAL_ACCESS_TOKEN'
token: paymentMethodToken
specHelper.defaultGateway.customer.update paypalCustomerId, customerParams, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(
response.errors.for('customer').for('paypalAccount').on('base')[0].code,
'82902'
)
specHelper.defaultGateway.paymentMethod.find paymentMethodToken, (err, paypalAccount) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
it "returns an error when not found", (done) ->
specHelper.defaultGateway.customer.update 'nonexistent_customer', {}, (err, response) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
it "handles validation errors", (done) ->
specHelper.defaultGateway.customer.update customerId, {email: 'invalid_email_address'}, (err, response) ->
assert.isFalse(response.success)
assert.equal(response.message, 'Email is an invalid format.')
assert.equal(
response.errors.for('customer').on('email')[0].code,
'81604'
)
assert.equal(
response.errors.for('customer').on('email')[0].attribute,
'email'
)
done()
context "with existing card and billing address", ->
creditCardToken = null
beforeEach (done) ->
customerParams =
firstName: 'PI:NAME:<NAME>END_PI'
lastName: 'PI:NAME:<NAME>END_PI'
creditCard:
cardholderName: 'PI:NAME:<NAME>END_PI'
number: '4111111111111111'
expirationDate: '04/2014'
billingAddress:
streetAddress: '123 Old St'
locality: 'Old City'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
customerId = response.customer.id
creditCardToken = response.customer.creditCards[0].token
done()
it "udpates an existing card", (done) ->
customerParams =
firstName: 'PI:NAME:<NAME>END_PI'
lastName: 'PI:NAME:<NAME>END_PI'
creditCard:
cardholderName: 'PI:NAME:<NAME>END_PI'
number: '5105105105105100'
expirationDate: '05/2014'
options:
updateExistingToken: creditCardToken
specHelper.defaultGateway.customer.update customerId, customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.customer.lastName, 'PI:NAME:<NAME>END_PI Last Name')
assert.equal(response.customer.creditCards[0].maskedNumber, '510510******5100')
assert.equal(response.customer.creditCards[0].cardholderName, 'New Cardholder Name')
assert.equal(response.customer.creditCards[0].expirationDate, '05/2014')
done()
it "updates an existing card and billing address", (done) ->
customerParams =
firstName: 'PI:NAME:<NAME>END_PI'
lastName: 'PI:NAME:<NAME>END_PI'
creditCard:
cardholderName: 'PI:NAME:<NAME>END_PI'
number: '5105105105105100'
expirationDate: '05/2014'
options:
updateExistingToken: creditCardToken
billingAddress:
streetAddress: '123 New St'
locality: 'New City'
options:
updateExisting: true
specHelper.defaultGateway.customer.update customerId, customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'New First Name')
assert.equal(response.customer.lastName, 'New Last Name')
assert.equal(response.customer.creditCards[0].maskedNumber, '510510******5100')
assert.equal(response.customer.creditCards[0].cardholderName, 'New PI:NAME:<NAME>END_PIholder PI:NAME:<NAME>END_PI')
assert.equal(response.customer.creditCards[0].expirationDate, '05/2014')
assert.equal(response.customer.addresses.length, 1)
billingAddress = response.customer.creditCards[0].billingAddress
assert.equal(billingAddress.streetAddress, '123 New St')
assert.equal(billingAddress.locality, 'New City')
done()
it "doesn't serialize nulls as empty objects", (done) ->
customerParams =
creditCard:
number: '4111111111111111'
expirationDate: '05/2014'
billingAddress:
streetAddress: null
extendedAddress: "asd"
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
billingAddress = response.customer.creditCards[0].billingAddress
assert.equal(billingAddress.streetAddress, null)
done()
describe "delete", ->
it "deletes a customer", (done) ->
specHelper.defaultGateway.customer.create {}, (err, response) ->
specHelper.defaultGateway.customer.delete response.customer.id, (err) ->
assert.isNull(err)
specHelper.defaultGateway.customer.find response.customer.id, (err, customer) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
it "handles invalid customer ids", (done) ->
specHelper.defaultGateway.customer.delete 'nonexistent_customer', (err) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
|
[
{
"context": "nforces spaces inside of array brackets.\n# @author Jamund Ferguson\n###\n'use strict'\n\nastUtils = require '../eslint-a",
"end": 101,
"score": 0.9998721480369568,
"start": 86,
"tag": "NAME",
"value": "Jamund Ferguson"
}
] | src/rules/array-bracket-spacing.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Disallows or enforces spaces inside of array brackets.
# @author Jamund Ferguson
###
'use strict'
astUtils = require '../eslint-ast-utils'
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description: 'enforce consistent spacing inside array brackets'
category: 'Stylistic Issues'
recommended: no
url: 'https://eslint.org/docs/rules/array-bracket-spacing'
fixable: 'whitespace'
schema: [
enum: ['always', 'never']
,
type: 'object'
properties:
singleValue:
type: 'boolean'
objectsInArrays:
type: 'boolean'
arraysInArrays:
type: 'boolean'
additionalProperties: no
]
messages:
unexpectedSpaceAfter: "There should be no space after '{{tokenValue}}'."
unexpectedSpaceBefore: "There should be no space before '{{tokenValue}}'."
missingSpaceAfter: "A space is required after '{{tokenValue}}'."
missingSpaceBefore: "A space is required before '{{tokenValue}}'."
create: (context) ->
spaced = context.options[0] is 'always'
sourceCode = context.getSourceCode()
###*
# Determines whether an option is set, relative to the spacing option.
# If spaced is "always", then check whether option is set to false.
# If spaced is "never", then check whether option is set to true.
# @param {Object} option - The option to exclude.
# @returns {boolean} Whether or not the property is excluded.
###
isOptionSet = (option) ->
if context.options[1]
context.options[1][option] is not spaced
else
no
options = {
spaced
singleElementException: isOptionSet 'singleValue'
objectsInArraysException: isOptionSet 'objectsInArrays'
arraysInArraysException: isOptionSet 'arraysInArrays'
}
#--------------------------------------------------------------------------
# Helpers
#--------------------------------------------------------------------------
###*
# Reports that there shouldn't be a space after the first token
# @param {ASTNode} node - The node to report in the event of an error.
# @param {Token} token - The token to use for the report.
# @returns {void}
###
reportNoBeginningSpace = (node, token) ->
context.report {
node
loc: token.loc.start
messageId: 'unexpectedSpaceAfter'
data:
tokenValue: token.value
fix: (fixer) ->
nextToken = sourceCode.getTokenAfter token
fixer.removeRange [token.range[1], nextToken.range[0]]
}
###*
# Reports that there shouldn't be a space before the last token
# @param {ASTNode} node - The node to report in the event of an error.
# @param {Token} token - The token to use for the report.
# @returns {void}
###
reportNoEndingSpace = (node, token) ->
context.report {
node
loc: token.loc.start
messageId: 'unexpectedSpaceBefore'
data:
tokenValue: token.value
fix: (fixer) ->
previousToken = sourceCode.getTokenBefore token
fixer.removeRange [previousToken.range[1], token.range[0]]
}
###*
# Reports that there should be a space after the first token
# @param {ASTNode} node - The node to report in the event of an error.
# @param {Token} token - The token to use for the report.
# @returns {void}
###
reportRequiredBeginningSpace = (node, token) ->
context.report {
node
loc: token.loc.start
messageId: 'missingSpaceAfter'
data:
tokenValue: token.value
fix: (fixer) -> fixer.insertTextAfter token, ' '
}
###*
# Reports that there should be a space before the last token
# @param {ASTNode} node - The node to report in the event of an error.
# @param {Token} token - The token to use for the report.
# @returns {void}
###
reportRequiredEndingSpace = (node, token) ->
context.report {
node
loc: token.loc.start
messageId: 'missingSpaceBefore'
data:
tokenValue: token.value
fix: (fixer) -> fixer.insertTextBefore token, ' '
}
###*
# Determines if a node is an object type
# @param {ASTNode} node - The node to check.
# @returns {boolean} Whether or not the node is an object type.
###
isObjectType = (node) ->
node and
node.type in ['ObjectExpression', 'ObjectPattern'] and
not node.implicit
###*
# Determines if a node is an array type
# @param {ASTNode} node - The node to check.
# @returns {boolean} Whether or not the node is an array type.
###
isArrayType = (node) ->
node and node.type in ['ArrayExpression', 'ArrayPattern']
###*
# Validates the spacing around array brackets
# @param {ASTNode} node - The node we're checking for spacing
# @returns {void}
###
validateArraySpacing = (node) ->
return if options.spaced and node.elements.length is 0
first = sourceCode.getFirstToken node
second = sourceCode.getFirstToken node, 1
last =
if node.typeAnnotation
sourceCode.getTokenBefore node.typeAnnotation
else
sourceCode.getLastToken node
penultimate = sourceCode.getTokenBefore last
firstElement = node.elements[0]
lastElement = node.elements[node.elements.length - 1]
openingBracketMustBeSpaced =
if (
(options.objectsInArraysException and isObjectType(firstElement)) or
(options.arraysInArraysException and isArrayType(firstElement)) or
(options.singleElementException and node.elements.length is 1)
)
not options.spaced
else
options.spaced
closingBracketMustBeSpaced =
if (
(options.objectsInArraysException and isObjectType(lastElement)) or
(options.arraysInArraysException and isArrayType(lastElement)) or
(options.singleElementException and node.elements.length is 1)
)
not options.spaced
else
options.spaced
if astUtils.isTokenOnSameLine first, second
if (
openingBracketMustBeSpaced and
not sourceCode.isSpaceBetweenTokens first, second
)
reportRequiredBeginningSpace node, first
if (
not openingBracketMustBeSpaced and
sourceCode.isSpaceBetweenTokens first, second
)
reportNoBeginningSpace node, first
if first isnt penultimate and astUtils.isTokenOnSameLine penultimate, last
if (
closingBracketMustBeSpaced and
not sourceCode.isSpaceBetweenTokens penultimate, last
)
reportRequiredEndingSpace node, last
if (
not closingBracketMustBeSpaced and
sourceCode.isSpaceBetweenTokens penultimate, last
)
reportNoEndingSpace node, last
#--------------------------------------------------------------------------
# Public
#--------------------------------------------------------------------------
ArrayPattern: validateArraySpacing
ArrayExpression: validateArraySpacing
| 14312 | ###*
# @fileoverview Disallows or enforces spaces inside of array brackets.
# @author <NAME>
###
'use strict'
astUtils = require '../eslint-ast-utils'
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description: 'enforce consistent spacing inside array brackets'
category: 'Stylistic Issues'
recommended: no
url: 'https://eslint.org/docs/rules/array-bracket-spacing'
fixable: 'whitespace'
schema: [
enum: ['always', 'never']
,
type: 'object'
properties:
singleValue:
type: 'boolean'
objectsInArrays:
type: 'boolean'
arraysInArrays:
type: 'boolean'
additionalProperties: no
]
messages:
unexpectedSpaceAfter: "There should be no space after '{{tokenValue}}'."
unexpectedSpaceBefore: "There should be no space before '{{tokenValue}}'."
missingSpaceAfter: "A space is required after '{{tokenValue}}'."
missingSpaceBefore: "A space is required before '{{tokenValue}}'."
create: (context) ->
spaced = context.options[0] is 'always'
sourceCode = context.getSourceCode()
###*
# Determines whether an option is set, relative to the spacing option.
# If spaced is "always", then check whether option is set to false.
# If spaced is "never", then check whether option is set to true.
# @param {Object} option - The option to exclude.
# @returns {boolean} Whether or not the property is excluded.
###
isOptionSet = (option) ->
if context.options[1]
context.options[1][option] is not spaced
else
no
options = {
spaced
singleElementException: isOptionSet 'singleValue'
objectsInArraysException: isOptionSet 'objectsInArrays'
arraysInArraysException: isOptionSet 'arraysInArrays'
}
#--------------------------------------------------------------------------
# Helpers
#--------------------------------------------------------------------------
###*
# Reports that there shouldn't be a space after the first token
# @param {ASTNode} node - The node to report in the event of an error.
# @param {Token} token - The token to use for the report.
# @returns {void}
###
reportNoBeginningSpace = (node, token) ->
context.report {
node
loc: token.loc.start
messageId: 'unexpectedSpaceAfter'
data:
tokenValue: token.value
fix: (fixer) ->
nextToken = sourceCode.getTokenAfter token
fixer.removeRange [token.range[1], nextToken.range[0]]
}
###*
# Reports that there shouldn't be a space before the last token
# @param {ASTNode} node - The node to report in the event of an error.
# @param {Token} token - The token to use for the report.
# @returns {void}
###
reportNoEndingSpace = (node, token) ->
context.report {
node
loc: token.loc.start
messageId: 'unexpectedSpaceBefore'
data:
tokenValue: token.value
fix: (fixer) ->
previousToken = sourceCode.getTokenBefore token
fixer.removeRange [previousToken.range[1], token.range[0]]
}
###*
# Reports that there should be a space after the first token
# @param {ASTNode} node - The node to report in the event of an error.
# @param {Token} token - The token to use for the report.
# @returns {void}
###
reportRequiredBeginningSpace = (node, token) ->
context.report {
node
loc: token.loc.start
messageId: 'missingSpaceAfter'
data:
tokenValue: token.value
fix: (fixer) -> fixer.insertTextAfter token, ' '
}
###*
# Reports that there should be a space before the last token
# @param {ASTNode} node - The node to report in the event of an error.
# @param {Token} token - The token to use for the report.
# @returns {void}
###
reportRequiredEndingSpace = (node, token) ->
context.report {
node
loc: token.loc.start
messageId: 'missingSpaceBefore'
data:
tokenValue: token.value
fix: (fixer) -> fixer.insertTextBefore token, ' '
}
###*
# Determines if a node is an object type
# @param {ASTNode} node - The node to check.
# @returns {boolean} Whether or not the node is an object type.
###
isObjectType = (node) ->
node and
node.type in ['ObjectExpression', 'ObjectPattern'] and
not node.implicit
###*
# Determines if a node is an array type
# @param {ASTNode} node - The node to check.
# @returns {boolean} Whether or not the node is an array type.
###
isArrayType = (node) ->
node and node.type in ['ArrayExpression', 'ArrayPattern']
###*
# Validates the spacing around array brackets
# @param {ASTNode} node - The node we're checking for spacing
# @returns {void}
###
validateArraySpacing = (node) ->
return if options.spaced and node.elements.length is 0
first = sourceCode.getFirstToken node
second = sourceCode.getFirstToken node, 1
last =
if node.typeAnnotation
sourceCode.getTokenBefore node.typeAnnotation
else
sourceCode.getLastToken node
penultimate = sourceCode.getTokenBefore last
firstElement = node.elements[0]
lastElement = node.elements[node.elements.length - 1]
openingBracketMustBeSpaced =
if (
(options.objectsInArraysException and isObjectType(firstElement)) or
(options.arraysInArraysException and isArrayType(firstElement)) or
(options.singleElementException and node.elements.length is 1)
)
not options.spaced
else
options.spaced
closingBracketMustBeSpaced =
if (
(options.objectsInArraysException and isObjectType(lastElement)) or
(options.arraysInArraysException and isArrayType(lastElement)) or
(options.singleElementException and node.elements.length is 1)
)
not options.spaced
else
options.spaced
if astUtils.isTokenOnSameLine first, second
if (
openingBracketMustBeSpaced and
not sourceCode.isSpaceBetweenTokens first, second
)
reportRequiredBeginningSpace node, first
if (
not openingBracketMustBeSpaced and
sourceCode.isSpaceBetweenTokens first, second
)
reportNoBeginningSpace node, first
if first isnt penultimate and astUtils.isTokenOnSameLine penultimate, last
if (
closingBracketMustBeSpaced and
not sourceCode.isSpaceBetweenTokens penultimate, last
)
reportRequiredEndingSpace node, last
if (
not closingBracketMustBeSpaced and
sourceCode.isSpaceBetweenTokens penultimate, last
)
reportNoEndingSpace node, last
#--------------------------------------------------------------------------
# Public
#--------------------------------------------------------------------------
ArrayPattern: validateArraySpacing
ArrayExpression: validateArraySpacing
| true | ###*
# @fileoverview Disallows or enforces spaces inside of array brackets.
# @author PI:NAME:<NAME>END_PI
###
'use strict'
astUtils = require '../eslint-ast-utils'
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description: 'enforce consistent spacing inside array brackets'
category: 'Stylistic Issues'
recommended: no
url: 'https://eslint.org/docs/rules/array-bracket-spacing'
fixable: 'whitespace'
schema: [
enum: ['always', 'never']
,
type: 'object'
properties:
singleValue:
type: 'boolean'
objectsInArrays:
type: 'boolean'
arraysInArrays:
type: 'boolean'
additionalProperties: no
]
messages:
unexpectedSpaceAfter: "There should be no space after '{{tokenValue}}'."
unexpectedSpaceBefore: "There should be no space before '{{tokenValue}}'."
missingSpaceAfter: "A space is required after '{{tokenValue}}'."
missingSpaceBefore: "A space is required before '{{tokenValue}}'."
create: (context) ->
spaced = context.options[0] is 'always'
sourceCode = context.getSourceCode()
###*
# Determines whether an option is set, relative to the spacing option.
# If spaced is "always", then check whether option is set to false.
# If spaced is "never", then check whether option is set to true.
# @param {Object} option - The option to exclude.
# @returns {boolean} Whether or not the property is excluded.
###
isOptionSet = (option) ->
if context.options[1]
context.options[1][option] is not spaced
else
no
options = {
spaced
singleElementException: isOptionSet 'singleValue'
objectsInArraysException: isOptionSet 'objectsInArrays'
arraysInArraysException: isOptionSet 'arraysInArrays'
}
#--------------------------------------------------------------------------
# Helpers
#--------------------------------------------------------------------------
###*
# Reports that there shouldn't be a space after the first token
# @param {ASTNode} node - The node to report in the event of an error.
# @param {Token} token - The token to use for the report.
# @returns {void}
###
reportNoBeginningSpace = (node, token) ->
context.report {
node
loc: token.loc.start
messageId: 'unexpectedSpaceAfter'
data:
tokenValue: token.value
fix: (fixer) ->
nextToken = sourceCode.getTokenAfter token
fixer.removeRange [token.range[1], nextToken.range[0]]
}
###*
# Reports that there shouldn't be a space before the last token
# @param {ASTNode} node - The node to report in the event of an error.
# @param {Token} token - The token to use for the report.
# @returns {void}
###
reportNoEndingSpace = (node, token) ->
context.report {
node
loc: token.loc.start
messageId: 'unexpectedSpaceBefore'
data:
tokenValue: token.value
fix: (fixer) ->
previousToken = sourceCode.getTokenBefore token
fixer.removeRange [previousToken.range[1], token.range[0]]
}
###*
# Reports that there should be a space after the first token
# @param {ASTNode} node - The node to report in the event of an error.
# @param {Token} token - The token to use for the report.
# @returns {void}
###
reportRequiredBeginningSpace = (node, token) ->
context.report {
node
loc: token.loc.start
messageId: 'missingSpaceAfter'
data:
tokenValue: token.value
fix: (fixer) -> fixer.insertTextAfter token, ' '
}
###*
# Reports that there should be a space before the last token
# @param {ASTNode} node - The node to report in the event of an error.
# @param {Token} token - The token to use for the report.
# @returns {void}
###
reportRequiredEndingSpace = (node, token) ->
context.report {
node
loc: token.loc.start
messageId: 'missingSpaceBefore'
data:
tokenValue: token.value
fix: (fixer) -> fixer.insertTextBefore token, ' '
}
###*
# Determines if a node is an object type
# @param {ASTNode} node - The node to check.
# @returns {boolean} Whether or not the node is an object type.
###
isObjectType = (node) ->
node and
node.type in ['ObjectExpression', 'ObjectPattern'] and
not node.implicit
###*
# Determines if a node is an array type
# @param {ASTNode} node - The node to check.
# @returns {boolean} Whether or not the node is an array type.
###
isArrayType = (node) ->
node and node.type in ['ArrayExpression', 'ArrayPattern']
###*
# Validates the spacing around array brackets
# @param {ASTNode} node - The node we're checking for spacing
# @returns {void}
###
validateArraySpacing = (node) ->
return if options.spaced and node.elements.length is 0
first = sourceCode.getFirstToken node
second = sourceCode.getFirstToken node, 1
last =
if node.typeAnnotation
sourceCode.getTokenBefore node.typeAnnotation
else
sourceCode.getLastToken node
penultimate = sourceCode.getTokenBefore last
firstElement = node.elements[0]
lastElement = node.elements[node.elements.length - 1]
openingBracketMustBeSpaced =
if (
(options.objectsInArraysException and isObjectType(firstElement)) or
(options.arraysInArraysException and isArrayType(firstElement)) or
(options.singleElementException and node.elements.length is 1)
)
not options.spaced
else
options.spaced
closingBracketMustBeSpaced =
if (
(options.objectsInArraysException and isObjectType(lastElement)) or
(options.arraysInArraysException and isArrayType(lastElement)) or
(options.singleElementException and node.elements.length is 1)
)
not options.spaced
else
options.spaced
if astUtils.isTokenOnSameLine first, second
if (
openingBracketMustBeSpaced and
not sourceCode.isSpaceBetweenTokens first, second
)
reportRequiredBeginningSpace node, first
if (
not openingBracketMustBeSpaced and
sourceCode.isSpaceBetweenTokens first, second
)
reportNoBeginningSpace node, first
if first isnt penultimate and astUtils.isTokenOnSameLine penultimate, last
if (
closingBracketMustBeSpaced and
not sourceCode.isSpaceBetweenTokens penultimate, last
)
reportRequiredEndingSpace node, last
if (
not closingBracketMustBeSpaced and
sourceCode.isSpaceBetweenTokens penultimate, last
)
reportNoEndingSpace node, last
#--------------------------------------------------------------------------
# Public
#--------------------------------------------------------------------------
ArrayPattern: validateArraySpacing
ArrayExpression: validateArraySpacing
|
[
{
"context": "rk: 'wifi'\n\t\twifiSsid: 'testWifiSsid'\n\t\twifiKey: 'testWifiKey'\n\n\tsdk.models.device.get(DEVICES.raspberrypi.i",
"end": 3265,
"score": 0.9937201738357544,
"start": 3257,
"tag": "KEY",
"value": "testWifi"
},
{
"context": "iSsid')\n\t\tm.chai.expect(config.wifiKey)... | tests/e2e.coffee | resin-io-modules/resin-device-init | 3 | m = require('mochainon')
_ = require('lodash')
os = require('os')
path = require('path')
Promise = require('bluebird')
fs = Promise.promisifyAll(require('fs'))
wary = require('wary')
settings = require('balena-settings-client')
sdk = require('balena-sdk')({
apiUrl: settings.get('apiUrl')
})
imagefs = require('balena-image-fs')
init = require('../lib/init')
RASPBERRYPI_OS1 = path.join(__dirname, 'images', 'raspberrypi-os1.img')
RASPBERRYPI_OS2 = path.join(__dirname, 'images', 'raspberrypi-os2.img')
RASPBERRYPI_WITH_DEVICE_TYPE = path.join(__dirname, 'images', 'raspberrypi-with-device-type.img')
EDISON = path.join(__dirname, 'images', 'edison')
RANDOM = path.join(__dirname, 'images', 'device.random')
DEVICES = {}
prepareDevice = (deviceType) ->
applicationName = "DeviceInitE2E_#{deviceType.replace(/[- ]/, '_')}"
console.log("Creating #{applicationName}")
sdk.models.application.has(applicationName).then (hasApplication) ->
return if hasApplication
sdk.models.application.create({
name: applicationName
deviceType
})
.then(sdk.models.device.generateUniqueKey)
.then (uuid) ->
sdk.models.device.register(applicationName, uuid)
extract = (stream) ->
return new Promise (resolve, reject) ->
result = ''
stream.on('error', reject)
stream.on 'data', (chunk) ->
result += chunk
stream.on 'end', ->
resolve(result)
waitStream = (stream) ->
return new Promise (resolve, reject) ->
stream.on('error', reject)
stream.on('close', resolve)
stream.on('end', resolve)
getManifest = (slug) ->
sdk.models.device.getManifestBySlug(slug)
########################################################################
# Raspberry Pi
########################################################################
wary.it 'should add a config.json correctly to a raspberry pi',
raspberrypi: RASPBERRYPI_OS1
, (images) ->
config =
isTestConfig: true
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.raspberrypi, manifest, config)
.then(waitStream)
.then ->
imagefs.interact(
images.raspberrypi
1
(_fs) ->
readFileAsync = Promise.promisify(_fs.readFile)
return readFileAsync('/config.json', { encoding: 'utf8' })
)
.then(JSON.parse)
.then (config) ->
m.chai.expect(config.isTestConfig).to.equal(true)
wary.it 'should add a correct config.json to a raspberry pi containing a device-type.json',
raspberrypi: RASPBERRYPI_WITH_DEVICE_TYPE
, (images) ->
config =
isTestConfig: true
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
# make sure the device-type.json file is read from the image
init.getImageManifest(images.raspberrypi).then (manifest) ->
init.configure(images.raspberrypi, manifest, config)
.then(waitStream)
.then ->
imagefs.interact(
images.raspberrypi
5
(_fs) ->
readFileAsync = Promise.promisify(_fs.readFile)
return readFileAsync('/config.json', { encoding: 'utf8' })
)
.then(JSON.parse)
.then (config) ->
m.chai.expect(config.isTestConfig).to.equal(true)
wary.it 'should add network correctly to a 1.x raspberry pi',
raspberrypi: RASPBERRYPI_OS1
, (images) ->
options =
network: 'wifi'
wifiSsid: 'testWifiSsid'
wifiKey: 'testWifiKey'
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.raspberrypi, manifest, {}, options)
.then(waitStream)
.then ->
imagefs.interact(
images.raspberrypi
1
(fs) ->
readFileAsync = Promise.promisify(fs.readFile)
return readFileAsync('/config.json', { encoding: 'utf8' })
)
.then(JSON.parse)
.then (config) ->
m.chai.expect(config.wifiSsid).to.equal('testWifiSsid')
m.chai.expect(config.wifiKey).to.equal('testWifiKey')
m.chai.expect(config.files['network/network.config']).to.include('Name=testWifiSsid')
m.chai.expect(config.files['network/network.config']).to.include('Passphrase=testWifiKey')
wary.it 'should add network correctly to a 2.x raspberry pi',
raspberrypi: RASPBERRYPI_OS2
, (images) ->
options =
network: 'wifi'
wifiSsid: 'testWifiSsid'
wifiKey: 'testWifiKey'
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.raspberrypi, manifest, {}, options)
.then(waitStream)
.then ->
imagefs.interact(
images.raspberrypi
1
(fs) ->
readFileAsync = Promise.promisify(fs.readFile)
return readFileAsync('/system-connections/resin-wifi', { encoding: 'utf8' })
)
.then (wifiConfig) ->
m.chai.expect(wifiConfig).to.include('ssid=testWifiSsid')
m.chai.expect(wifiConfig).to.include('psk=testWifiKey')
wary.it 'should not trigger a state event when configuring a raspberry pi',
raspberrypi: RASPBERRYPI_OS1
, (images) ->
spy = m.sinon.spy()
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.raspberrypi, manifest, {})
.then (configuration) ->
configuration.on('state', spy)
return waitStream(configuration)
.then ->
m.chai.expect(spy).to.not.have.been.called
wary.it 'should initialize a raspberry pi image',
raspberrypi: RASPBERRYPI_OS1
random: RANDOM
, (images) ->
drive =
raw: images.random
size: fs.statSync(images.random).size
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.raspberrypi, manifest, {})
.then(waitStream).then ->
init.initialize(images.raspberrypi, manifest, { drive })
.then(waitStream).then ->
Promise.props
raspberrypi: fs.readFileAsync(images.raspberrypi)
random: fs.readFileAsync(images.random)
.then (results) ->
m.chai.expect(results.random).to.deep.equal(results.raspberrypi)
wary.it 'should initialize a raspberry pi image containing a device type',
raspberrypi: RASPBERRYPI_WITH_DEVICE_TYPE
random: RANDOM
, (images) ->
drive =
raw: images.random
size: fs.statSync(images.random).size
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
# make sure the device-type.json file is read from the image
init.getImageManifest(images.raspberrypi).then (manifest) ->
init.configure(images.raspberrypi, manifest, {})
.then(waitStream).then ->
init.initialize(images.raspberrypi, manifest, { drive })
.then(waitStream).then ->
Promise.props
raspberrypi: fs.readFileAsync(images.raspberrypi)
random: fs.readFileAsync(images.random)
.then (results) ->
m.chai.expect(results.random).to.deep.equal(results.raspberrypi)
wary.it 'should emit state events when initializing a raspberry pi',
raspberrypi: RASPBERRYPI_OS1
random: RANDOM
, (images) ->
drive =
raw: images.random
size: fs.statSync(images.random).size
spy = m.sinon.spy()
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.raspberrypi, manifest, {})
.then(waitStream).then ->
init.initialize(images.raspberrypi, manifest, { drive })
.then (initialization) ->
initialization.on('state', spy)
return waitStream(initialization)
.then ->
m.chai.expect(spy).to.have.been.calledOnce
args = spy.firstCall.args
m.chai.expect(args[0].operation.command).to.equal('burn')
m.chai.expect(args[0].percentage).to.equal(100)
wary.it 'should emit burn events when initializing a raspberry pi',
raspberrypi: RASPBERRYPI_OS1
random: RANDOM
, (images) ->
drive =
raw: images.random
size: fs.statSync(images.random).size
spy = m.sinon.spy()
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.raspberrypi, manifest, {})
.then(waitStream).then ->
init.initialize(images.raspberrypi, manifest, { drive })
.then (initialization) ->
initialization.on('burn', spy)
return waitStream(initialization)
.then ->
m.chai.expect(spy).to.have.been.called
args = spy.lastCall.args
m.chai.expect(args[0].percentage).to.equal(100)
m.chai.expect(args[0].eta).to.equal(0)
########################################################################
# Intel Edison
########################################################################
wary.it 'should add a config.json to an intel edison',
edison: EDISON
, (images) ->
config =
isTestConfig: true
sdk.models.device.get(DEVICES.edison.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.edison, manifest, config)
.then(waitStream)
.then ->
imagefs.interact(
path.join(images.edison, 'resin-image-edison.hddimg')
undefined
(fs) ->
readFileAsync = Promise.promisify(fs.readFile)
return readFileAsync('/config.json', { encoding: 'utf8' })
)
.then(JSON.parse)
.then (config) ->
m.chai.expect(config.isTestConfig).to.equal(true)
wary.it 'should not trigger a state event when configuring an intel edison',
edison: EDISON
, (images) ->
spy = m.sinon.spy()
sdk.models.device.get(DEVICES.edison.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.edison, manifest, {})
.then (configuration) ->
configuration.on('state', spy)
return waitStream(configuration)
.then ->
m.chai.expect(spy).to.not.have.been.called
wary.it 'should be able to initialize an intel edison with a script',
edison: EDISON
, (images) ->
stdout = ''
stderr = ''
sdk.models.device.get(DEVICES.edison.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.edison, manifest, {})
.then(waitStream).then ->
init.initialize(images.edison, manifest, {})
.then (initialization) ->
initialization.on 'stdout', (data) ->
stdout += data
initialization.on 'stderr', (data) ->
stderr += data
return waitStream(initialization)
.then ->
m.chai.expect(stdout.replace(/[\n\r]/g, '')).to.equal('Hello World')
m.chai.expect(stderr).to.equal('')
Promise.try ->
require('dotenv').config(silent: true)
.then ->
sdk.auth.login
email: process.env.TEST_EMAIL
password: process.env.TEST_PASSWORD
.then ->
console.log('Logged in')
Promise.props
raspberrypi: prepareDevice('raspberry-pi')
edison: prepareDevice('intel-edison')
.then (devices) ->
DEVICES = devices
wary.run()
.catch (error) ->
console.error(error, error.stack)
process.exit(1)
| 90744 | m = require('mochainon')
_ = require('lodash')
os = require('os')
path = require('path')
Promise = require('bluebird')
fs = Promise.promisifyAll(require('fs'))
wary = require('wary')
settings = require('balena-settings-client')
sdk = require('balena-sdk')({
apiUrl: settings.get('apiUrl')
})
imagefs = require('balena-image-fs')
init = require('../lib/init')
RASPBERRYPI_OS1 = path.join(__dirname, 'images', 'raspberrypi-os1.img')
RASPBERRYPI_OS2 = path.join(__dirname, 'images', 'raspberrypi-os2.img')
RASPBERRYPI_WITH_DEVICE_TYPE = path.join(__dirname, 'images', 'raspberrypi-with-device-type.img')
EDISON = path.join(__dirname, 'images', 'edison')
RANDOM = path.join(__dirname, 'images', 'device.random')
DEVICES = {}
prepareDevice = (deviceType) ->
applicationName = "DeviceInitE2E_#{deviceType.replace(/[- ]/, '_')}"
console.log("Creating #{applicationName}")
sdk.models.application.has(applicationName).then (hasApplication) ->
return if hasApplication
sdk.models.application.create({
name: applicationName
deviceType
})
.then(sdk.models.device.generateUniqueKey)
.then (uuid) ->
sdk.models.device.register(applicationName, uuid)
extract = (stream) ->
return new Promise (resolve, reject) ->
result = ''
stream.on('error', reject)
stream.on 'data', (chunk) ->
result += chunk
stream.on 'end', ->
resolve(result)
waitStream = (stream) ->
return new Promise (resolve, reject) ->
stream.on('error', reject)
stream.on('close', resolve)
stream.on('end', resolve)
getManifest = (slug) ->
sdk.models.device.getManifestBySlug(slug)
########################################################################
# Raspberry Pi
########################################################################
wary.it 'should add a config.json correctly to a raspberry pi',
raspberrypi: RASPBERRYPI_OS1
, (images) ->
config =
isTestConfig: true
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.raspberrypi, manifest, config)
.then(waitStream)
.then ->
imagefs.interact(
images.raspberrypi
1
(_fs) ->
readFileAsync = Promise.promisify(_fs.readFile)
return readFileAsync('/config.json', { encoding: 'utf8' })
)
.then(JSON.parse)
.then (config) ->
m.chai.expect(config.isTestConfig).to.equal(true)
wary.it 'should add a correct config.json to a raspberry pi containing a device-type.json',
raspberrypi: RASPBERRYPI_WITH_DEVICE_TYPE
, (images) ->
config =
isTestConfig: true
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
# make sure the device-type.json file is read from the image
init.getImageManifest(images.raspberrypi).then (manifest) ->
init.configure(images.raspberrypi, manifest, config)
.then(waitStream)
.then ->
imagefs.interact(
images.raspberrypi
5
(_fs) ->
readFileAsync = Promise.promisify(_fs.readFile)
return readFileAsync('/config.json', { encoding: 'utf8' })
)
.then(JSON.parse)
.then (config) ->
m.chai.expect(config.isTestConfig).to.equal(true)
wary.it 'should add network correctly to a 1.x raspberry pi',
raspberrypi: RASPBERRYPI_OS1
, (images) ->
options =
network: 'wifi'
wifiSsid: 'testWifiSsid'
wifiKey: '<KEY>Key'
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.raspberrypi, manifest, {}, options)
.then(waitStream)
.then ->
imagefs.interact(
images.raspberrypi
1
(fs) ->
readFileAsync = Promise.promisify(fs.readFile)
return readFileAsync('/config.json', { encoding: 'utf8' })
)
.then(JSON.parse)
.then (config) ->
m.chai.expect(config.wifiSsid).to.equal('testWifiSsid')
m.chai.expect(config.wifiKey).to.equal('<KEY>WifiKey')
m.chai.expect(config.files['network/network.config']).to.include('Name=testWifiSsid')
m.chai.expect(config.files['network/network.config']).to.include('Passphrase=<KEY>WifiKey')
wary.it 'should add network correctly to a 2.x raspberry pi',
raspberrypi: RASPBERRYPI_OS2
, (images) ->
options =
network: 'wifi'
wifiSsid: 'testWifiSsid'
wifiKey: '<KEY>'
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.raspberrypi, manifest, {}, options)
.then(waitStream)
.then ->
imagefs.interact(
images.raspberrypi
1
(fs) ->
readFileAsync = Promise.promisify(fs.readFile)
return readFileAsync('/system-connections/resin-wifi', { encoding: 'utf8' })
)
.then (wifiConfig) ->
m.chai.expect(wifiConfig).to.include('ssid=testWifiSsid')
m.chai.expect(wifiConfig).to.include('psk=testWifiKey')
wary.it 'should not trigger a state event when configuring a raspberry pi',
raspberrypi: RASPBERRYPI_OS1
, (images) ->
spy = m.sinon.spy()
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.raspberrypi, manifest, {})
.then (configuration) ->
configuration.on('state', spy)
return waitStream(configuration)
.then ->
m.chai.expect(spy).to.not.have.been.called
wary.it 'should initialize a raspberry pi image',
raspberrypi: RASPBERRYPI_OS1
random: RANDOM
, (images) ->
drive =
raw: images.random
size: fs.statSync(images.random).size
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.raspberrypi, manifest, {})
.then(waitStream).then ->
init.initialize(images.raspberrypi, manifest, { drive })
.then(waitStream).then ->
Promise.props
raspberrypi: fs.readFileAsync(images.raspberrypi)
random: fs.readFileAsync(images.random)
.then (results) ->
m.chai.expect(results.random).to.deep.equal(results.raspberrypi)
wary.it 'should initialize a raspberry pi image containing a device type',
raspberrypi: RASPBERRYPI_WITH_DEVICE_TYPE
random: RANDOM
, (images) ->
drive =
raw: images.random
size: fs.statSync(images.random).size
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
# make sure the device-type.json file is read from the image
init.getImageManifest(images.raspberrypi).then (manifest) ->
init.configure(images.raspberrypi, manifest, {})
.then(waitStream).then ->
init.initialize(images.raspberrypi, manifest, { drive })
.then(waitStream).then ->
Promise.props
raspberrypi: fs.readFileAsync(images.raspberrypi)
random: fs.readFileAsync(images.random)
.then (results) ->
m.chai.expect(results.random).to.deep.equal(results.raspberrypi)
wary.it 'should emit state events when initializing a raspberry pi',
raspberrypi: RASPBERRYPI_OS1
random: RANDOM
, (images) ->
drive =
raw: images.random
size: fs.statSync(images.random).size
spy = m.sinon.spy()
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.raspberrypi, manifest, {})
.then(waitStream).then ->
init.initialize(images.raspberrypi, manifest, { drive })
.then (initialization) ->
initialization.on('state', spy)
return waitStream(initialization)
.then ->
m.chai.expect(spy).to.have.been.calledOnce
args = spy.firstCall.args
m.chai.expect(args[0].operation.command).to.equal('burn')
m.chai.expect(args[0].percentage).to.equal(100)
wary.it 'should emit burn events when initializing a raspberry pi',
raspberrypi: RASPBERRYPI_OS1
random: RANDOM
, (images) ->
drive =
raw: images.random
size: fs.statSync(images.random).size
spy = m.sinon.spy()
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.raspberrypi, manifest, {})
.then(waitStream).then ->
init.initialize(images.raspberrypi, manifest, { drive })
.then (initialization) ->
initialization.on('burn', spy)
return waitStream(initialization)
.then ->
m.chai.expect(spy).to.have.been.called
args = spy.lastCall.args
m.chai.expect(args[0].percentage).to.equal(100)
m.chai.expect(args[0].eta).to.equal(0)
########################################################################
# Intel Edison
########################################################################
wary.it 'should add a config.json to an intel edison',
edison: EDISON
, (images) ->
config =
isTestConfig: true
sdk.models.device.get(DEVICES.edison.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.edison, manifest, config)
.then(waitStream)
.then ->
imagefs.interact(
path.join(images.edison, 'resin-image-edison.hddimg')
undefined
(fs) ->
readFileAsync = Promise.promisify(fs.readFile)
return readFileAsync('/config.json', { encoding: 'utf8' })
)
.then(JSON.parse)
.then (config) ->
m.chai.expect(config.isTestConfig).to.equal(true)
wary.it 'should not trigger a state event when configuring an intel edison',
edison: EDISON
, (images) ->
spy = m.sinon.spy()
sdk.models.device.get(DEVICES.edison.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.edison, manifest, {})
.then (configuration) ->
configuration.on('state', spy)
return waitStream(configuration)
.then ->
m.chai.expect(spy).to.not.have.been.called
wary.it 'should be able to initialize an intel edison with a script',
edison: EDISON
, (images) ->
stdout = ''
stderr = ''
sdk.models.device.get(DEVICES.edison.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.edison, manifest, {})
.then(waitStream).then ->
init.initialize(images.edison, manifest, {})
.then (initialization) ->
initialization.on 'stdout', (data) ->
stdout += data
initialization.on 'stderr', (data) ->
stderr += data
return waitStream(initialization)
.then ->
m.chai.expect(stdout.replace(/[\n\r]/g, '')).to.equal('Hello World')
m.chai.expect(stderr).to.equal('')
Promise.try ->
require('dotenv').config(silent: true)
.then ->
sdk.auth.login
email: process.env.TEST_EMAIL
password: <PASSWORD>
.then ->
console.log('Logged in')
Promise.props
raspberrypi: prepareDevice('raspberry-pi')
edison: prepareDevice('intel-edison')
.then (devices) ->
DEVICES = devices
wary.run()
.catch (error) ->
console.error(error, error.stack)
process.exit(1)
| true | m = require('mochainon')
_ = require('lodash')
os = require('os')
path = require('path')
Promise = require('bluebird')
fs = Promise.promisifyAll(require('fs'))
wary = require('wary')
settings = require('balena-settings-client')
sdk = require('balena-sdk')({
apiUrl: settings.get('apiUrl')
})
imagefs = require('balena-image-fs')
init = require('../lib/init')
RASPBERRYPI_OS1 = path.join(__dirname, 'images', 'raspberrypi-os1.img')
RASPBERRYPI_OS2 = path.join(__dirname, 'images', 'raspberrypi-os2.img')
RASPBERRYPI_WITH_DEVICE_TYPE = path.join(__dirname, 'images', 'raspberrypi-with-device-type.img')
EDISON = path.join(__dirname, 'images', 'edison')
RANDOM = path.join(__dirname, 'images', 'device.random')
DEVICES = {}
prepareDevice = (deviceType) ->
applicationName = "DeviceInitE2E_#{deviceType.replace(/[- ]/, '_')}"
console.log("Creating #{applicationName}")
sdk.models.application.has(applicationName).then (hasApplication) ->
return if hasApplication
sdk.models.application.create({
name: applicationName
deviceType
})
.then(sdk.models.device.generateUniqueKey)
.then (uuid) ->
sdk.models.device.register(applicationName, uuid)
extract = (stream) ->
return new Promise (resolve, reject) ->
result = ''
stream.on('error', reject)
stream.on 'data', (chunk) ->
result += chunk
stream.on 'end', ->
resolve(result)
waitStream = (stream) ->
return new Promise (resolve, reject) ->
stream.on('error', reject)
stream.on('close', resolve)
stream.on('end', resolve)
getManifest = (slug) ->
sdk.models.device.getManifestBySlug(slug)
########################################################################
# Raspberry Pi
########################################################################
wary.it 'should add a config.json correctly to a raspberry pi',
raspberrypi: RASPBERRYPI_OS1
, (images) ->
config =
isTestConfig: true
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.raspberrypi, manifest, config)
.then(waitStream)
.then ->
imagefs.interact(
images.raspberrypi
1
(_fs) ->
readFileAsync = Promise.promisify(_fs.readFile)
return readFileAsync('/config.json', { encoding: 'utf8' })
)
.then(JSON.parse)
.then (config) ->
m.chai.expect(config.isTestConfig).to.equal(true)
wary.it 'should add a correct config.json to a raspberry pi containing a device-type.json',
raspberrypi: RASPBERRYPI_WITH_DEVICE_TYPE
, (images) ->
config =
isTestConfig: true
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
# make sure the device-type.json file is read from the image
init.getImageManifest(images.raspberrypi).then (manifest) ->
init.configure(images.raspberrypi, manifest, config)
.then(waitStream)
.then ->
imagefs.interact(
images.raspberrypi
5
(_fs) ->
readFileAsync = Promise.promisify(_fs.readFile)
return readFileAsync('/config.json', { encoding: 'utf8' })
)
.then(JSON.parse)
.then (config) ->
m.chai.expect(config.isTestConfig).to.equal(true)
wary.it 'should add network correctly to a 1.x raspberry pi',
raspberrypi: RASPBERRYPI_OS1
, (images) ->
options =
network: 'wifi'
wifiSsid: 'testWifiSsid'
wifiKey: 'PI:KEY:<KEY>END_PIKey'
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.raspberrypi, manifest, {}, options)
.then(waitStream)
.then ->
imagefs.interact(
images.raspberrypi
1
(fs) ->
readFileAsync = Promise.promisify(fs.readFile)
return readFileAsync('/config.json', { encoding: 'utf8' })
)
.then(JSON.parse)
.then (config) ->
m.chai.expect(config.wifiSsid).to.equal('testWifiSsid')
m.chai.expect(config.wifiKey).to.equal('PI:KEY:<KEY>END_PIWifiKey')
m.chai.expect(config.files['network/network.config']).to.include('Name=testWifiSsid')
m.chai.expect(config.files['network/network.config']).to.include('Passphrase=PI:PASSWORD:<KEY>END_PIWifiKey')
wary.it 'should add network correctly to a 2.x raspberry pi',
raspberrypi: RASPBERRYPI_OS2
, (images) ->
options =
network: 'wifi'
wifiSsid: 'testWifiSsid'
wifiKey: 'PI:KEY:<KEY>END_PI'
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.raspberrypi, manifest, {}, options)
.then(waitStream)
.then ->
imagefs.interact(
images.raspberrypi
1
(fs) ->
readFileAsync = Promise.promisify(fs.readFile)
return readFileAsync('/system-connections/resin-wifi', { encoding: 'utf8' })
)
.then (wifiConfig) ->
m.chai.expect(wifiConfig).to.include('ssid=testWifiSsid')
m.chai.expect(wifiConfig).to.include('psk=testWifiKey')
wary.it 'should not trigger a state event when configuring a raspberry pi',
raspberrypi: RASPBERRYPI_OS1
, (images) ->
spy = m.sinon.spy()
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.raspberrypi, manifest, {})
.then (configuration) ->
configuration.on('state', spy)
return waitStream(configuration)
.then ->
m.chai.expect(spy).to.not.have.been.called
wary.it 'should initialize a raspberry pi image',
raspberrypi: RASPBERRYPI_OS1
random: RANDOM
, (images) ->
drive =
raw: images.random
size: fs.statSync(images.random).size
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.raspberrypi, manifest, {})
.then(waitStream).then ->
init.initialize(images.raspberrypi, manifest, { drive })
.then(waitStream).then ->
Promise.props
raspberrypi: fs.readFileAsync(images.raspberrypi)
random: fs.readFileAsync(images.random)
.then (results) ->
m.chai.expect(results.random).to.deep.equal(results.raspberrypi)
wary.it 'should initialize a raspberry pi image containing a device type',
raspberrypi: RASPBERRYPI_WITH_DEVICE_TYPE
random: RANDOM
, (images) ->
drive =
raw: images.random
size: fs.statSync(images.random).size
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
# make sure the device-type.json file is read from the image
init.getImageManifest(images.raspberrypi).then (manifest) ->
init.configure(images.raspberrypi, manifest, {})
.then(waitStream).then ->
init.initialize(images.raspberrypi, manifest, { drive })
.then(waitStream).then ->
Promise.props
raspberrypi: fs.readFileAsync(images.raspberrypi)
random: fs.readFileAsync(images.random)
.then (results) ->
m.chai.expect(results.random).to.deep.equal(results.raspberrypi)
wary.it 'should emit state events when initializing a raspberry pi',
raspberrypi: RASPBERRYPI_OS1
random: RANDOM
, (images) ->
drive =
raw: images.random
size: fs.statSync(images.random).size
spy = m.sinon.spy()
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.raspberrypi, manifest, {})
.then(waitStream).then ->
init.initialize(images.raspberrypi, manifest, { drive })
.then (initialization) ->
initialization.on('state', spy)
return waitStream(initialization)
.then ->
m.chai.expect(spy).to.have.been.calledOnce
args = spy.firstCall.args
m.chai.expect(args[0].operation.command).to.equal('burn')
m.chai.expect(args[0].percentage).to.equal(100)
wary.it 'should emit burn events when initializing a raspberry pi',
raspberrypi: RASPBERRYPI_OS1
random: RANDOM
, (images) ->
drive =
raw: images.random
size: fs.statSync(images.random).size
spy = m.sinon.spy()
sdk.models.device.get(DEVICES.raspberrypi.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.raspberrypi, manifest, {})
.then(waitStream).then ->
init.initialize(images.raspberrypi, manifest, { drive })
.then (initialization) ->
initialization.on('burn', spy)
return waitStream(initialization)
.then ->
m.chai.expect(spy).to.have.been.called
args = spy.lastCall.args
m.chai.expect(args[0].percentage).to.equal(100)
m.chai.expect(args[0].eta).to.equal(0)
########################################################################
# Intel Edison
########################################################################
wary.it 'should add a config.json to an intel edison',
edison: EDISON
, (images) ->
config =
isTestConfig: true
sdk.models.device.get(DEVICES.edison.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.edison, manifest, config)
.then(waitStream)
.then ->
imagefs.interact(
path.join(images.edison, 'resin-image-edison.hddimg')
undefined
(fs) ->
readFileAsync = Promise.promisify(fs.readFile)
return readFileAsync('/config.json', { encoding: 'utf8' })
)
.then(JSON.parse)
.then (config) ->
m.chai.expect(config.isTestConfig).to.equal(true)
wary.it 'should not trigger a state event when configuring an intel edison',
edison: EDISON
, (images) ->
spy = m.sinon.spy()
sdk.models.device.get(DEVICES.edison.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.edison, manifest, {})
.then (configuration) ->
configuration.on('state', spy)
return waitStream(configuration)
.then ->
m.chai.expect(spy).to.not.have.been.called
wary.it 'should be able to initialize an intel edison with a script',
edison: EDISON
, (images) ->
stdout = ''
stderr = ''
sdk.models.device.get(DEVICES.edison.id).then (device) ->
getManifest(device.device_type).then (manifest) ->
init.configure(images.edison, manifest, {})
.then(waitStream).then ->
init.initialize(images.edison, manifest, {})
.then (initialization) ->
initialization.on 'stdout', (data) ->
stdout += data
initialization.on 'stderr', (data) ->
stderr += data
return waitStream(initialization)
.then ->
m.chai.expect(stdout.replace(/[\n\r]/g, '')).to.equal('Hello World')
m.chai.expect(stderr).to.equal('')
Promise.try ->
require('dotenv').config(silent: true)
.then ->
sdk.auth.login
email: process.env.TEST_EMAIL
password: PI:PASSWORD:<PASSWORD>END_PI
.then ->
console.log('Logged in')
Promise.props
raspberrypi: prepareDevice('raspberry-pi')
edison: prepareDevice('intel-edison')
.then (devices) ->
DEVICES = devices
wary.run()
.catch (error) ->
console.error(error, error.stack)
process.exit(1)
|
[
{
"context": "ter buf\n len = cleverBufferWriter.writeString 'héllo', {encoding: 'utf16le'}\n len.should.eql 10\n\n ",
"end": 6735,
"score": 0.9181643724441528,
"start": 6730,
"tag": "NAME",
"value": "héllo"
},
{
"context": "ter buf\n len = cleverBufferWriter.writeString 'H... | test/clever-buffer-writer.spec.coffee | deancouch/clever-buffer | 0 | should = require 'should'
CleverBufferWriter = require "#{SRC}/clever-buffer-writer"
{ writeToStupidBuffer,
writeToCleverBuffer } = require './support/test-helper'
specHelper = require './spec-helper'
describe 'CleverBufferWriter', ->
NUMBER_OF_ITERATIONS = 16
it 'should write Uint8', ->
numberOfBytesPerWord = 1
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt8 value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt8 value
buf.should.eql cleverBuffer
it 'should write int8', ->
numberOfBytesPerWord = 1
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt8 value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt8 value
buf.should.eql cleverBuffer
it 'should write Uint16 Little Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt16LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt16 value
buf.should.eql cleverBuffer
it 'should write int16 Little Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt16LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt16 value
buf.should.eql cleverBuffer
it 'should write Uint16 Big Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt16BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt16 value
buf.should.eql cleverBuffer
it 'should write int16 Big Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt16BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt16 value
buf.should.eql cleverBuffer
it 'should write Uint32 Little Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt32LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt32 value
buf.should.eql cleverBuffer
it 'should write int32 Little Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt32LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt32 value
buf.should.eql cleverBuffer
it 'should write Uint32 Big Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt32BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt32 value
buf.should.eql cleverBuffer
it 'should write int32 Big Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt32BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt32 value
buf.should.eql cleverBuffer
it 'should write bytes', ->
buf = new Buffer 11
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeBytes [0x20, 0x6d, 0x65, 0x20, 0x57, 0x6f, 0x72, 0x72, 0x79, 0x21]
cleverBufferWriter.writeBytes [0x20]
cleverBufferWriter.writeBytes [0x57, 0x68, 0x61, 0x74], {offset: 2}
cleverBufferWriter.getBuffer().should.eql new Buffer [0x20, 0x6d, 0x57, 0x68, 0x61, 0x74, 0x72, 0x72, 0x79, 0x21, 0x20]
it 'should skip bytes', ->
buf = new Buffer 4
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt8 0x10
cleverBufferWriter.skip 2
cleverBufferWriter.writeUInt8 0x20
cleverBufferWriter.getBuffer().should.eql new Buffer [0x10, 0x00, 0x00, 0x20]
it 'should skip to set offset', ->
buf = new Buffer 4
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt8 0x10
cleverBufferWriter.skipTo 2
cleverBufferWriter.writeUInt8 0x20
cleverBufferWriter.getBuffer().should.eql new Buffer [0x10, 0x00, 0x20, 0x00]
it 'should write string', ->
buf = new Buffer 32
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
len = cleverBufferWriter.writeString 'EXPECTED RETURN!'
len += cleverBufferWriter.writeString 'RETURN OF $2.00!'
len.should.eql 32
cleverBufferWriter.getOffset().should.eql 32
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x45,0x58,0x50,0x45,0x43,0x54,0x45,0x44,0x20,0x52,0x45,0x54,0x55,0x52,0x4e,0x21,
0x52,0x45,0x54,0x55,0x52,0x4e,0x20,0x4f,0x46,0x20,0x24,0x32,0x2e,0x30,0x30,0x21
]
it 'should write string in multi-byte encodings', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
len = cleverBufferWriter.writeString 'héllo', {encoding: 'utf-8'}
len.should.eql 6
cleverBufferWriter.getOffset().should.eql 6
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x68, 0xc3, 0xa9, 0x6c, 0x6c, 0x6f, 0x00, 0x00, 0x00, 0x00
]
# because of buffer.write(value, offset, length, encoding)
it 'takes the encoding param into account, even if length is not specified', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
len = cleverBufferWriter.writeString 'héllo', {encoding: 'utf16le'}
len.should.eql 10
it 'should write partial strings using length (number of bytes)', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
len = cleverBufferWriter.writeString 'HELLOWORLD', {length: 5}
#Only writes hello
len.should.eql 5
cleverBufferWriter.getOffset().should.eql 5
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x48, 0x45, 0x4C, 0x4C, 0x4F, 0x00, 0x00, 0x00, 0x00, 0x00
]
it 'should write partial multi-byte strings using length (number of bytes)', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
len = cleverBufferWriter.writeString 'héllo', {length: 4}
# Only writes hél
len.should.eql 4
cleverBufferWriter.getOffset().should.eql 4
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x68, 0xc3, 0xa9, 0x6c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
]
it 'does not write partially encoded characters', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
len = cleverBufferWriter.writeString 'éè', {length: 3}
# Only writes é
len.should.eql 2
cleverBufferWriter.getOffset().should.eql 2
cleverBufferWriter.getBuffer().should.eql new Buffer [
0xc3, 0xa9, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
]
it 'should write string at a specified offset', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeString 'HELLO', {offset: 5}
#Writes hello starting at offset 5
cleverBufferWriter.getOffset().should.eql 0
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0x45, 0x4C, 0x4C, 0x4F
]
it 'should be able to writeUInt8 at a specific offset', ->
buf = new Buffer 5
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt8 1
cleverBufferWriter.writeUInt8 2
cleverBufferWriter.writeUInt8 3
cleverBufferWriter.writeUInt8 4
cleverBufferWriter.writeUInt8 5
cleverBufferWriter.writeUInt8 6, 1
#Writes 6 at position 1
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x01, 0x06, 0x03, 0x04, 0x05
]
#Does not increment the offset
cleverBufferWriter.getOffset().should.eql 5
it 'should be able to writeUInt16 at a specific offset', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt16 1
cleverBufferWriter.writeUInt16 2
cleverBufferWriter.writeUInt16 3
cleverBufferWriter.writeUInt16 4
cleverBufferWriter.writeUInt16 5
cleverBufferWriter.writeUInt16 6, 2
#Writes 6 at position 2
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x01, 0x00, 0x06, 0x00, 0x03, 0x00, 0x04, 0x00, 0x05, 0x00
]
#Does not increment the offset
cleverBufferWriter.getOffset().should.eql 10
it 'should write Uint64 little endian MAX', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.writeUInt64('18446744073709551615')
cleverBuffer.getBuffer().should.eql new Buffer [
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
it 'should write Uint64 big endian MAX', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf, {bigEndian:true}
cleverBuffer.writeUInt64('18446744073709551615')
cleverBuffer.getBuffer().should.eql new Buffer [
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
it 'should write Uint64 little endian', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.writeUInt64('4294967366')
cleverBuffer.getBuffer().should.eql new Buffer [
0x46, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00
]
it 'should write Uint64 big endian', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf, {bigEndian:true}
cleverBuffer.writeUInt64('4294967366')
cleverBuffer.getBuffer().should.eql new Buffer [
0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x46
]
it 'should write int64 little endian', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.writeInt64('-1')
cleverBuffer.getBuffer().should.eql new Buffer [
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
it 'should write int64 big endian', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf, {bigEndian:true}
cleverBuffer.writeInt64('-1')
cleverBuffer.getBuffer().should.eql new Buffer [
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
it 'should write Uint64 at specified offset, currentOffset should not increment', ->
buf = new Buffer 10
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.writeUInt64('18446744073709551615', 2)
cleverBuffer.getBuffer().should.eql new Buffer [
0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
cleverBuffer.getOffset().should.eql 0
it 'should write Uint64 at current offset, currentOffset should increment', ->
buf = new Buffer 10
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.skip 1
cleverBuffer.writeUInt64('18446744073709551615')
cleverBuffer.getBuffer().should.eql new Buffer [
0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00
]
cleverBuffer.getOffset().should.eql 9
it 'does nothing silently when writing past the length', ->
buf = new Buffer [0x0]
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.writeUInt8(1)
cleverBuffer.writeUInt8(1)
buf.should.eql new Buffer [0x1]
it 'throws an exception when writing past the length with noAssert off', ->
buf = new Buffer [0x1]
cleverBuffer = new CleverBufferWriter buf, {noAssert: false}
cleverBuffer.writeUInt8(1)
(-> cleverBuffer.writeUInt8(1)).should.throw()
describe 'leading zeros are handled correctly', ->
  # Exercise every combination of word size, signedness and endianness.
  for testCase in specHelper.cartesianProduct {
    size: [1, 2, 4, 8]
    unsigned: [false, true]
    bigEndian: [false, true]
  }
    do ({size, unsigned, bigEndian} = testCase) ->
      it "should correctly handle leading zero for #{JSON.stringify testCase}", ->
        # NOTE(review): `new Buffer` is deprecated in modern Node (prefer
        # Buffer.alloc); kept here for consistency with the rest of this spec.
        buf1 = new Buffer size
        buf2 = new Buffer size
        cleverBuffer1 = new CleverBufferWriter buf1,
          bigEndian: bigEndian
          noAssert: false
        cleverBuffer2 = new CleverBufferWriter buf2,
          bigEndian: bigEndian
          noAssert: false
        if unsigned
          f = "writeUInt#{size*8}"
          cleverBuffer1[f] "123"
          cleverBuffer2[f] "00123"
        else
          f = "writeInt#{size*8}"
          cleverBuffer1[f] "-123"
          cleverBuffer2[f] "-00123"
        # Leading zeros in the string form must not change the encoded bytes.
        buf1.should.eql buf2
describe 'check we handle numbers and strings identically', ->
for testCase in specHelper.cartesianProduct {
size: [1, 2, 4, 8]
unsigned: [false, true]
bigEndian: [false, true]
}
do ({size, unsigned, bigEndian} = testCase) ->
it "should correctly handle numbers and strings for #{JSON.stringify testCase}", ->
buf1 = new Buffer size
buf2 = new Buffer size
cleverBuffer1 = new CleverBufferWriter buf1,
bigEndian: bigEndian
noAssert: false
cleverBuffer2 = new CleverBufferWriter buf2,
bigEndian: bigEndian
noAssert: false
if unsigned
f = "writeUInt#{size*8}"
cleverBuffer1[f] "123"
cleverBuffer2[f] 123
else
f = "writeInt#{size*8}"
cleverBuffer1[f] "-123"
cleverBuffer2[f] -123
buf1.should.eql buf2
describe 'check only throwing exception for writing negative unsigned integers when noAssert:false', ->
for testCase in specHelper.cartesianProduct {
size: [1, 2, 4, 8]
bigEndian: [false, true]
}
do ({size, bigEndian} = testCase) ->
it "should throw for noAssert:false #{JSON.stringify testCase}", ->
cleverBuffer = new CleverBufferWriter (new Buffer size),
bigEndian: bigEndian
noAssert: false
(-> cleverBuffer["writeUInt#{size*8}"]("-1")).should.throw(TypeError)
(-> cleverBuffer["writeUInt#{size*8}"](-1)).should.throw(TypeError)
it "should not throw for noAssert:true #{JSON.stringify testCase}", ->
buf = new Buffer size
cleverBuffer = new CleverBufferWriter buf,
bigEndian: bigEndian
noAssert: true
(-> cleverBuffer["writeUInt#{size*8}"]("-1")).should.not.throw()
(-> cleverBuffer["writeUInt#{size*8}"](-1)).should.not.throw()
should = require 'should'
CleverBufferWriter = require "#{SRC}/clever-buffer-writer"
{ writeToStupidBuffer,
writeToCleverBuffer } = require './support/test-helper'
specHelper = require './spec-helper'
describe 'CleverBufferWriter', ->
NUMBER_OF_ITERATIONS = 16
it 'should write Uint8', ->
numberOfBytesPerWord = 1
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt8 value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt8 value
buf.should.eql cleverBuffer
it 'should write int8', ->
numberOfBytesPerWord = 1
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt8 value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt8 value
buf.should.eql cleverBuffer
it 'should write Uint16 Little Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt16LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt16 value
buf.should.eql cleverBuffer
it 'should write int16 Little Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt16LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt16 value
buf.should.eql cleverBuffer
it 'should write Uint16 Big Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt16BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt16 value
buf.should.eql cleverBuffer
it 'should write int16 Big Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt16BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt16 value
buf.should.eql cleverBuffer
it 'should write Uint32 Little Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt32LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt32 value
buf.should.eql cleverBuffer
it 'should write int32 Little Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt32LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt32 value
buf.should.eql cleverBuffer
it 'should write Uint32 Big Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt32BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt32 value
buf.should.eql cleverBuffer
it 'should write int32 Big Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt32BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt32 value
buf.should.eql cleverBuffer
it 'should write bytes', ->
buf = new Buffer 11
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeBytes [0x20, 0x6d, 0x65, 0x20, 0x57, 0x6f, 0x72, 0x72, 0x79, 0x21]
cleverBufferWriter.writeBytes [0x20]
cleverBufferWriter.writeBytes [0x57, 0x68, 0x61, 0x74], {offset: 2}
cleverBufferWriter.getBuffer().should.eql new Buffer [0x20, 0x6d, 0x57, 0x68, 0x61, 0x74, 0x72, 0x72, 0x79, 0x21, 0x20]
it 'should skip bytes', ->
buf = new Buffer 4
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt8 0x10
cleverBufferWriter.skip 2
cleverBufferWriter.writeUInt8 0x20
cleverBufferWriter.getBuffer().should.eql new Buffer [0x10, 0x00, 0x00, 0x20]
it 'should skip to set offset', ->
buf = new Buffer 4
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt8 0x10
cleverBufferWriter.skipTo 2
cleverBufferWriter.writeUInt8 0x20
cleverBufferWriter.getBuffer().should.eql new Buffer [0x10, 0x00, 0x20, 0x00]
it 'should write string', ->
buf = new Buffer 32
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
len = cleverBufferWriter.writeString 'EXPECTED RETURN!'
len += cleverBufferWriter.writeString 'RETURN OF $2.00!'
len.should.eql 32
cleverBufferWriter.getOffset().should.eql 32
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x45,0x58,0x50,0x45,0x43,0x54,0x45,0x44,0x20,0x52,0x45,0x54,0x55,0x52,0x4e,0x21,
0x52,0x45,0x54,0x55,0x52,0x4e,0x20,0x4f,0x46,0x20,0x24,0x32,0x2e,0x30,0x30,0x21
]
it 'should write string in multi-byte encodings', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
len = cleverBufferWriter.writeString 'héllo', {encoding: 'utf-8'}
len.should.eql 6
cleverBufferWriter.getOffset().should.eql 6
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x68, 0xc3, 0xa9, 0x6c, 0x6c, 0x6f, 0x00, 0x00, 0x00, 0x00
]
# because of buffer.write(value, offset, length, encoding)
it 'takes the encoding param into account, even if length is not specified', ->
  buf = new Buffer 10
  buf.fill 0
  cleverBufferWriter = new CleverBufferWriter buf
  # Restored literal: the source had a redaction placeholder here, which broke
  # the assertion. 'héllo' is 5 characters => 10 bytes in UTF-16LE (vs 6 bytes
  # in UTF-8), so the returned length proves the encoding option was honoured.
  len = cleverBufferWriter.writeString 'héllo', {encoding: 'utf16le'}
  len.should.eql 10
it 'should write partial strings using length (number of bytes)', ->
  buf = new Buffer 10
  buf.fill 0
  cleverBufferWriter = new CleverBufferWriter buf
  # Restored literal: must begin with the 5 bytes 'HELLO' (0x48 45 4C 4C 4F,
  # per the expected buffer below) and be longer than 5 bytes so that the
  # length option actually truncates the write.
  len = cleverBufferWriter.writeString 'HELLO, WORLD!', {length: 5}
  #Only writes hello
  len.should.eql 5
  cleverBufferWriter.getOffset().should.eql 5
  cleverBufferWriter.getBuffer().should.eql new Buffer [
    0x48, 0x45, 0x4C, 0x4C, 0x4F, 0x00, 0x00, 0x00, 0x00, 0x00
  ]
it 'should write partial multi-byte strings using length (number of bytes)', ->
  buf = new Buffer 10
  buf.fill 0
  cleverBufferWriter = new CleverBufferWriter buf
  # Restored literal: the expected bytes 0x68 0xc3 0xa9 0x6c are 'hél' in
  # UTF-8 ('é' occupies two bytes), i.e. the first 4 bytes of 'héllo'.
  len = cleverBufferWriter.writeString 'héllo', {length: 4}
  # Only writes hél
  len.should.eql 4
  cleverBufferWriter.getOffset().should.eql 4
  cleverBufferWriter.getBuffer().should.eql new Buffer [
    0x68, 0xc3, 0xa9, 0x6c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
  ]
it 'does not write partially encoded characters', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
len = cleverBufferWriter.writeString 'éè', {length: 3}
# Only writes é
len.should.eql 2
cleverBufferWriter.getOffset().should.eql 2
cleverBufferWriter.getBuffer().should.eql new Buffer [
0xc3, 0xa9, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
]
it 'should write string at a specified offset', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeString 'HELLO', {offset: 5}
#Writes hello starting at offset 5
cleverBufferWriter.getOffset().should.eql 0
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0x45, 0x4C, 0x4C, 0x4F
]
it 'should be able to writeUInt8 at a specific offset', ->
buf = new Buffer 5
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt8 1
cleverBufferWriter.writeUInt8 2
cleverBufferWriter.writeUInt8 3
cleverBufferWriter.writeUInt8 4
cleverBufferWriter.writeUInt8 5
cleverBufferWriter.writeUInt8 6, 1
#Writes 6 at position 1
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x01, 0x06, 0x03, 0x04, 0x05
]
#Does not increment the offset
cleverBufferWriter.getOffset().should.eql 5
it 'should be able to writeUInt16 at a specific offset', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt16 1
cleverBufferWriter.writeUInt16 2
cleverBufferWriter.writeUInt16 3
cleverBufferWriter.writeUInt16 4
cleverBufferWriter.writeUInt16 5
cleverBufferWriter.writeUInt16 6, 2
#Writes 6 at position 2
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x01, 0x00, 0x06, 0x00, 0x03, 0x00, 0x04, 0x00, 0x05, 0x00
]
#Does not increment the offset
cleverBufferWriter.getOffset().should.eql 10
it 'should write Uint64 little endian MAX', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.writeUInt64('18446744073709551615')
cleverBuffer.getBuffer().should.eql new Buffer [
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
it 'should write Uint64 big endian MAX', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf, {bigEndian:true}
cleverBuffer.writeUInt64('18446744073709551615')
cleverBuffer.getBuffer().should.eql new Buffer [
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
it 'should write Uint64 little endian', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.writeUInt64('4294967366')
cleverBuffer.getBuffer().should.eql new Buffer [
0x46, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00
]
it 'should write Uint64 big endian', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf, {bigEndian:true}
cleverBuffer.writeUInt64('4294967366')
cleverBuffer.getBuffer().should.eql new Buffer [
0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x46
]
it 'should write int64 little endian', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.writeInt64('-1')
cleverBuffer.getBuffer().should.eql new Buffer [
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
it 'should write int64 big endian', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf, {bigEndian:true}
cleverBuffer.writeInt64('-1')
cleverBuffer.getBuffer().should.eql new Buffer [
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
it 'should write Uint64 at specified offset, currentOffset should not increment', ->
buf = new Buffer 10
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.writeUInt64('18446744073709551615', 2)
cleverBuffer.getBuffer().should.eql new Buffer [
0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
cleverBuffer.getOffset().should.eql 0
it 'should write Uint64 at current offset, currentOffset should increment', ->
buf = new Buffer 10
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.skip 1
cleverBuffer.writeUInt64('18446744073709551615')
cleverBuffer.getBuffer().should.eql new Buffer [
0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00
]
cleverBuffer.getOffset().should.eql 9
it 'does nothing silently when writing past the length', ->
  # Default (noAssert on): the second write falls past the end of the
  # one-byte buffer and is silently dropped, leaving only the first write.
  underlying = new Buffer [0x0]
  writer = new CleverBufferWriter underlying
  writer.writeUInt8 1
  writer.writeUInt8 1
  underlying.should.eql new Buffer [0x1]
it 'throws an exception when writing past the length with noAssert off', ->
buf = new Buffer [0x1]
cleverBuffer = new CleverBufferWriter buf, {noAssert: false}
cleverBuffer.writeUInt8(1)
(-> cleverBuffer.writeUInt8(1)).should.throw()
describe 'leading zeros are handled correctly', ->
  # Exercise every combination of word size, signedness and endianness.
  for testCase in specHelper.cartesianProduct {
    size: [1, 2, 4, 8]
    unsigned: [false, true]
    bigEndian: [false, true]
  }
    do ({size, unsigned, bigEndian} = testCase) ->
      it "should correctly handle leading zero for #{JSON.stringify testCase}", ->
        # NOTE(review): `new Buffer` is deprecated in modern Node (prefer
        # Buffer.alloc); kept here for consistency with the rest of this spec.
        buf1 = new Buffer size
        buf2 = new Buffer size
        cleverBuffer1 = new CleverBufferWriter buf1,
          bigEndian: bigEndian
          noAssert: false
        cleverBuffer2 = new CleverBufferWriter buf2,
          bigEndian: bigEndian
          noAssert: false
        if unsigned
          f = "writeUInt#{size*8}"
          cleverBuffer1[f] "123"
          cleverBuffer2[f] "00123"
        else
          f = "writeInt#{size*8}"
          cleverBuffer1[f] "-123"
          cleverBuffer2[f] "-00123"
        # Leading zeros in the string form must not change the encoded bytes.
        buf1.should.eql buf2
describe 'check we handle numbers and strings identically', ->
for testCase in specHelper.cartesianProduct {
size: [1, 2, 4, 8]
unsigned: [false, true]
bigEndian: [false, true]
}
do ({size, unsigned, bigEndian} = testCase) ->
it "should correctly handle numbers and strings for #{JSON.stringify testCase}", ->
buf1 = new Buffer size
buf2 = new Buffer size
cleverBuffer1 = new CleverBufferWriter buf1,
bigEndian: bigEndian
noAssert: false
cleverBuffer2 = new CleverBufferWriter buf2,
bigEndian: bigEndian
noAssert: false
if unsigned
f = "writeUInt#{size*8}"
cleverBuffer1[f] "123"
cleverBuffer2[f] 123
else
f = "writeInt#{size*8}"
cleverBuffer1[f] "-123"
cleverBuffer2[f] -123
buf1.should.eql buf2
describe 'check only throwing exception for writing negative unsigned integers when noAssert:false', ->
for testCase in specHelper.cartesianProduct {
size: [1, 2, 4, 8]
bigEndian: [false, true]
}
do ({size, bigEndian} = testCase) ->
it "should throw for noAssert:false #{JSON.stringify testCase}", ->
cleverBuffer = new CleverBufferWriter (new Buffer size),
bigEndian: bigEndian
noAssert: false
(-> cleverBuffer["writeUInt#{size*8}"]("-1")).should.throw(TypeError)
(-> cleverBuffer["writeUInt#{size*8}"](-1)).should.throw(TypeError)
it "should not throw for noAssert:true #{JSON.stringify testCase}", ->
buf = new Buffer size
cleverBuffer = new CleverBufferWriter buf,
bigEndian: bigEndian
noAssert: true
(-> cleverBuffer["writeUInt#{size*8}"]("-1")).should.not.throw()
(-> cleverBuffer["writeUInt#{size*8}"](-1)).should.not.throw()
should = require 'should'
CleverBufferWriter = require "#{SRC}/clever-buffer-writer"
{ writeToStupidBuffer,
writeToCleverBuffer } = require './support/test-helper'
specHelper = require './spec-helper'
describe 'CleverBufferWriter', ->
NUMBER_OF_ITERATIONS = 16
it 'should write Uint8', ->
numberOfBytesPerWord = 1
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt8 value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt8 value
buf.should.eql cleverBuffer
it 'should write int8', ->
numberOfBytesPerWord = 1
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt8 value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt8 value
buf.should.eql cleverBuffer
it 'should write Uint16 Little Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt16LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt16 value
buf.should.eql cleverBuffer
it 'should write int16 Little Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt16LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt16 value
buf.should.eql cleverBuffer
it 'should write Uint16 Big Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt16BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt16 value
buf.should.eql cleverBuffer
it 'should write int16 Big Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt16BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt16 value
buf.should.eql cleverBuffer
it 'should write Uint32 Little Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt32LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt32 value
buf.should.eql cleverBuffer
it 'should write int32 Little Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt32LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt32 value
buf.should.eql cleverBuffer
it 'should write Uint32 Big Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt32BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt32 value
buf.should.eql cleverBuffer
it 'should write int32 Big Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt32BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt32 value
buf.should.eql cleverBuffer
it 'should write bytes', ->
buf = new Buffer 11
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeBytes [0x20, 0x6d, 0x65, 0x20, 0x57, 0x6f, 0x72, 0x72, 0x79, 0x21]
cleverBufferWriter.writeBytes [0x20]
cleverBufferWriter.writeBytes [0x57, 0x68, 0x61, 0x74], {offset: 2}
cleverBufferWriter.getBuffer().should.eql new Buffer [0x20, 0x6d, 0x57, 0x68, 0x61, 0x74, 0x72, 0x72, 0x79, 0x21, 0x20]
it 'should skip bytes', ->
buf = new Buffer 4
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt8 0x10
cleverBufferWriter.skip 2
cleverBufferWriter.writeUInt8 0x20
cleverBufferWriter.getBuffer().should.eql new Buffer [0x10, 0x00, 0x00, 0x20]
it 'should skip to set offset', ->
buf = new Buffer 4
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt8 0x10
cleverBufferWriter.skipTo 2
cleverBufferWriter.writeUInt8 0x20
cleverBufferWriter.getBuffer().should.eql new Buffer [0x10, 0x00, 0x20, 0x00]
it 'should write string', ->
buf = new Buffer 32
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
len = cleverBufferWriter.writeString 'EXPECTED RETURN!'
len += cleverBufferWriter.writeString 'RETURN OF $2.00!'
len.should.eql 32
cleverBufferWriter.getOffset().should.eql 32
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x45,0x58,0x50,0x45,0x43,0x54,0x45,0x44,0x20,0x52,0x45,0x54,0x55,0x52,0x4e,0x21,
0x52,0x45,0x54,0x55,0x52,0x4e,0x20,0x4f,0x46,0x20,0x24,0x32,0x2e,0x30,0x30,0x21
]
it 'should write string in multi-byte encodings', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
len = cleverBufferWriter.writeString 'héllo', {encoding: 'utf-8'}
len.should.eql 6
cleverBufferWriter.getOffset().should.eql 6
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x68, 0xc3, 0xa9, 0x6c, 0x6c, 0x6f, 0x00, 0x00, 0x00, 0x00
]
# because of buffer.write(value, offset, length, encoding)
it 'takes the encoding param into account, even if length is not specified', ->
  buf = new Buffer 10
  buf.fill 0
  cleverBufferWriter = new CleverBufferWriter buf
  # Restored literal: the source had a redaction placeholder here, which broke
  # the assertion. 'héllo' is 5 characters => 10 bytes in UTF-16LE (vs 6 bytes
  # in UTF-8), so the returned length proves the encoding option was honoured.
  len = cleverBufferWriter.writeString 'héllo', {encoding: 'utf16le'}
  len.should.eql 10
it 'should write partial strings using length (number of bytes)', ->
  buf = new Buffer 10
  buf.fill 0
  cleverBufferWriter = new CleverBufferWriter buf
  # Restored literal: must begin with the 5 bytes 'HELLO' (0x48 45 4C 4C 4F,
  # per the expected buffer below) and be longer than 5 bytes so that the
  # length option actually truncates the write.
  len = cleverBufferWriter.writeString 'HELLO, WORLD!', {length: 5}
  #Only writes hello
  len.should.eql 5
  cleverBufferWriter.getOffset().should.eql 5
  cleverBufferWriter.getBuffer().should.eql new Buffer [
    0x48, 0x45, 0x4C, 0x4C, 0x4F, 0x00, 0x00, 0x00, 0x00, 0x00
  ]
it 'should write partial multi-byte strings using length (number of bytes)', ->
  buf = new Buffer 10
  buf.fill 0
  cleverBufferWriter = new CleverBufferWriter buf
  # Restored literal: the expected bytes 0x68 0xc3 0xa9 0x6c are 'hél' in
  # UTF-8 ('é' occupies two bytes), i.e. the first 4 bytes of 'héllo'.
  len = cleverBufferWriter.writeString 'héllo', {length: 4}
  # Only writes hél
  len.should.eql 4
  cleverBufferWriter.getOffset().should.eql 4
  cleverBufferWriter.getBuffer().should.eql new Buffer [
    0x68, 0xc3, 0xa9, 0x6c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
  ]
it 'does not write partially encoded characters', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
len = cleverBufferWriter.writeString 'éè', {length: 3}
# Only writes é
len.should.eql 2
cleverBufferWriter.getOffset().should.eql 2
cleverBufferWriter.getBuffer().should.eql new Buffer [
0xc3, 0xa9, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
]
it 'should write string at a specified offset', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeString 'HELLO', {offset: 5}
#Writes hello starting at offset 5
cleverBufferWriter.getOffset().should.eql 0
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0x45, 0x4C, 0x4C, 0x4F
]
it 'should be able to writeUInt8 at a specific offset', ->
buf = new Buffer 5
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt8 1
cleverBufferWriter.writeUInt8 2
cleverBufferWriter.writeUInt8 3
cleverBufferWriter.writeUInt8 4
cleverBufferWriter.writeUInt8 5
cleverBufferWriter.writeUInt8 6, 1
#Writes 6 at position 1
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x01, 0x06, 0x03, 0x04, 0x05
]
#Does not increment the offset
cleverBufferWriter.getOffset().should.eql 5
it 'should be able to writeUInt16 at a specific offset', ->
  target = new Buffer 10
  target.fill 0
  writer = new CleverBufferWriter target
  # Fill sequentially, then overwrite the value at byte offset 2.
  writer.writeUInt16 value for value in [1..5]
  writer.writeUInt16 6, 2
  writer.getBuffer().should.eql new Buffer [
    0x01, 0x00, 0x06, 0x00, 0x03, 0x00, 0x04, 0x00, 0x05, 0x00
  ]
  # The offset write must leave the sequential cursor untouched.
  writer.getOffset().should.eql 10
# 64-bit values are passed as strings because JS numbers lose integer
# precision beyond 2^53.
it 'should write Uint64 little endian MAX', ->
  target = new Buffer 8
  target.fill 0
  writer = new CleverBufferWriter target
  writer.writeUInt64 '18446744073709551615'
  writer.getBuffer().should.eql new Buffer [
    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
  ]
it 'should write Uint64 big endian MAX', ->
  target = new Buffer 8
  target.fill 0
  writer = new CleverBufferWriter target, {bigEndian: true}
  writer.writeUInt64 '18446744073709551615'
  writer.getBuffer().should.eql new Buffer [
    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
  ]
it 'should write Uint64 little endian', ->
  target = new Buffer 8
  target.fill 0
  writer = new CleverBufferWriter target
  # 4294967366 = 0x1_0000_0046: exercises a value spanning both 32-bit halves.
  writer.writeUInt64 '4294967366'
  writer.getBuffer().should.eql new Buffer [
    0x46, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00
  ]
it 'should write Uint64 big endian', ->
  target = new Buffer 8
  target.fill 0
  writer = new CleverBufferWriter target, {bigEndian: true}
  writer.writeUInt64 '4294967366'
  writer.getBuffer().should.eql new Buffer [
    0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x46
  ]
it 'should write int64 little endian', ->
  target = new Buffer 8
  target.fill 0
  writer = new CleverBufferWriter target
  # -1 is all ones in two's complement regardless of byte order.
  writer.writeInt64 '-1'
  writer.getBuffer().should.eql new Buffer [
    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
  ]
it 'should write int64 big endian', ->
  target = new Buffer 8
  target.fill 0
  writer = new CleverBufferWriter target, {bigEndian: true}
  writer.writeInt64 '-1'
  writer.getBuffer().should.eql new Buffer [
    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
  ]
it 'should write Uint64 at specified offset, currentOffset should not increment', ->
  target = new Buffer 10
  target.fill 0
  writer = new CleverBufferWriter target
  # Explicit-offset writes bypass (and must not move) the internal cursor.
  writer.writeUInt64 '18446744073709551615', 2
  writer.getBuffer().should.eql new Buffer [
    0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
  ]
  writer.getOffset().should.eql 0
it 'should write Uint64 at current offset, currentOffset should increment', ->
  target = new Buffer 10
  target.fill 0
  writer = new CleverBufferWriter target
  # Skip one byte, then write 8 bytes: cursor ends at 1 + 8 = 9.
  writer.skip 1
  writer.writeUInt64 '18446744073709551615'
  writer.getBuffer().should.eql new Buffer [
    0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00
  ]
  writer.getOffset().should.eql 9
it 'does nothing silently when writing past the length', ->
  target = new Buffer [0x0]
  writer = new CleverBufferWriter target
  writer.writeUInt8 1
  # Second write is past the end: ignored by default (noAssert defaults on).
  writer.writeUInt8 1
  target.should.eql new Buffer [0x1]
it 'throws an exception when writing past the length with noAssert off', ->
  target = new Buffer [0x1]
  writer = new CleverBufferWriter target, {noAssert: false}
  writer.writeUInt8 1
  # With bounds checking enabled, the out-of-range write must raise.
  (-> writer.writeUInt8 1).should.throw()
# Exhaustively checks every size / signedness / endianness combination:
# a numeric string with leading zeros must encode to exactly the same
# bytes as the same value without them.
# Fix: the describe title read "are handling correctly" (ungrammatical) —
# corrected to "are handled correctly".
describe 'leading zeros are handled correctly', ->
  for testCase in specHelper.cartesianProduct {
    size: [1, 2, 4, 8]
    unsigned: [false, true]
    bigEndian: [false, true]
  }
    # `do` with a destructuring default captures the current combination
    # for the `it` callback (otherwise all iterations would share one binding).
    do ({size, unsigned, bigEndian} = testCase) ->
      it "should correctly handle leading zero for #{JSON.stringify testCase}", ->
        buf1 = new Buffer size
        buf2 = new Buffer size
        cleverBuffer1 = new CleverBufferWriter buf1,
          bigEndian: bigEndian
          noAssert: false
        cleverBuffer2 = new CleverBufferWriter buf2,
          bigEndian: bigEndian
          noAssert: false
        if unsigned
          f = "writeUInt#{size*8}"
          cleverBuffer1[f] "123"
          cleverBuffer2[f] "00123"
        else
          f = "writeInt#{size*8}"
          cleverBuffer1[f] "-123"
          cleverBuffer2[f] "-00123"
        buf1.should.eql buf2
# Writing the same value as a number or as its decimal string must
# produce identical bytes, for every size / signedness / endianness.
describe 'check we handle numbers and strings identically', ->
  for testCase in specHelper.cartesianProduct {
    size: [1, 2, 4, 8]
    unsigned: [false, true]
    bigEndian: [false, true]
  }
    # `do` captures the current combination for the async `it` callback.
    do ({size, unsigned, bigEndian} = testCase) ->
      it "should correctly handle numbers and strings for #{JSON.stringify testCase}", ->
        fromString = new Buffer size
        fromNumber = new Buffer size
        stringWriter = new CleverBufferWriter fromString,
          bigEndian: bigEndian
          noAssert: false
        numberWriter = new CleverBufferWriter fromNumber,
          bigEndian: bigEndian
          noAssert: false
        if unsigned
          method = "writeUInt#{size*8}"
          stringWriter[method] "123"
          numberWriter[method] 123
        else
          method = "writeInt#{size*8}"
          stringWriter[method] "-123"
          numberWriter[method] -123
        fromString.should.eql fromNumber
# Negative values written through an unsigned writer must raise TypeError
# when bounds checking is on (noAssert: false) and pass silently otherwise.
describe 'check only throwing exception for writing negative unsigned integers when noAssert:false', ->
  for testCase in specHelper.cartesianProduct {
    size: [1, 2, 4, 8]
    bigEndian: [false, true]
  }
    # `do` captures the current combination for the `it` callbacks.
    do ({size, bigEndian} = testCase) ->
      method = "writeUInt#{size*8}"
      it "should throw for noAssert:false #{JSON.stringify testCase}", ->
        writer = new CleverBufferWriter (new Buffer size),
          bigEndian: bigEndian
          noAssert: false
        # Both the string and number forms of a negative value are rejected.
        (-> writer[method]("-1")).should.throw(TypeError)
        (-> writer[method](-1)).should.throw(TypeError)
      it "should not throw for noAssert:true #{JSON.stringify testCase}", ->
        target = new Buffer size
        writer = new CleverBufferWriter target,
          bigEndian: bigEndian
          noAssert: true
        (-> writer[method]("-1")).should.not.throw()
        (-> writer[method](-1)).should.not.throw()
|
[
{
"context": "uire '../lib/salsa20'\n\nkey = WordArray.from_utf8 '11112222333344445555666677778888'\niv = WordArray.from_utf8 'aaaabbbbccccddddeeeeff",
"end": 136,
"score": 0.9991670250892639,
"start": 104,
"tag": "KEY",
"value": "11112222333344445555666677778888"
},
{
"context": "aa... | dev/salsa_enc.iced | CyberFlameGO/triplesec | 274 | {WordArray} = require '../lib/wordarray'
salsa20 = require '../lib/salsa20'
key = WordArray.from_utf8 '11112222333344445555666677778888'
iv = WordArray.from_utf8 'aaaabbbbccccddddeeeeffff'
text = "hello my name is max and i work at crashmix LLC. we don't have a product."
input = WordArray.from_utf8 text
console.log input.to_hex()
x = salsa20.encrypt { key, iv, input }
console.log x.to_hex()
y = salsa20.encrypt { key, iv, input : x }
console.log y.to_hex()
console.log y.to_utf8()
| 112752 | {WordArray} = require '../lib/wordarray'
salsa20 = require '../lib/salsa20'
key = WordArray.from_utf8 '<KEY>'
iv = WordArray.from_utf8 'aaaabbbbccccddddeeeeffff'
text = "hello my name is <NAME> and i work at crashmix LLC. we don't have a product."
input = WordArray.from_utf8 text
console.log input.to_hex()
x = salsa20.encrypt { key, iv, input }
console.log x.to_hex()
y = salsa20.encrypt { key, iv, input : x }
console.log y.to_hex()
console.log y.to_utf8()
| true | {WordArray} = require '../lib/wordarray'
salsa20 = require '../lib/salsa20'
key = WordArray.from_utf8 'PI:KEY:<KEY>END_PI'
iv = WordArray.from_utf8 'aaaabbbbccccddddeeeeffff'
text = "hello my name is PI:NAME:<NAME>END_PI and i work at crashmix LLC. we don't have a product."
input = WordArray.from_utf8 text
console.log input.to_hex()
x = salsa20.encrypt { key, iv, input }
console.log x.to_hex()
y = salsa20.encrypt { key, iv, input : x }
console.log y.to_hex()
console.log y.to_utf8()
|
[
{
"context": "ebaseUtil(robot, \"name\")와 같이 사용한다.\n#\n# Author:\n# river-mountain\n\n\nFirebase = require 'firebase'\nFirebaseTokenGene",
"end": 296,
"score": 0.9995360374450684,
"start": 282,
"tag": "USERNAME",
"value": "river-mountain"
},
{
"context": ", 계정 분리를 위해 구 파베 사용 추후 마이그... | scripts/firebase-util.coffee | kyunooh/bibly-bot | 0 | # Description
# Firebase를 토큰을 통해 연결해주는 유틸
#
# Dependencies:
# "firebase": "^2.2.7"
# "firebase-token-generator": "^2.0.0"
#
# Commands:
# Firebase를 사용하고자 하는 곳에서 FirebaseUtil = require './firebase-util' 을 선언하고
# fb = new FirebaseUtil(robot, "name")와 같이 사용한다.
#
# Author:
# river-mountain
Firebase = require 'firebase'
FirebaseTokenGenerator = require 'firebase-token-generator'
FIREBASE_URL = "https://9xd-bot.firebaseio.com/" # 구파이어베이스, 계정 분리를 위해 구 파베 사용 추후 마이그레이션!
FIREBASE_SECRET = "b1z9KEjv9ts33CN29eDJflET97EkV0KTSnmvpjlA" # 문제시 ignore로 빼거나 env에 넣기.
module.exports = (robot, ref) ->
if ref?
fb = new Firebase(FIREBASE_URL)
fb = fb.child(ref)
robot.logger.info ref
if FIREBASE_SECRET?
tokenGenerator = new FirebaseTokenGenerator FIREBASE_SECRET
token = tokenGenerator.createToken({ "uid": "hubot", "hubot": true })
fb.authWithCustomToken token, (error, authData) ->
if error
robot.logger.warning '인증실패', error
return fb
else
robot.logger.warning "Ref is undefined"
| 33031 | # Description
# Firebase를 토큰을 통해 연결해주는 유틸
#
# Dependencies:
# "firebase": "^2.2.7"
# "firebase-token-generator": "^2.0.0"
#
# Commands:
# Firebase를 사용하고자 하는 곳에서 FirebaseUtil = require './firebase-util' 을 선언하고
# fb = new FirebaseUtil(robot, "name")와 같이 사용한다.
#
# Author:
# river-mountain
Firebase = require 'firebase'
FirebaseTokenGenerator = require 'firebase-token-generator'
FIREBASE_URL = "https://9xd-bot.firebaseio.com/" # 구파이어베이스, 계정 분리를 위해 구 파베 사용 추후 마이그레이션!
FIREBASE_SECRET = "<KEY>" # 문제시 ignore로 빼거나 env에 넣기.
module.exports = (robot, ref) ->
if ref?
fb = new Firebase(FIREBASE_URL)
fb = fb.child(ref)
robot.logger.info ref
if FIREBASE_SECRET?
tokenGenerator = new FirebaseTokenGenerator FIREBASE_SECRET
token = tokenGenerator.createToken({ "uid": "hubot", "hubot": true })
fb.authWithCustomToken token, (error, authData) ->
if error
robot.logger.warning '인증실패', error
return fb
else
robot.logger.warning "Ref is undefined"
| true | # Description
# Firebase를 토큰을 통해 연결해주는 유틸
#
# Dependencies:
# "firebase": "^2.2.7"
# "firebase-token-generator": "^2.0.0"
#
# Commands:
# Firebase를 사용하고자 하는 곳에서 FirebaseUtil = require './firebase-util' 을 선언하고
# fb = new FirebaseUtil(robot, "name")와 같이 사용한다.
#
# Author:
# river-mountain
Firebase = require 'firebase'
FirebaseTokenGenerator = require 'firebase-token-generator'
FIREBASE_URL = "https://9xd-bot.firebaseio.com/" # 구파이어베이스, 계정 분리를 위해 구 파베 사용 추후 마이그레이션!
FIREBASE_SECRET = "PI:KEY:<KEY>END_PI" # 문제시 ignore로 빼거나 env에 넣기.
module.exports = (robot, ref) ->
if ref?
fb = new Firebase(FIREBASE_URL)
fb = fb.child(ref)
robot.logger.info ref
if FIREBASE_SECRET?
tokenGenerator = new FirebaseTokenGenerator FIREBASE_SECRET
token = tokenGenerator.createToken({ "uid": "hubot", "hubot": true })
fb.authWithCustomToken token, (error, authData) ->
if error
robot.logger.warning '인증실패', error
return fb
else
robot.logger.warning "Ref is undefined"
|
[
{
"context": "ception-reporting\":\n\t\tuserId: \"4b2dfe3a-7eef-a871-4d35-230cb11366f8\"\n\t\"file-icons\":\n\t\tcoloured: false",
"end": 1736,
"score": 0.4188135266304016,
"start": 1735,
"tag": "PASSWORD",
"value": "4"
}
] | config.cson | Alhadis/Atom-PhoenixTheme | 0 | "*":
"atom-github-syntax":
matchFont: false
"auto-detect-indentation":
showSpacingInStatusBar: false
core:
customFileTypes:
"source.c": [
"share/misc/getopt"
]
"source.gitconfig": [
"config.worktree"
]
"source.makefile": [
"make"
"makefile"
"mk.config"
"mkfile"
]
"source.perl.5": [
"t"
]
"source.python": [
"WATCHLISTS"
"DEPS"
"gclient"
"gclient_entries"
]
"source.shell": [
"profile"
]
"source.yaml": [
"clang-tidy"
"gemrc"
"glide.lock"
"mir"
"reek"
"rviz"
"syntax"
"yaml-tmlanguage"
"yaml.sed"
"yarn.lock"
"yml.mysql"
]
"text.plain": [
"packages/injections/misc/latin-1"
"mail.tildehelp"
"COPYING"
"LICENSE"
"README"
"TODO"
]
"text.roff": [
"tmac.scover"
"tmac.sdisp"
"tmac.skeep"
"tmac.s"
"mm"
"ms"
]
"text.xml": [
"xmp"
]
disabledPackages: [
"autocomplete-css"
"autocomplete-plus"
"background-tips"
"exception-reporting"
"github"
"language-clojure"
"language-make"
"language-markdown"
"language-python"
"language-ruby-on-rails"
"welcome"
]
openEmptyEditorOnStart: false
packagesWithKeymapsDisabled: [
"language-markdown"
"advanced-open-file"
]
telemetryConsent: "no"
themes: [
"atom-light-ui"
"biro-syntax"
]
useTreeSitterParsers: false
versionPinnedPackages: [
"minimap"
]
"dash-ui":
iconsEnabled: false
themeColor: "Indigo"
editor:
atomicSoftTabs: false
autoIndentOnPaste: false
fontFamily: "Menlig"
lineHeight: 1.3
maxScreenLineLength: 1500
softTabs: false
tabLength: 4
"exception-reporting":
userId: "4b2dfe3a-7eef-a871-4d35-230cb11366f8"
"file-icons":
coloured: false
strategies: {}
"find-and-replace":
showSearchWrapIcon: false
"fuzzy-finder":
scoringSystem: "fast"
useRipGrep: true
"language-applescript":
autoCompile: false
"language-markdown":
addListItems: false
autoIncrementListItems: false
disableLanguageGfm: false
emphasisShortcuts: false
indentListItems: false
linkShortcuts: false
removeEmptyListItems: false
"markdown-preview":
grammars: [
"source.gfm"
"text.html.basic"
"text.md"
"text.plain"
"text.plain.null-grammar"
"text.xml"
]
useGitHubStyle: true
minimap:
autoToggle: false
"project-manager":
savePathsRelativeToHome: true
"seti-ui":
compactView: true
fileIcons: false
"spell-check":
grammars: []
noticesMode: "console"
"status-bar":
fullWidth: false
tabs: {}
"tree-view":
hideIgnoredNames: true
welcome:
showOnStartup: false
whitespace:
removeTrailingWhitespace: false
".applescript.source":
editor:
commentEnd: " *)"
commentStart: "(* "
".coffee.source":
editor:
decreaseIndentPattern: "^\\s*(\\}\\]?|\\]|\\b(else|catch|finally)\\b)$"
".css.source":
editor:
nonWordCharacters: "/\\()\"':,.;<>~!@#$%^&*|+=[]{}`?-…"
".desc.ditroff.source":
editor:
tabLength: 16
".lisp.source":
editor:
softTabs: true
tabLength: 2
".powershell.source":
editor:
nonWordCharacters: "/\\()\"':,.;<>~!@#%^&*|+=[]{}`?-…"
".python.source":
editor:
softTabs: false
".rust.source":
editor:
softTabs: false
| 98296 | "*":
"atom-github-syntax":
matchFont: false
"auto-detect-indentation":
showSpacingInStatusBar: false
core:
customFileTypes:
"source.c": [
"share/misc/getopt"
]
"source.gitconfig": [
"config.worktree"
]
"source.makefile": [
"make"
"makefile"
"mk.config"
"mkfile"
]
"source.perl.5": [
"t"
]
"source.python": [
"WATCHLISTS"
"DEPS"
"gclient"
"gclient_entries"
]
"source.shell": [
"profile"
]
"source.yaml": [
"clang-tidy"
"gemrc"
"glide.lock"
"mir"
"reek"
"rviz"
"syntax"
"yaml-tmlanguage"
"yaml.sed"
"yarn.lock"
"yml.mysql"
]
"text.plain": [
"packages/injections/misc/latin-1"
"mail.tildehelp"
"COPYING"
"LICENSE"
"README"
"TODO"
]
"text.roff": [
"tmac.scover"
"tmac.sdisp"
"tmac.skeep"
"tmac.s"
"mm"
"ms"
]
"text.xml": [
"xmp"
]
disabledPackages: [
"autocomplete-css"
"autocomplete-plus"
"background-tips"
"exception-reporting"
"github"
"language-clojure"
"language-make"
"language-markdown"
"language-python"
"language-ruby-on-rails"
"welcome"
]
openEmptyEditorOnStart: false
packagesWithKeymapsDisabled: [
"language-markdown"
"advanced-open-file"
]
telemetryConsent: "no"
themes: [
"atom-light-ui"
"biro-syntax"
]
useTreeSitterParsers: false
versionPinnedPackages: [
"minimap"
]
"dash-ui":
iconsEnabled: false
themeColor: "Indigo"
editor:
atomicSoftTabs: false
autoIndentOnPaste: false
fontFamily: "Menlig"
lineHeight: 1.3
maxScreenLineLength: 1500
softTabs: false
tabLength: 4
"exception-reporting":
userId: "4b2dfe3a-7eef-a871-<PASSWORD>d35-230cb11366f8"
"file-icons":
coloured: false
strategies: {}
"find-and-replace":
showSearchWrapIcon: false
"fuzzy-finder":
scoringSystem: "fast"
useRipGrep: true
"language-applescript":
autoCompile: false
"language-markdown":
addListItems: false
autoIncrementListItems: false
disableLanguageGfm: false
emphasisShortcuts: false
indentListItems: false
linkShortcuts: false
removeEmptyListItems: false
"markdown-preview":
grammars: [
"source.gfm"
"text.html.basic"
"text.md"
"text.plain"
"text.plain.null-grammar"
"text.xml"
]
useGitHubStyle: true
minimap:
autoToggle: false
"project-manager":
savePathsRelativeToHome: true
"seti-ui":
compactView: true
fileIcons: false
"spell-check":
grammars: []
noticesMode: "console"
"status-bar":
fullWidth: false
tabs: {}
"tree-view":
hideIgnoredNames: true
welcome:
showOnStartup: false
whitespace:
removeTrailingWhitespace: false
".applescript.source":
editor:
commentEnd: " *)"
commentStart: "(* "
".coffee.source":
editor:
decreaseIndentPattern: "^\\s*(\\}\\]?|\\]|\\b(else|catch|finally)\\b)$"
".css.source":
editor:
nonWordCharacters: "/\\()\"':,.;<>~!@#$%^&*|+=[]{}`?-…"
".desc.ditroff.source":
editor:
tabLength: 16
".lisp.source":
editor:
softTabs: true
tabLength: 2
".powershell.source":
editor:
nonWordCharacters: "/\\()\"':,.;<>~!@#%^&*|+=[]{}`?-…"
".python.source":
editor:
softTabs: false
".rust.source":
editor:
softTabs: false
| true | "*":
"atom-github-syntax":
matchFont: false
"auto-detect-indentation":
showSpacingInStatusBar: false
core:
customFileTypes:
"source.c": [
"share/misc/getopt"
]
"source.gitconfig": [
"config.worktree"
]
"source.makefile": [
"make"
"makefile"
"mk.config"
"mkfile"
]
"source.perl.5": [
"t"
]
"source.python": [
"WATCHLISTS"
"DEPS"
"gclient"
"gclient_entries"
]
"source.shell": [
"profile"
]
"source.yaml": [
"clang-tidy"
"gemrc"
"glide.lock"
"mir"
"reek"
"rviz"
"syntax"
"yaml-tmlanguage"
"yaml.sed"
"yarn.lock"
"yml.mysql"
]
"text.plain": [
"packages/injections/misc/latin-1"
"mail.tildehelp"
"COPYING"
"LICENSE"
"README"
"TODO"
]
"text.roff": [
"tmac.scover"
"tmac.sdisp"
"tmac.skeep"
"tmac.s"
"mm"
"ms"
]
"text.xml": [
"xmp"
]
disabledPackages: [
"autocomplete-css"
"autocomplete-plus"
"background-tips"
"exception-reporting"
"github"
"language-clojure"
"language-make"
"language-markdown"
"language-python"
"language-ruby-on-rails"
"welcome"
]
openEmptyEditorOnStart: false
packagesWithKeymapsDisabled: [
"language-markdown"
"advanced-open-file"
]
telemetryConsent: "no"
themes: [
"atom-light-ui"
"biro-syntax"
]
useTreeSitterParsers: false
versionPinnedPackages: [
"minimap"
]
"dash-ui":
iconsEnabled: false
themeColor: "Indigo"
editor:
atomicSoftTabs: false
autoIndentOnPaste: false
fontFamily: "Menlig"
lineHeight: 1.3
maxScreenLineLength: 1500
softTabs: false
tabLength: 4
"exception-reporting":
userId: "4b2dfe3a-7eef-a871-PI:PASSWORD:<PASSWORD>END_PId35-230cb11366f8"
"file-icons":
coloured: false
strategies: {}
"find-and-replace":
showSearchWrapIcon: false
"fuzzy-finder":
scoringSystem: "fast"
useRipGrep: true
"language-applescript":
autoCompile: false
"language-markdown":
addListItems: false
autoIncrementListItems: false
disableLanguageGfm: false
emphasisShortcuts: false
indentListItems: false
linkShortcuts: false
removeEmptyListItems: false
"markdown-preview":
grammars: [
"source.gfm"
"text.html.basic"
"text.md"
"text.plain"
"text.plain.null-grammar"
"text.xml"
]
useGitHubStyle: true
minimap:
autoToggle: false
"project-manager":
savePathsRelativeToHome: true
"seti-ui":
compactView: true
fileIcons: false
"spell-check":
grammars: []
noticesMode: "console"
"status-bar":
fullWidth: false
tabs: {}
"tree-view":
hideIgnoredNames: true
welcome:
showOnStartup: false
whitespace:
removeTrailingWhitespace: false
".applescript.source":
editor:
commentEnd: " *)"
commentStart: "(* "
".coffee.source":
editor:
decreaseIndentPattern: "^\\s*(\\}\\]?|\\]|\\b(else|catch|finally)\\b)$"
".css.source":
editor:
nonWordCharacters: "/\\()\"':,.;<>~!@#$%^&*|+=[]{}`?-…"
".desc.ditroff.source":
editor:
tabLength: 16
".lisp.source":
editor:
softTabs: true
tabLength: 2
".powershell.source":
editor:
nonWordCharacters: "/\\()\"':,.;<>~!@#%^&*|+=[]{}`?-…"
".python.source":
editor:
softTabs: false
".rust.source":
editor:
softTabs: false
|
[
{
"context": "\n\n passwordId: (->\n id = @get('model.id') || 'new'\n \"password-#{id}\"\n ).property('model.id')\n\n\n",
"end": 1286,
"score": 0.7220396995544434,
"start": 1283,
"tag": "PASSWORD",
"value": "new"
},
{
"context": "rdId: (->\n id = @get('model.id') || 'new'\... | app/assets/javascripts/mixins/agent_form.js.coffee | fwoeck/voice-rails | 1 | Voice.AgentForm = Ember.Mixin.create({
needs: ['agents', 'users']
formEl: null
uiLocales: env.uiLocales
actions:
noop: ->
false
safeRecord: ->
if @get('dirty')
@validateForm()
Ember.run.next @, @submitForm
false
cancelChanges: ->
@resetForm()
false
init: ->
@_super()
@initAttributeArrays()
@setAttributeArrays()
initAttributeArrays: ->
@set 'roleArray', Ember.A()
@set 'skillArray', Ember.A()
@set 'languageArray', Ember.A()
validateForm: ->
@get('formEl').find('form').trigger('validate.fndtn.abide')
dirty: (->
agent = @get('model')
agent && agent.get('isDirty')
).property('model.isDirty')
submitForm: ->
el = @get('formEl')
errorCount = el.find('form div.error').length
if errorCount == 0
@saveAgentData(el)
else
app.showDefaultError(i18n.dialog.form_with_errors)
saveAgentData: (el) ->
spin = el.find('.fa-refresh')
@enableSpinner(spin)
@get('model').save().then (=>
@clearAgent()
), (->)
enableSpinner: (spin) ->
spin.addClass('fa-spin')
Ember.run.later @, (->
spin.removeClass('fa-spin')
), 1000
passwordId: (->
id = @get('model.id') || 'new'
"password-#{id}"
).property('model.id')
clearAgent: ->
model = @get('model')
if model.get('isNew')
model.remove()
@set 'model', @store.createRecord(Voice.User)
else
model.rollback()
model.reloadOnLocaleUpdate()
resetForm: ->
@clearAgent()
@setAttributeArrays()
@hideErrors()
hideErrors: ->
@get('formEl').find('form div.error').removeClass('error')
setAttributeArrays: ->
@setLanguageArray()
@setSkillArray()
@setRoleArray()
skillSelection: (->
env.skillSelection
).property()
languageSelection: (->
env.languageSelection
).property()
roleSelection: (->
env.roleSelection
).property()
setArray: (attr, selection) ->
all = @get("model.#{attr}s") || []
arr = selection.filter (n) -> all.indexOf(n.id) > -1
old = @get("#{attr}Array")
@set("#{attr}Array", arr) if Ember.compare(old, arr)
serializeArray: (attr) ->
arr = @get("#{attr}Array").map((n) -> n.id).sort()
old = @get("model.#{attr}s")
@set("model.#{attr}s", arr) if Ember.compare(old, arr)
setRoleArray: (->
@setArray('role', env.roleSelection)
).observes('model.roles.[]')
observeRoleArray: (->
@serializeArray('role')
).observes('roleArray.[]')
setSkillArray: (->
@setArray('skill', env.skillSelection)
).observes('model.skills.[]')
observeSkillArray: (->
@serializeArray('skill')
).observes('skillArray.[]')
setLanguageArray: (->
@setArray('language', env.languageSelection)
).observes('model.languages.[]')
observeLanguageArray: (->
@serializeArray('language')
).observes('languageArray.[]')
})
| 78500 | Voice.AgentForm = Ember.Mixin.create({
needs: ['agents', 'users']
formEl: null
uiLocales: env.uiLocales
actions:
noop: ->
false
safeRecord: ->
if @get('dirty')
@validateForm()
Ember.run.next @, @submitForm
false
cancelChanges: ->
@resetForm()
false
init: ->
@_super()
@initAttributeArrays()
@setAttributeArrays()
initAttributeArrays: ->
@set 'roleArray', Ember.A()
@set 'skillArray', Ember.A()
@set 'languageArray', Ember.A()
validateForm: ->
@get('formEl').find('form').trigger('validate.fndtn.abide')
dirty: (->
agent = @get('model')
agent && agent.get('isDirty')
).property('model.isDirty')
submitForm: ->
el = @get('formEl')
errorCount = el.find('form div.error').length
if errorCount == 0
@saveAgentData(el)
else
app.showDefaultError(i18n.dialog.form_with_errors)
saveAgentData: (el) ->
spin = el.find('.fa-refresh')
@enableSpinner(spin)
@get('model').save().then (=>
@clearAgent()
), (->)
enableSpinner: (spin) ->
spin.addClass('fa-spin')
Ember.run.later @, (->
spin.removeClass('fa-spin')
), 1000
passwordId: (->
id = @get('model.id') || '<PASSWORD>'
"<PASSWORD>-#{id}"
).property('model.id')
clearAgent: ->
model = @get('model')
if model.get('isNew')
model.remove()
@set 'model', @store.createRecord(Voice.User)
else
model.rollback()
model.reloadOnLocaleUpdate()
resetForm: ->
@clearAgent()
@setAttributeArrays()
@hideErrors()
hideErrors: ->
@get('formEl').find('form div.error').removeClass('error')
setAttributeArrays: ->
@setLanguageArray()
@setSkillArray()
@setRoleArray()
skillSelection: (->
env.skillSelection
).property()
languageSelection: (->
env.languageSelection
).property()
roleSelection: (->
env.roleSelection
).property()
setArray: (attr, selection) ->
all = @get("model.#{attr}s") || []
arr = selection.filter (n) -> all.indexOf(n.id) > -1
old = @get("#{attr}Array")
@set("#{attr}Array", arr) if Ember.compare(old, arr)
serializeArray: (attr) ->
arr = @get("#{attr}Array").map((n) -> n.id).sort()
old = @get("model.#{attr}s")
@set("model.#{attr}s", arr) if Ember.compare(old, arr)
setRoleArray: (->
@setArray('role', env.roleSelection)
).observes('model.roles.[]')
observeRoleArray: (->
@serializeArray('role')
).observes('roleArray.[]')
setSkillArray: (->
@setArray('skill', env.skillSelection)
).observes('model.skills.[]')
observeSkillArray: (->
@serializeArray('skill')
).observes('skillArray.[]')
setLanguageArray: (->
@setArray('language', env.languageSelection)
).observes('model.languages.[]')
observeLanguageArray: (->
@serializeArray('language')
).observes('languageArray.[]')
})
| true | Voice.AgentForm = Ember.Mixin.create({
needs: ['agents', 'users']
formEl: null
uiLocales: env.uiLocales
actions:
noop: ->
false
safeRecord: ->
if @get('dirty')
@validateForm()
Ember.run.next @, @submitForm
false
cancelChanges: ->
@resetForm()
false
init: ->
@_super()
@initAttributeArrays()
@setAttributeArrays()
initAttributeArrays: ->
@set 'roleArray', Ember.A()
@set 'skillArray', Ember.A()
@set 'languageArray', Ember.A()
validateForm: ->
@get('formEl').find('form').trigger('validate.fndtn.abide')
dirty: (->
agent = @get('model')
agent && agent.get('isDirty')
).property('model.isDirty')
submitForm: ->
el = @get('formEl')
errorCount = el.find('form div.error').length
if errorCount == 0
@saveAgentData(el)
else
app.showDefaultError(i18n.dialog.form_with_errors)
saveAgentData: (el) ->
spin = el.find('.fa-refresh')
@enableSpinner(spin)
@get('model').save().then (=>
@clearAgent()
), (->)
enableSpinner: (spin) ->
spin.addClass('fa-spin')
Ember.run.later @, (->
spin.removeClass('fa-spin')
), 1000
passwordId: (->
id = @get('model.id') || 'PI:PASSWORD:<PASSWORD>END_PI'
"PI:PASSWORD:<PASSWORD>END_PI-#{id}"
).property('model.id')
clearAgent: ->
model = @get('model')
if model.get('isNew')
model.remove()
@set 'model', @store.createRecord(Voice.User)
else
model.rollback()
model.reloadOnLocaleUpdate()
resetForm: ->
@clearAgent()
@setAttributeArrays()
@hideErrors()
hideErrors: ->
@get('formEl').find('form div.error').removeClass('error')
setAttributeArrays: ->
@setLanguageArray()
@setSkillArray()
@setRoleArray()
skillSelection: (->
env.skillSelection
).property()
languageSelection: (->
env.languageSelection
).property()
roleSelection: (->
env.roleSelection
).property()
setArray: (attr, selection) ->
all = @get("model.#{attr}s") || []
arr = selection.filter (n) -> all.indexOf(n.id) > -1
old = @get("#{attr}Array")
@set("#{attr}Array", arr) if Ember.compare(old, arr)
serializeArray: (attr) ->
arr = @get("#{attr}Array").map((n) -> n.id).sort()
old = @get("model.#{attr}s")
@set("model.#{attr}s", arr) if Ember.compare(old, arr)
setRoleArray: (->
@setArray('role', env.roleSelection)
).observes('model.roles.[]')
observeRoleArray: (->
@serializeArray('role')
).observes('roleArray.[]')
setSkillArray: (->
@setArray('skill', env.skillSelection)
).observes('model.skills.[]')
observeSkillArray: (->
@serializeArray('skill')
).observes('skillArray.[]')
setLanguageArray: (->
@setArray('language', env.languageSelection)
).observes('model.languages.[]')
observeLanguageArray: (->
@serializeArray('language')
).observes('languageArray.[]')
})
|
[
{
"context": " activity =\n actor:\n displayName: actor\n verb: verb\n labels: la",
"end": 2371,
"score": 0.9715564846992493,
"start": 2366,
"tag": "NAME",
"value": "actor"
}
] | server/lib/fake_it.coffee | assaf/vanity.js | 2 | # To populate ElasticSearch with 1000 activities over 3 days:
# coffee lib/take_if 1000 localhost:3000
#
# The first argument is number of iterations, optional and defaults to 1000.
#
# The second argument is hostname:port, optional and defaults to localhost:3000.
assert = require("assert")
Async = require("async")
Crypto = require("crypto")
Request = require("request")
Timekeeper = require("timekeeper")
BTree = require("./names/b_tree")
name = require("./names")
redis = require("../config/redis")
Activity = require("../models/activity")
SplitTest = require("../models/split_test")
require("sugar")
# Number of activities to create
COUNT = parseInt(process.argv[2] || 1000)
# Distributed over this many days
DAYS = Math.ceil(COUNT / 300)
# Using this set of verbs
VERBS = ["posted", "commented", "replied", "mentioned"]
# Labels to choose from
LABELS = ["funny", "stupid", "smart"]
# Server URL
HOST = process.argv[3] || "localhost:3000"
# Activities not distributed evenly between hours of the day, use hourly(random) to get a made up distribution
hourly_dist = BTree()
cumul = 0
for hour, pct of [1,1,0,0,1,1,2,4,8,10,12,10, 9,8,6,5,5,4,4,3,2,2,1,1]
cumul += pct
hourly_dist.add cumul, hour
assert.equal cumul, 100, "Bad distribution"
hourly = hourly_dist.done()
fakeActivity = (host, count, callback)->
# Delete and re-create index
queue = []
console.log "Creating index ..."
Activity.createIndex ->
console.log "Populating ElasticSearch with #{COUNT} activities ..."
for i in [0...COUNT]
# Random published day within the past DAYS, hour based on the distribution.
days = Math.floor(Math.random() * DAYS)
assert days >= 0 && days < DAYS, "Wrong day"
hour = hourly(Math.random() * 100)
assert hour >= 0 && hour < 24, "Wrong hour"
published = Date.create().addDays(-days).addHours(-hour).addMinutes(-Math.random() * 60)
# Actor name and verb
actor = name(Math.random() * COUNT / 3)
verb = VERBS[Math.floor(Math.random() * VERBS.length)]
assert actor && verb, "Missing actor or verb"
# Pick up to 3 labels
labels = []
for j in [1..3]
label = LABELS[Math.floor(Math.random() * 15)]
if label
labels.push label
activity =
actor:
displayName: actor
verb: verb
labels: labels
location: "123 Main St, San Francisco, CA"
activity.published = published
do (activity)->
queue.push (done)->
Activity.create activity, done
Async.series queue,
(error)->
if error
callback(error)
else
console.log "Published #{COUNT} activities"
callback()
fakeSplitTest = (count, callback)->
Async.waterfall [
(done)->
console.log "Wipe clean any split-test data"
redis.keys "#{redis.prefix}.*", (error, keys)->
if keys.length == 0
done(null, 0)
else
redis.del keys..., done
, (_, done)->
# Make unique participant identifier
newId = ->
Crypto.createHash("md5").update(Math.random().toString()).digest("hex")
# Load up on identifiers
ids = (newId() for i in [0...count])
done(null, ids)
, (ids, done)->
Timekeeper.travel Date.create().addDays(-count / 144)
# Create participants from these IDs. Do that serially, since we're playing
# with current time.
Async.forEachSeries ids, (id, each)->
Timekeeper.travel Date.create().addMinutes(10) # there are 144 of these in a day
alternative = Math.floor(Math.random() * 2)
SplitTest.participated "foo-bar", id, alternative, ->
if Math.random() < 0.10
SplitTest.completed "foo-bar", id, each
else
each()
, done
, (done)->
console.log "Published #{count} data points"
Timekeeper.reset()
done()
], callback
Async.series [
(done)->
fakeActivity HOST, COUNT, done
, (done)->
fakeSplitTest COUNT, done
], (error)->
throw error if error
console.log "Done"
process.exit 0
| 128162 | # To populate ElasticSearch with 1000 activities over 3 days:
# coffee lib/take_if 1000 localhost:3000
#
# The first argument is number of iterations, optional and defaults to 1000.
#
# The second argument is hostname:port, optional and defaults to localhost:3000.
assert = require("assert")
Async = require("async")
Crypto = require("crypto")
Request = require("request")
Timekeeper = require("timekeeper")
BTree = require("./names/b_tree")
name = require("./names")
redis = require("../config/redis")
Activity = require("../models/activity")
SplitTest = require("../models/split_test")
require("sugar")
# Number of activities to create
COUNT = parseInt(process.argv[2] || 1000)
# Distributed over this many days
DAYS = Math.ceil(COUNT / 300)
# Using this set of verbs
VERBS = ["posted", "commented", "replied", "mentioned"]
# Labels to choose from
LABELS = ["funny", "stupid", "smart"]
# Server URL
HOST = process.argv[3] || "localhost:3000"
# Activities not distributed evenly between hours of the day, use hourly(random) to get a made up distribution
hourly_dist = BTree()
cumul = 0
for hour, pct of [1,1,0,0,1,1,2,4,8,10,12,10, 9,8,6,5,5,4,4,3,2,2,1,1]
cumul += pct
hourly_dist.add cumul, hour
assert.equal cumul, 100, "Bad distribution"
hourly = hourly_dist.done()
fakeActivity = (host, count, callback)->
# Delete and re-create index
queue = []
console.log "Creating index ..."
Activity.createIndex ->
console.log "Populating ElasticSearch with #{COUNT} activities ..."
for i in [0...COUNT]
# Random published day within the past DAYS, hour based on the distribution.
days = Math.floor(Math.random() * DAYS)
assert days >= 0 && days < DAYS, "Wrong day"
hour = hourly(Math.random() * 100)
assert hour >= 0 && hour < 24, "Wrong hour"
published = Date.create().addDays(-days).addHours(-hour).addMinutes(-Math.random() * 60)
# Actor name and verb
actor = name(Math.random() * COUNT / 3)
verb = VERBS[Math.floor(Math.random() * VERBS.length)]
assert actor && verb, "Missing actor or verb"
# Pick up to 3 labels
labels = []
for j in [1..3]
label = LABELS[Math.floor(Math.random() * 15)]
if label
labels.push label
activity =
actor:
displayName: <NAME>
verb: verb
labels: labels
location: "123 Main St, San Francisco, CA"
activity.published = published
do (activity)->
queue.push (done)->
Activity.create activity, done
Async.series queue,
(error)->
if error
callback(error)
else
console.log "Published #{COUNT} activities"
callback()
fakeSplitTest = (count, callback)->
Async.waterfall [
(done)->
console.log "Wipe clean any split-test data"
redis.keys "#{redis.prefix}.*", (error, keys)->
if keys.length == 0
done(null, 0)
else
redis.del keys..., done
, (_, done)->
# Make unique participant identifier
newId = ->
Crypto.createHash("md5").update(Math.random().toString()).digest("hex")
# Load up on identifiers
ids = (newId() for i in [0...count])
done(null, ids)
, (ids, done)->
Timekeeper.travel Date.create().addDays(-count / 144)
# Create participants from these IDs. Do that serially, since we're playing
# with current time.
Async.forEachSeries ids, (id, each)->
Timekeeper.travel Date.create().addMinutes(10) # there are 144 of these in a day
alternative = Math.floor(Math.random() * 2)
SplitTest.participated "foo-bar", id, alternative, ->
if Math.random() < 0.10
SplitTest.completed "foo-bar", id, each
else
each()
, done
, (done)->
console.log "Published #{count} data points"
Timekeeper.reset()
done()
], callback
Async.series [
(done)->
fakeActivity HOST, COUNT, done
, (done)->
fakeSplitTest COUNT, done
], (error)->
throw error if error
console.log "Done"
process.exit 0
| true | # To populate ElasticSearch with 1000 activities over 3 days:
# coffee lib/take_if 1000 localhost:3000
#
# The first argument is number of iterations, optional and defaults to 1000.
#
# The second argument is hostname:port, optional and defaults to localhost:3000.
assert = require("assert")
Async = require("async")
Crypto = require("crypto")
Request = require("request")
Timekeeper = require("timekeeper")
BTree = require("./names/b_tree")
name = require("./names")
redis = require("../config/redis")
Activity = require("../models/activity")
SplitTest = require("../models/split_test")
require("sugar")
# Number of activities to create
COUNT = parseInt(process.argv[2] || 1000)
# Distributed over this many days
DAYS = Math.ceil(COUNT / 300)
# Using this set of verbs
VERBS = ["posted", "commented", "replied", "mentioned"]
# Labels to choose from
LABELS = ["funny", "stupid", "smart"]
# Server URL
HOST = process.argv[3] || "localhost:3000"
# Activities not distributed evenly between hours of the day, use hourly(random) to get a made up distribution
hourly_dist = BTree()
cumul = 0
for hour, pct of [1,1,0,0,1,1,2,4,8,10,12,10, 9,8,6,5,5,4,4,3,2,2,1,1]
cumul += pct
hourly_dist.add cumul, hour
assert.equal cumul, 100, "Bad distribution"
hourly = hourly_dist.done()
fakeActivity = (host, count, callback)->
# Delete and re-create index
queue = []
console.log "Creating index ..."
Activity.createIndex ->
console.log "Populating ElasticSearch with #{COUNT} activities ..."
for i in [0...COUNT]
# Random published day within the past DAYS, hour based on the distribution.
days = Math.floor(Math.random() * DAYS)
assert days >= 0 && days < DAYS, "Wrong day"
hour = hourly(Math.random() * 100)
assert hour >= 0 && hour < 24, "Wrong hour"
published = Date.create().addDays(-days).addHours(-hour).addMinutes(-Math.random() * 60)
# Actor name and verb
actor = name(Math.random() * COUNT / 3)
verb = VERBS[Math.floor(Math.random() * VERBS.length)]
assert actor && verb, "Missing actor or verb"
# Pick up to 3 labels
labels = []
for j in [1..3]
label = LABELS[Math.floor(Math.random() * 15)]
if label
labels.push label
activity =
actor:
displayName: PI:NAME:<NAME>END_PI
verb: verb
labels: labels
location: "123 Main St, San Francisco, CA"
activity.published = published
do (activity)->
queue.push (done)->
Activity.create activity, done
Async.series queue,
(error)->
if error
callback(error)
else
console.log "Published #{COUNT} activities"
callback()
fakeSplitTest = (count, callback)->
Async.waterfall [
(done)->
console.log "Wipe clean any split-test data"
redis.keys "#{redis.prefix}.*", (error, keys)->
if keys.length == 0
done(null, 0)
else
redis.del keys..., done
, (_, done)->
# Make unique participant identifier
newId = ->
Crypto.createHash("md5").update(Math.random().toString()).digest("hex")
# Load up on identifiers
ids = (newId() for i in [0...count])
done(null, ids)
, (ids, done)->
Timekeeper.travel Date.create().addDays(-count / 144)
# Create participants from these IDs. Do that serially, since we're playing
# with current time.
Async.forEachSeries ids, (id, each)->
Timekeeper.travel Date.create().addMinutes(10) # there are 144 of these in a day
alternative = Math.floor(Math.random() * 2)
SplitTest.participated "foo-bar", id, alternative, ->
if Math.random() < 0.10
SplitTest.completed "foo-bar", id, each
else
each()
, done
, (done)->
console.log "Published #{count} data points"
Timekeeper.reset()
done()
], callback
Async.series [
(done)->
fakeActivity HOST, COUNT, done
, (done)->
fakeSplitTest COUNT, done
], (error)->
throw error if error
console.log "Done"
process.exit 0
|
[
{
"context": "'use strict'\n#\n# Ethan Mick\n# 2015\n#\nQ = require 'q'\nmongoose = require('mong",
"end": 27,
"score": 0.9996620416641235,
"start": 17,
"tag": "NAME",
"value": "Ethan Mick"
}
] | init.coffee | ethanmick/future-server | 0 | 'use strict'
#
# Ethan Mick
# 2015
#
Q = require 'q'
mongoose = require('mongoose-q')()
log = require './lib/helpers/log'
Server = require './lib/server'
mongo = require './lib/helpers/mongo'
server = new Server(port: 8124)
mongo().then ->
server.start()
.then ->
server.routes()
.then ->
log.warn 'Future has started.'
.fail(log.error)
.done()
| 2150 | 'use strict'
#
# <NAME>
# 2015
#
Q = require 'q'
mongoose = require('mongoose-q')()
log = require './lib/helpers/log'
Server = require './lib/server'
mongo = require './lib/helpers/mongo'
server = new Server(port: 8124)
mongo().then ->
server.start()
.then ->
server.routes()
.then ->
log.warn 'Future has started.'
.fail(log.error)
.done()
| true | 'use strict'
#
# PI:NAME:<NAME>END_PI
# 2015
#
Q = require 'q'
mongoose = require('mongoose-q')()
log = require './lib/helpers/log'
Server = require './lib/server'
mongo = require './lib/helpers/mongo'
server = new Server(port: 8124)
mongo().then ->
server.start()
.then ->
server.routes()
.then ->
log.warn 'Future has started.'
.fail(log.error)
.done()
|
[
{
"context": "#\n# Copyright (c) 2012 Konstantin Bender.\n#\n# Permission is hereby granted, free of charge",
"end": 40,
"score": 0.9998645782470703,
"start": 23,
"tag": "NAME",
"value": "Konstantin Bender"
},
{
"context": "'s properties as keys\", ->\n peter = name: \"Peter\"... | tests/test-array.coffee | konstantinbe/milk | 0 | #
# Copyright (c) 2012 Konstantin Bender.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
describe "Array", ->
describe "#each()", ->
it "iterates over all objects in a collection", ->
squared = []
[1, 2, 3].each (number) -> squared.add number * number
expect(squared).to_equal [1, 4, 9]
describe "#collect()", ->
it "returns all objects after applying block on them", ->
expect([1, 2, 3].collect (number) -> number * number).to_equal [1, 4, 9]
describe "#select()", ->
it "returns all objects for which the block returns true", ->
expect([1, 2, 3, 4, 5].select (number) -> number == 3).to_equal [3]
describe "#reject()", ->
it "returns all objects for which the block returns false", ->
expect([1, 2, 3, 4, 5].reject (number) -> number == 3).to_equal [1, 2, 4, 5]
describe "#partition()", ->
it "partitions the elements into: [selected, rejected], where selected contains all truthy values and rejected contains all falsy values", ->
expect(['hello', null, 42, false, true, undefined, 17].partition()).to_equal [['hello', 42, true, 17], [null, false, undefined]]
it "also accepts a block that is called for each value returning either true or false", ->
expect([1..10].partition (value) -> value % 2 == 0).to_equal [[2, 4, 6, 8, 10], [1, 3, 5, 7, 9]]
describe "#detect()", ->
it "returns the first object for which the block returns true", ->
expect([1, 2, 3, 4, 5].detect (number) -> number == 3).to_be 3
describe "#all()", ->
it "returns true if block returns true for all objects", ->
expect([1, 2, 3].all (number) -> true).to_be true
it "returns false if block returns false for at least one object", ->
expect([1, 2, 3].all (number) -> if number == 3 then false else true).to_be false
describe "#any()", ->
it "returns true if block returns true for at least one object", ->
expect([1, 2, 3].any (number) -> if number == 2 then true else false).to_be true
it "returns false if block returns false for all objects", ->
expect([1, 2, 3].any (number) -> false).to_be false
describe "#max()", ->
it "returns the max value", ->
expect([1, 3, 2, 4, 1].max()).to_be 4
it "returns null if empty", ->
expect([].max()).to_be null
describe "#min()", ->
it "returns the min value", ->
expect([1, 3, 2, -1, 1].min()).to_be -1
it "returns null if empty", ->
expect([].min()).to_be null
describe "#group_by()", ->
describe "when a key is passed", ->
it "returns a hash containing groups using the value of object's properties as keys", ->
peter = name: "Peter"
maxim = name: "Maxim"
inna1 = name: "Inna"
inna2 = name: "Inna"
groups = [peter, maxim, inna1, inna2].group_by 'name'
expect(groups["Peter"]).to_equal [peter]
expect(groups["Maxim"]).to_equal [maxim]
expect(groups["Inna"]).to_equal [inna1, inna2]
describe "when a block is passed", ->
it "returns a hash containing groups using the results of the block as keys", ->
groups = [0, 1, 2, 3, 4, 5].group_by (value) -> if value % 2 == 0 then "even" else "odd"
expect(groups["odd"]).to_equal [1, 3, 5]
expect(groups["even"]).to_equal [0, 2, 4]
describe "#inject()", ->
it "behaves like reduce() while taking the initial parameter as the first argument", ->
expect([1, 2, 4].inject 0, (sum, number) -> sum + number).to_be 7
it "returns the initial parameter if array is empty", ->
expect([].inject 123, (sum, number) -> sum + number).to_be 123
describe "#contains()", ->
it "returns true if collection contains value", ->
expect([1, 2, 3].contains 2).to_be true
it "returns false if collection does not contain the value", ->
expect([1, 2, 3].contains 4).to_be false
describe "#pluck()", ->
it "returns an array collecting the values for the given key", ->
people = [{name: "_peter", age: 59}, {name: "_esther", age: 45}, {name: "_heinerle", age: 4}]
expect(people.pluck 'name').to_equal ["_peter", "_esther", "_heinerle"]
describe "#count()", ->
it "returns the number of elements in a collection", ->
expect([].count()).to_be 0
expect([1, 2, 3].count()).to_be 3
expect([1, 2, 3, 3, 3].count()).to_be 5
describe "#is_empty()", ->
it "returns true if collection is empty", ->
expect([].is_empty()).to_be true
it "returns false if collection has at least one element", ->
expect([1].is_empty()).to_be false
expect([1, 2, 3].is_empty()).to_be false
describe "#first()", ->
it "returns the first element if |count| is not given", ->
expect([1, 2, 3].first()).to_be 1
expect([].first()).to_be_null()
it "returns a new array containing the first N elements if |count| = N is given", ->
expect([1, 2, 3].first 0).to_equal []
expect([1, 2, 3].first 1).to_equal [1]
expect([1, 2, 3].first 2).to_equal [1, 2]
expect([1, 2, 3].first 3).to_equal [1, 2, 3]
expect([1, 2, 3].first 10).to_equal [1, 2, 3]
describe "#second()", ->
it "returns the second element", ->
expect([1, 2, 3].second()).to_be 2
describe "#third()", ->
it "returns the third element", ->
expect([1, 2, 3].third()).to_be 3
describe "#rest()", ->
it "returns a new array containing all except the first element", ->
expect([1, 2, 3].rest()).to_equal [2, 3]
describe "#last()", ->
it "returns the last element if |count| is not given", ->
expect([1, 2, 3].last()).to_be 3
expect([].last()).to_be_null()
it "returns a new array containing the last N elements if |count| = N is given", ->
expect([1, 2, 3].last 0).to_equal []
expect([1, 2, 3].last 1).to_equal [3]
expect([1, 2, 3].last 2).to_equal [2, 3]
expect([1, 2, 3].last 3).to_equal [1, 2, 3]
expect([1, 2, 3].last 10).to_equal [1, 2, 3]
describe "#compacted()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.compacted()).not.to_be array
expect(array).to_equal [1, 2, 3]
it "removes all null objects", ->
expect([null, 1, null, 2, null, 3, null].compacted()).to_equal [1, 2, 3]
it "removes all undefined objects", ->
expect([undefined, 1, undefined, 2, undefined, 3, undefined].compacted()).to_equal [1, 2, 3]
describe "#flattened()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.flattened()).not.to_be array
expect(array).to_equal [1, 2, 3]
it "flattens an array", ->
expect([1, [2], [3, [[[4]]]]].flattened()).to_equal [1, 2, 3, 4]
describe "#reversed()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.reversed()).not.to_be array
expect(array).to_equal [1, 2, 3]
it "reverses the order of the objects", ->
array = [1, 2, 3]
expect(array.reversed()).to_equal [3, 2, 1]
describe "#with()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.with 4).not.to_be array
expect(array).to_equal [1, 2, 3]
it "adds one object to the end", ->
expect([1, 2, 3].with 4).to_equal [1, 2, 3, 4]
describe "#with_many()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.with_many [4]).not.to_be array
expect(array).to_equal [1, 2, 3]
it "appends all objects", ->
expect([1, 2, 3].with_many [4, 5, 6]).to_equal [1, 2, 3, 4, 5, 6]
describe "#without()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.without 3).not.to_be array
expect(array).to_equal [1, 2, 3]
it "removes first occurences of one object", ->
expect([1, 2, 3, 2].without 2).to_equal [1, 3, 2]
it "removes nothing if object is not in array", ->
expect([1, 2, 3].without 4).to_equal [1, 2, 3]
describe "#without_many()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.without_many [3]).not.to_be array
expect(array).to_equal [1, 2, 3]
it "removes first occurences of passed in objects", ->
expect([1, 2, 3, 2].without_many [2, 3]).to_equal [1, 2]
it "doesn't remove an object if it is not in the array", ->
expect([1, 2, 3].without_many [4, 5]).to_equal [1, 2, 3]
describe "#without_at()", ->
it "removes object at specified index", ->
expect([1, 2, 3].remove_at 1).to_equal [1, 3]
describe "#unique()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3, 3]
expect(array.unique()).not.to_be array
expect(array).to_equal [1, 2, 3, 3]
it "removes all duplicates in an array", ->
expect([1, 1, 1, 2, 2, 2, 3, 3, 3].unique()).to_equal [1, 2, 3]
describe "#intersect()", ->
it "returns the intersection between the receiver and an array", ->
expect([0, 1, 2, 3, 4, 5].intersect([1, 2, 3, 6])).to_equal [1, 2, 3]
it "removes duplicates", ->
expect([1, 2, 2, 3, 3, 3].intersect([1, 2, 3])).to_equal [1, 2, 3]
describe "#unite()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.unite [4, 5]).not.to_be array
expect(array).to_equal [1, 2, 3]
it "appends one array", ->
expect([1, 2, 3].unite [4, 5, 6]).to_equal [1, 2, 3, 4, 5, 6]
it "removes duplicates after uniting all arrays", ->
expect([1, 1, 2, 2, 3, 3].unite [4, 4, 5, 5, 6, 6]).to_equal [1, 2, 3, 4, 5, 6]
describe "#zip()", ->
it "zips receiver with an array of the same length", ->
expect([1, 2, 3].zip ['one', 'two', 'three']).to_equal [[1, 'one'], [2, 'two'], [3, 'three']]
it "zips receiver with many arrays of the same length", ->
expect([1, 2, 3].zip ['one', 'two', 'three'], ['uno', 'due', 'tres']).to_equal [[1, 'one', 'uno'], [2, 'two', 'due'], [3, 'three', 'tres']]
it "fills up with undefined if arrays are of different length", ->
expect([1, 2, 3].zip ['one', 'two'], ['uno']).to_equal [[1, 'one', 'uno'], [2, 'two', undefined], [3, undefined, undefined]]
describe "#index_of()", ->
it "returns the index of object", ->
expect([1, 2, 3].index_of 2).to_be 1
it "returns the first found index of object if the object is contained more than once in the array", ->
expect([1, 2, 3, 2].index_of 2).to_be 1
it "returns -1 if the object is not contained in the array", ->
expect([1, 2, 3].index_of 4).to_be -1
describe "#last_index_of()", ->
it "returns the index of object", ->
expect([1, 2, 3].last_index_of 2).to_be 1
it "returns the last found index of object if the object is contained more than once in the array", ->
expect([1, 2, 3, 2].last_index_of 2).to_be 3
it "returns -1 if the object is not contained in the array", ->
expect([1, 2, 3].last_index_of 4).to_be -1
describe "#indexes_of()", ->
it "returns all indexes of a object", ->
expect([1, 2, 3, 2, 4].indexes_of 2).to_equal [1, 3]
it "returns empty array if the object is not contained in the array", ->
expect([1, 2, 3].indexes_of 4).to_equal []
describe "#add()", ->
it "appends one object", ->
array = [1, 2, 3]
array.add 4
expect(array).to_equal [1, 2, 3, 4]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.add 4).to_be array
describe "#add_many()", ->
it "appends many objects", ->
array = [1, 2, 3]
array.add_many [4, 5, 6]
expect(array).to_equal [1, 2, 3, 4, 5, 6]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.add_many [4]).to_be array
describe "#insert_at()", ->
it "inserts the object at a specified index", ->
array = [1, 2, 4]
array.insert_at 3, 2
expect(array).to_equal [1, 2, 3, 4]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.insert_at 4, 0).to_be array
describe "#insert_many_at()", ->
it "inserts the objects at a specified index", ->
array = [1, 2, 5]
array.insert_many_at [3, 4], 2
expect(array).to_equal [1, 2, 3, 4, 5]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.insert_many_at [4], 0).to_be array
describe "#insert_before()", ->
it "inserts the object before the first occurence of a specific object", ->
array = [1, 2, 4, 5, 4, 3, 2, 1]
array.insert_before 3, 4
expect(array).to_equal [1, 2, 3, 4, 5, 4, 3, 2, 1]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.insert_before 0, 1).to_be array
describe "#insert_many_before()", ->
it "inserts objects before the first occurence of a specific object", ->
array = [1, 4, 5, 4, 3, 2, 1]
array.insert_many_before [2, 3], 4
expect(array).to_equal [1, 2, 3, 4, 5, 4, 3, 2, 1]
it "returns the receiver", ->
array = [1, 4, 5, 4, 3, 2, 1]
expect(array.insert_many_before [2, 3], 4).to_be array
describe "#insert_after()", ->
it "inserts the object after the last occurence of a specific object", ->
array = [1, 2, 3, 4, 5, 4, 3, 2]
array.insert_after 1, 2
expect(array).to_equal [1, 2, 3, 4, 5, 4, 3, 2, 1]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.insert_after [4], 3).to_be array
describe "#insert_many_after()", ->
it "inserts the objects before the first occurence of a specific object", ->
array = [1, 2, 3, 4, 5, 4, 3]
array.insert_many_after [2, 1], 3
expect(array).to_equal [1, 2, 3, 4, 5, 4, 3, 2, 1]
it "returns the receiver", ->
array = [1, 2, 3, 4, 5, 4, 3]
expect(array.insert_many_after [2, 1], 3).to_be array
describe "#remove()", ->
it "removes first occurence of one object", ->
array = [1, 2, 3, 2]
array.remove 2
expect(array).to_equal [1, 3, 2]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.remove 3).to_be array
describe "#remove_many()", ->
it "removes first occurences of objects", ->
array = [1, 2, 3, 2]
array.remove_many [2, 3, 2]
expect(array).to_equal [1]
it "removes nothing if no object from the collection is not in the array", ->
array = [1, 2, 3]
array.remove_many [4, 5]
expect(array).to_equal [1, 2, 3]
it "removes nothing if collection is empty", ->
array = [1, 2, 3]
array.remove_many([])
expect(array).to_equal [1, 2, 3]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.remove_many [3]).to_be array
describe "#remove_at()", ->
it "removes object at specified index", ->
array = [1, 2, 3]
array.remove_at 1
expect(array).to_equal [1, 3]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.remove_at 0).to_be array
describe "#replace_with()", ->
it "replaces first occurence of object with replacement", ->
array = [1, 4, 3, 4]
array.replace_with 4, 2
expect(array).to_equal [1, 2, 3, 4]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.replace_with 1, 5).to_be array
describe "#replace_with_many()", ->
it "replaces first occurence of object with many objects", ->
array = [1, 4, 3, 4]
array.replace_with_many 4, [2, 2]
expect(array).to_equal [1, 2, 2, 3, 4]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.replace_with_many 1, [5, 6]).to_be array
describe "#replace_at_with()", ->
it "replaces object at index with object", ->
array = [1, 4, 3]
array.replace_at_with 1, 2
expect(array).to_equal [1, 2, 3]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.replace_at_with 0, 5).to_be array
describe "#replace_at_with_many()", ->
it "replaces object at index with many objects", ->
array = [1, 4, 4]
array.replace_at_with_many 1, [2, 3]
expect(array).to_equal [1, 2, 3, 4]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.replace_at_with 0, 5).to_be array
describe "#sort_by()", ->
peter = name: "_peter", age: 23
maxim = name: "_maxim", age: 40
jessi = name: "_jessi", age: 54
kevin = name: "_kevin", age: 33
inna1 = name: "_inna", age: 36
inna2 = name: "_inna", age: 33
people = null
before ->
people = [peter, maxim, jessi, kevin, inna1, inna2]
after ->
people = null
it "sorts by one property name", ->
people.sort_by ['name']
expect(people).to_equal [inna1, inna2, jessi, kevin, maxim, peter]
it "sorts by many property names", ->
people.sort_by ['name', 'age']
expect(people).to_equal [inna2, inna1, jessi, kevin, maxim, peter]
it "returns the receiver", ->
expect(people.sort_by ['name']).to_be people
describe "#copy()", ->
it "copys an array", ->
array = [1, 2, 3]
copy = array.copy()
expect(copy).not.to_be array
expect(copy).to_equal array
describe "#equals()", ->
it "returns true for an array with the same objects", ->
expect([1, 2, 3].equals [1, 2, 3]).to_be true
it "returns false for undefined or null", ->
expect([1, 2, 3].equals null).to_be false
expect([1, 2, 3].equals()).to_be false
it "returns false for an array with the same objects but in a different order", ->
expect([1, 2, 3].equals [1, 3, 2]).to_be false
it "returns false when passing something else than an array (for example an object)", ->
expect([1, 2, 3].equals {}).to_be false
| 111171 | #
# Copyright (c) 2012 <NAME>.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
describe "Array", ->
describe "#each()", ->
it "iterates over all objects in a collection", ->
squared = []
[1, 2, 3].each (number) -> squared.add number * number
expect(squared).to_equal [1, 4, 9]
describe "#collect()", ->
it "returns all objects after applying block on them", ->
expect([1, 2, 3].collect (number) -> number * number).to_equal [1, 4, 9]
describe "#select()", ->
it "returns all objects for which the block returns true", ->
expect([1, 2, 3, 4, 5].select (number) -> number == 3).to_equal [3]
describe "#reject()", ->
it "returns all objects for which the block returns false", ->
expect([1, 2, 3, 4, 5].reject (number) -> number == 3).to_equal [1, 2, 4, 5]
describe "#partition()", ->
it "partitions the elements into: [selected, rejected], where selected contains all truthy values and rejected contains all falsy values", ->
expect(['hello', null, 42, false, true, undefined, 17].partition()).to_equal [['hello', 42, true, 17], [null, false, undefined]]
it "also accepts a block that is called for each value returning either true or false", ->
expect([1..10].partition (value) -> value % 2 == 0).to_equal [[2, 4, 6, 8, 10], [1, 3, 5, 7, 9]]
describe "#detect()", ->
it "returns the first object for which the block returns true", ->
expect([1, 2, 3, 4, 5].detect (number) -> number == 3).to_be 3
describe "#all()", ->
it "returns true if block returns true for all objects", ->
expect([1, 2, 3].all (number) -> true).to_be true
it "returns false if block returns false for at least one object", ->
expect([1, 2, 3].all (number) -> if number == 3 then false else true).to_be false
describe "#any()", ->
it "returns true if block returns true for at least one object", ->
expect([1, 2, 3].any (number) -> if number == 2 then true else false).to_be true
it "returns false if block returns false for all objects", ->
expect([1, 2, 3].any (number) -> false).to_be false
describe "#max()", ->
it "returns the max value", ->
expect([1, 3, 2, 4, 1].max()).to_be 4
it "returns null if empty", ->
expect([].max()).to_be null
describe "#min()", ->
it "returns the min value", ->
expect([1, 3, 2, -1, 1].min()).to_be -1
it "returns null if empty", ->
expect([].min()).to_be null
describe "#group_by()", ->
describe "when a key is passed", ->
it "returns a hash containing groups using the value of object's properties as keys", ->
peter = name: "<NAME>"
maxim = name: "<NAME>"
inna1 = name: "<NAME>"
inna2 = name: "<NAME>"
groups = [<NAME>, maxim, inna1, inna2].group_by 'name'
expect(groups["Peter"]).to_equal [peter]
expect(groups["Maxim"]).to_equal [maxim]
expect(groups["Inna"]).to_equal [inna1, inna2]
describe "when a block is passed", ->
it "returns a hash containing groups using the results of the block as keys", ->
groups = [0, 1, 2, 3, 4, 5].group_by (value) -> if value % 2 == 0 then "even" else "odd"
expect(groups["odd"]).to_equal [1, 3, 5]
expect(groups["even"]).to_equal [0, 2, 4]
describe "#inject()", ->
it "behaves like reduce() while taking the initial parameter as the first argument", ->
expect([1, 2, 4].inject 0, (sum, number) -> sum + number).to_be 7
it "returns the initial parameter if array is empty", ->
expect([].inject 123, (sum, number) -> sum + number).to_be 123
describe "#contains()", ->
it "returns true if collection contains value", ->
expect([1, 2, 3].contains 2).to_be true
it "returns false if collection does not contain the value", ->
expect([1, 2, 3].contains 4).to_be false
describe "#pluck()", ->
it "returns an array collecting the values for the given key", ->
people = [{name: <NAME>", age: 59}, {name: <NAME>", age: 45}, {name: <NAME>", age: 4}]
expect(people.pluck 'name').to_equal ["_<NAME>", "_<NAME>her", "_heinerle"]
describe "#count()", ->
it "returns the number of elements in a collection", ->
expect([].count()).to_be 0
expect([1, 2, 3].count()).to_be 3
expect([1, 2, 3, 3, 3].count()).to_be 5
describe "#is_empty()", ->
it "returns true if collection is empty", ->
expect([].is_empty()).to_be true
it "returns false if collection has at least one element", ->
expect([1].is_empty()).to_be false
expect([1, 2, 3].is_empty()).to_be false
describe "#first()", ->
it "returns the first element if |count| is not given", ->
expect([1, 2, 3].first()).to_be 1
expect([].first()).to_be_null()
it "returns a new array containing the first N elements if |count| = N is given", ->
expect([1, 2, 3].first 0).to_equal []
expect([1, 2, 3].first 1).to_equal [1]
expect([1, 2, 3].first 2).to_equal [1, 2]
expect([1, 2, 3].first 3).to_equal [1, 2, 3]
expect([1, 2, 3].first 10).to_equal [1, 2, 3]
describe "#second()", ->
it "returns the second element", ->
expect([1, 2, 3].second()).to_be 2
describe "#third()", ->
it "returns the third element", ->
expect([1, 2, 3].third()).to_be 3
describe "#rest()", ->
it "returns a new array containing all except the first element", ->
expect([1, 2, 3].rest()).to_equal [2, 3]
describe "#last()", ->
it "returns the last element if |count| is not given", ->
expect([1, 2, 3].last()).to_be 3
expect([].last()).to_be_null()
it "returns a new array containing the last N elements if |count| = N is given", ->
expect([1, 2, 3].last 0).to_equal []
expect([1, 2, 3].last 1).to_equal [3]
expect([1, 2, 3].last 2).to_equal [2, 3]
expect([1, 2, 3].last 3).to_equal [1, 2, 3]
expect([1, 2, 3].last 10).to_equal [1, 2, 3]
describe "#compacted()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.compacted()).not.to_be array
expect(array).to_equal [1, 2, 3]
it "removes all null objects", ->
expect([null, 1, null, 2, null, 3, null].compacted()).to_equal [1, 2, 3]
it "removes all undefined objects", ->
expect([undefined, 1, undefined, 2, undefined, 3, undefined].compacted()).to_equal [1, 2, 3]
describe "#flattened()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.flattened()).not.to_be array
expect(array).to_equal [1, 2, 3]
it "flattens an array", ->
expect([1, [2], [3, [[[4]]]]].flattened()).to_equal [1, 2, 3, 4]
describe "#reversed()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.reversed()).not.to_be array
expect(array).to_equal [1, 2, 3]
it "reverses the order of the objects", ->
array = [1, 2, 3]
expect(array.reversed()).to_equal [3, 2, 1]
describe "#with()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.with 4).not.to_be array
expect(array).to_equal [1, 2, 3]
it "adds one object to the end", ->
expect([1, 2, 3].with 4).to_equal [1, 2, 3, 4]
describe "#with_many()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.with_many [4]).not.to_be array
expect(array).to_equal [1, 2, 3]
it "appends all objects", ->
expect([1, 2, 3].with_many [4, 5, 6]).to_equal [1, 2, 3, 4, 5, 6]
describe "#without()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.without 3).not.to_be array
expect(array).to_equal [1, 2, 3]
it "removes first occurences of one object", ->
expect([1, 2, 3, 2].without 2).to_equal [1, 3, 2]
it "removes nothing if object is not in array", ->
expect([1, 2, 3].without 4).to_equal [1, 2, 3]
describe "#without_many()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.without_many [3]).not.to_be array
expect(array).to_equal [1, 2, 3]
it "removes first occurences of passed in objects", ->
expect([1, 2, 3, 2].without_many [2, 3]).to_equal [1, 2]
it "doesn't remove an object if it is not in the array", ->
expect([1, 2, 3].without_many [4, 5]).to_equal [1, 2, 3]
describe "#without_at()", ->
it "removes object at specified index", ->
expect([1, 2, 3].remove_at 1).to_equal [1, 3]
describe "#unique()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3, 3]
expect(array.unique()).not.to_be array
expect(array).to_equal [1, 2, 3, 3]
it "removes all duplicates in an array", ->
expect([1, 1, 1, 2, 2, 2, 3, 3, 3].unique()).to_equal [1, 2, 3]
describe "#intersect()", ->
it "returns the intersection between the receiver and an array", ->
expect([0, 1, 2, 3, 4, 5].intersect([1, 2, 3, 6])).to_equal [1, 2, 3]
it "removes duplicates", ->
expect([1, 2, 2, 3, 3, 3].intersect([1, 2, 3])).to_equal [1, 2, 3]
describe "#unite()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.unite [4, 5]).not.to_be array
expect(array).to_equal [1, 2, 3]
it "appends one array", ->
expect([1, 2, 3].unite [4, 5, 6]).to_equal [1, 2, 3, 4, 5, 6]
it "removes duplicates after uniting all arrays", ->
expect([1, 1, 2, 2, 3, 3].unite [4, 4, 5, 5, 6, 6]).to_equal [1, 2, 3, 4, 5, 6]
describe "#zip()", ->
it "zips receiver with an array of the same length", ->
expect([1, 2, 3].zip ['one', 'two', 'three']).to_equal [[1, 'one'], [2, 'two'], [3, 'three']]
it "zips receiver with many arrays of the same length", ->
expect([1, 2, 3].zip ['one', 'two', 'three'], ['uno', 'due', 'tres']).to_equal [[1, 'one', 'uno'], [2, 'two', 'due'], [3, 'three', 'tres']]
it "fills up with undefined if arrays are of different length", ->
expect([1, 2, 3].zip ['one', 'two'], ['uno']).to_equal [[1, 'one', 'uno'], [2, 'two', undefined], [3, undefined, undefined]]
describe "#index_of()", ->
it "returns the index of object", ->
expect([1, 2, 3].index_of 2).to_be 1
it "returns the first found index of object if the object is contained more than once in the array", ->
expect([1, 2, 3, 2].index_of 2).to_be 1
it "returns -1 if the object is not contained in the array", ->
expect([1, 2, 3].index_of 4).to_be -1
describe "#last_index_of()", ->
it "returns the index of object", ->
expect([1, 2, 3].last_index_of 2).to_be 1
it "returns the last found index of object if the object is contained more than once in the array", ->
expect([1, 2, 3, 2].last_index_of 2).to_be 3
it "returns -1 if the object is not contained in the array", ->
expect([1, 2, 3].last_index_of 4).to_be -1
describe "#indexes_of()", ->
it "returns all indexes of a object", ->
expect([1, 2, 3, 2, 4].indexes_of 2).to_equal [1, 3]
it "returns empty array if the object is not contained in the array", ->
expect([1, 2, 3].indexes_of 4).to_equal []
describe "#add()", ->
it "appends one object", ->
array = [1, 2, 3]
array.add 4
expect(array).to_equal [1, 2, 3, 4]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.add 4).to_be array
describe "#add_many()", ->
it "appends many objects", ->
array = [1, 2, 3]
array.add_many [4, 5, 6]
expect(array).to_equal [1, 2, 3, 4, 5, 6]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.add_many [4]).to_be array
describe "#insert_at()", ->
it "inserts the object at a specified index", ->
array = [1, 2, 4]
array.insert_at 3, 2
expect(array).to_equal [1, 2, 3, 4]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.insert_at 4, 0).to_be array
describe "#insert_many_at()", ->
it "inserts the objects at a specified index", ->
array = [1, 2, 5]
array.insert_many_at [3, 4], 2
expect(array).to_equal [1, 2, 3, 4, 5]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.insert_many_at [4], 0).to_be array
describe "#insert_before()", ->
it "inserts the object before the first occurence of a specific object", ->
array = [1, 2, 4, 5, 4, 3, 2, 1]
array.insert_before 3, 4
expect(array).to_equal [1, 2, 3, 4, 5, 4, 3, 2, 1]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.insert_before 0, 1).to_be array
describe "#insert_many_before()", ->
it "inserts objects before the first occurence of a specific object", ->
array = [1, 4, 5, 4, 3, 2, 1]
array.insert_many_before [2, 3], 4
expect(array).to_equal [1, 2, 3, 4, 5, 4, 3, 2, 1]
it "returns the receiver", ->
array = [1, 4, 5, 4, 3, 2, 1]
expect(array.insert_many_before [2, 3], 4).to_be array
describe "#insert_after()", ->
it "inserts the object after the last occurence of a specific object", ->
array = [1, 2, 3, 4, 5, 4, 3, 2]
array.insert_after 1, 2
expect(array).to_equal [1, 2, 3, 4, 5, 4, 3, 2, 1]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.insert_after [4], 3).to_be array
describe "#insert_many_after()", ->
it "inserts the objects before the first occurence of a specific object", ->
array = [1, 2, 3, 4, 5, 4, 3]
array.insert_many_after [2, 1], 3
expect(array).to_equal [1, 2, 3, 4, 5, 4, 3, 2, 1]
it "returns the receiver", ->
array = [1, 2, 3, 4, 5, 4, 3]
expect(array.insert_many_after [2, 1], 3).to_be array
describe "#remove()", ->
it "removes first occurence of one object", ->
array = [1, 2, 3, 2]
array.remove 2
expect(array).to_equal [1, 3, 2]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.remove 3).to_be array
describe "#remove_many()", ->
it "removes first occurences of objects", ->
array = [1, 2, 3, 2]
array.remove_many [2, 3, 2]
expect(array).to_equal [1]
it "removes nothing if no object from the collection is not in the array", ->
array = [1, 2, 3]
array.remove_many [4, 5]
expect(array).to_equal [1, 2, 3]
it "removes nothing if collection is empty", ->
array = [1, 2, 3]
array.remove_many([])
expect(array).to_equal [1, 2, 3]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.remove_many [3]).to_be array
describe "#remove_at()", ->
it "removes object at specified index", ->
array = [1, 2, 3]
array.remove_at 1
expect(array).to_equal [1, 3]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.remove_at 0).to_be array
describe "#replace_with()", ->
it "replaces first occurence of object with replacement", ->
array = [1, 4, 3, 4]
array.replace_with 4, 2
expect(array).to_equal [1, 2, 3, 4]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.replace_with 1, 5).to_be array
describe "#replace_with_many()", ->
it "replaces first occurence of object with many objects", ->
array = [1, 4, 3, 4]
array.replace_with_many 4, [2, 2]
expect(array).to_equal [1, 2, 2, 3, 4]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.replace_with_many 1, [5, 6]).to_be array
describe "#replace_at_with()", ->
it "replaces object at index with object", ->
array = [1, 4, 3]
array.replace_at_with 1, 2
expect(array).to_equal [1, 2, 3]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.replace_at_with 0, 5).to_be array
describe "#replace_at_with_many()", ->
it "replaces object at index with many objects", ->
array = [1, 4, 4]
array.replace_at_with_many 1, [2, 3]
expect(array).to_equal [1, 2, 3, 4]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.replace_at_with 0, 5).to_be array
describe "#sort_by()", ->
<NAME> = name: <NAME>", age: 23
maxim = name: <NAME>", age: 40
jessi = name: <NAME>", age: 54
kevin = name: <NAME>", age: 33
inna1 = name: <NAME>", age: 36
inna2 = name: <NAME>", age: 33
people = null
before ->
people = [<NAME>, <NAME>, <NAME>, <NAME>, inna1, inna2]
after ->
people = null
it "sorts by one property name", ->
people.sort_by ['name']
expect(people).to_equal [inna1, inna2, <NAME>i, <NAME>, maxim, peter]
it "sorts by many property names", ->
people.sort_by ['name', 'age']
expect(people).to_equal [inna2, inna1, jessi, <NAME>vin, maxim, peter]
it "returns the receiver", ->
expect(people.sort_by ['name']).to_be people
describe "#copy()", ->
it "copys an array", ->
array = [1, 2, 3]
copy = array.copy()
expect(copy).not.to_be array
expect(copy).to_equal array
describe "#equals()", ->
it "returns true for an array with the same objects", ->
expect([1, 2, 3].equals [1, 2, 3]).to_be true
it "returns false for undefined or null", ->
expect([1, 2, 3].equals null).to_be false
expect([1, 2, 3].equals()).to_be false
it "returns false for an array with the same objects but in a different order", ->
expect([1, 2, 3].equals [1, 3, 2]).to_be false
it "returns false when passing something else than an array (for example an object)", ->
expect([1, 2, 3].equals {}).to_be false
| true | #
# Copyright (c) 2012 PI:NAME:<NAME>END_PI.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
describe "Array", ->
describe "#each()", ->
it "iterates over all objects in a collection", ->
squared = []
[1, 2, 3].each (number) -> squared.add number * number
expect(squared).to_equal [1, 4, 9]
describe "#collect()", ->
it "returns all objects after applying block on them", ->
expect([1, 2, 3].collect (number) -> number * number).to_equal [1, 4, 9]
describe "#select()", ->
it "returns all objects for which the block returns true", ->
expect([1, 2, 3, 4, 5].select (number) -> number == 3).to_equal [3]
describe "#reject()", ->
it "returns all objects for which the block returns false", ->
expect([1, 2, 3, 4, 5].reject (number) -> number == 3).to_equal [1, 2, 4, 5]
describe "#partition()", ->
it "partitions the elements into: [selected, rejected], where selected contains all truthy values and rejected contains all falsy values", ->
expect(['hello', null, 42, false, true, undefined, 17].partition()).to_equal [['hello', 42, true, 17], [null, false, undefined]]
it "also accepts a block that is called for each value returning either true or false", ->
expect([1..10].partition (value) -> value % 2 == 0).to_equal [[2, 4, 6, 8, 10], [1, 3, 5, 7, 9]]
describe "#detect()", ->
it "returns the first object for which the block returns true", ->
expect([1, 2, 3, 4, 5].detect (number) -> number == 3).to_be 3
describe "#all()", ->
it "returns true if block returns true for all objects", ->
expect([1, 2, 3].all (number) -> true).to_be true
it "returns false if block returns false for at least one object", ->
expect([1, 2, 3].all (number) -> if number == 3 then false else true).to_be false
describe "#any()", ->
it "returns true if block returns true for at least one object", ->
expect([1, 2, 3].any (number) -> if number == 2 then true else false).to_be true
it "returns false if block returns false for all objects", ->
expect([1, 2, 3].any (number) -> false).to_be false
describe "#max()", ->
it "returns the max value", ->
expect([1, 3, 2, 4, 1].max()).to_be 4
it "returns null if empty", ->
expect([].max()).to_be null
describe "#min()", ->
it "returns the min value", ->
expect([1, 3, 2, -1, 1].min()).to_be -1
it "returns null if empty", ->
expect([].min()).to_be null
describe "#group_by()", ->
describe "when a key is passed", ->
it "returns a hash containing groups using the value of object's properties as keys", ->
peter = name: "PI:NAME:<NAME>END_PI"
maxim = name: "PI:NAME:<NAME>END_PI"
inna1 = name: "PI:NAME:<NAME>END_PI"
inna2 = name: "PI:NAME:<NAME>END_PI"
groups = [PI:NAME:<NAME>END_PI, maxim, inna1, inna2].group_by 'name'
expect(groups["Peter"]).to_equal [peter]
expect(groups["Maxim"]).to_equal [maxim]
expect(groups["Inna"]).to_equal [inna1, inna2]
describe "when a block is passed", ->
it "returns a hash containing groups using the results of the block as keys", ->
groups = [0, 1, 2, 3, 4, 5].group_by (value) -> if value % 2 == 0 then "even" else "odd"
expect(groups["odd"]).to_equal [1, 3, 5]
expect(groups["even"]).to_equal [0, 2, 4]
describe "#inject()", ->
it "behaves like reduce() while taking the initial parameter as the first argument", ->
expect([1, 2, 4].inject 0, (sum, number) -> sum + number).to_be 7
it "returns the initial parameter if array is empty", ->
expect([].inject 123, (sum, number) -> sum + number).to_be 123
describe "#contains()", ->
it "returns true if collection contains value", ->
expect([1, 2, 3].contains 2).to_be true
it "returns false if collection does not contain the value", ->
expect([1, 2, 3].contains 4).to_be false
describe "#pluck()", ->
it "returns an array collecting the values for the given key", ->
people = [{name: PI:NAME:<NAME>END_PI", age: 59}, {name: PI:NAME:<NAME>END_PI", age: 45}, {name: PI:NAME:<NAME>END_PI", age: 4}]
expect(people.pluck 'name').to_equal ["_PI:NAME:<NAME>END_PI", "_PI:NAME:<NAME>END_PIher", "_heinerle"]
describe "#count()", ->
it "returns the number of elements in a collection", ->
expect([].count()).to_be 0
expect([1, 2, 3].count()).to_be 3
expect([1, 2, 3, 3, 3].count()).to_be 5
describe "#is_empty()", ->
it "returns true if collection is empty", ->
expect([].is_empty()).to_be true
it "returns false if collection has at least one element", ->
expect([1].is_empty()).to_be false
expect([1, 2, 3].is_empty()).to_be false
describe "#first()", ->
it "returns the first element if |count| is not given", ->
expect([1, 2, 3].first()).to_be 1
expect([].first()).to_be_null()
it "returns a new array containing the first N elements if |count| = N is given", ->
expect([1, 2, 3].first 0).to_equal []
expect([1, 2, 3].first 1).to_equal [1]
expect([1, 2, 3].first 2).to_equal [1, 2]
expect([1, 2, 3].first 3).to_equal [1, 2, 3]
expect([1, 2, 3].first 10).to_equal [1, 2, 3]
describe "#second()", ->
it "returns the second element", ->
expect([1, 2, 3].second()).to_be 2
describe "#third()", ->
it "returns the third element", ->
expect([1, 2, 3].third()).to_be 3
describe "#rest()", ->
it "returns a new array containing all except the first element", ->
expect([1, 2, 3].rest()).to_equal [2, 3]
describe "#last()", ->
it "returns the last element if |count| is not given", ->
expect([1, 2, 3].last()).to_be 3
expect([].last()).to_be_null()
it "returns a new array containing the last N elements if |count| = N is given", ->
expect([1, 2, 3].last 0).to_equal []
expect([1, 2, 3].last 1).to_equal [3]
expect([1, 2, 3].last 2).to_equal [2, 3]
expect([1, 2, 3].last 3).to_equal [1, 2, 3]
expect([1, 2, 3].last 10).to_equal [1, 2, 3]
describe "#compacted()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.compacted()).not.to_be array
expect(array).to_equal [1, 2, 3]
it "removes all null objects", ->
expect([null, 1, null, 2, null, 3, null].compacted()).to_equal [1, 2, 3]
it "removes all undefined objects", ->
expect([undefined, 1, undefined, 2, undefined, 3, undefined].compacted()).to_equal [1, 2, 3]
describe "#flattened()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.flattened()).not.to_be array
expect(array).to_equal [1, 2, 3]
it "flattens an array", ->
expect([1, [2], [3, [[[4]]]]].flattened()).to_equal [1, 2, 3, 4]
describe "#reversed()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.reversed()).not.to_be array
expect(array).to_equal [1, 2, 3]
it "reverses the order of the objects", ->
array = [1, 2, 3]
expect(array.reversed()).to_equal [3, 2, 1]
describe "#with()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.with 4).not.to_be array
expect(array).to_equal [1, 2, 3]
it "adds one object to the end", ->
expect([1, 2, 3].with 4).to_equal [1, 2, 3, 4]
describe "#with_many()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.with_many [4]).not.to_be array
expect(array).to_equal [1, 2, 3]
it "appends all objects", ->
expect([1, 2, 3].with_many [4, 5, 6]).to_equal [1, 2, 3, 4, 5, 6]
describe "#without()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.without 3).not.to_be array
expect(array).to_equal [1, 2, 3]
it "removes first occurences of one object", ->
expect([1, 2, 3, 2].without 2).to_equal [1, 3, 2]
it "removes nothing if object is not in array", ->
expect([1, 2, 3].without 4).to_equal [1, 2, 3]
describe "#without_many()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.without_many [3]).not.to_be array
expect(array).to_equal [1, 2, 3]
it "removes first occurences of passed in objects", ->
expect([1, 2, 3, 2].without_many [2, 3]).to_equal [1, 2]
it "doesn't remove an object if it is not in the array", ->
expect([1, 2, 3].without_many [4, 5]).to_equal [1, 2, 3]
describe "#without_at()", ->
it "removes object at specified index", ->
expect([1, 2, 3].remove_at 1).to_equal [1, 3]
describe "#unique()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3, 3]
expect(array.unique()).not.to_be array
expect(array).to_equal [1, 2, 3, 3]
it "removes all duplicates in an array", ->
expect([1, 1, 1, 2, 2, 2, 3, 3, 3].unique()).to_equal [1, 2, 3]
describe "#intersect()", ->
it "returns the intersection between the receiver and an array", ->
expect([0, 1, 2, 3, 4, 5].intersect([1, 2, 3, 6])).to_equal [1, 2, 3]
it "removes duplicates", ->
expect([1, 2, 2, 3, 3, 3].intersect([1, 2, 3])).to_equal [1, 2, 3]
describe "#unite()", ->
it "returns a new array without modifying the receiver", ->
array = [1, 2, 3]
expect(array.unite [4, 5]).not.to_be array
expect(array).to_equal [1, 2, 3]
it "appends one array", ->
expect([1, 2, 3].unite [4, 5, 6]).to_equal [1, 2, 3, 4, 5, 6]
it "removes duplicates after uniting all arrays", ->
expect([1, 1, 2, 2, 3, 3].unite [4, 4, 5, 5, 6, 6]).to_equal [1, 2, 3, 4, 5, 6]
describe "#zip()", ->
it "zips receiver with an array of the same length", ->
expect([1, 2, 3].zip ['one', 'two', 'three']).to_equal [[1, 'one'], [2, 'two'], [3, 'three']]
it "zips receiver with many arrays of the same length", ->
expect([1, 2, 3].zip ['one', 'two', 'three'], ['uno', 'due', 'tres']).to_equal [[1, 'one', 'uno'], [2, 'two', 'due'], [3, 'three', 'tres']]
it "fills up with undefined if arrays are of different length", ->
expect([1, 2, 3].zip ['one', 'two'], ['uno']).to_equal [[1, 'one', 'uno'], [2, 'two', undefined], [3, undefined, undefined]]
describe "#index_of()", ->
it "returns the index of object", ->
expect([1, 2, 3].index_of 2).to_be 1
it "returns the first found index of object if the object is contained more than once in the array", ->
expect([1, 2, 3, 2].index_of 2).to_be 1
it "returns -1 if the object is not contained in the array", ->
expect([1, 2, 3].index_of 4).to_be -1
describe "#last_index_of()", ->
it "returns the index of object", ->
expect([1, 2, 3].last_index_of 2).to_be 1
it "returns the last found index of object if the object is contained more than once in the array", ->
expect([1, 2, 3, 2].last_index_of 2).to_be 3
it "returns -1 if the object is not contained in the array", ->
expect([1, 2, 3].last_index_of 4).to_be -1
describe "#indexes_of()", ->
it "returns all indexes of a object", ->
expect([1, 2, 3, 2, 4].indexes_of 2).to_equal [1, 3]
it "returns empty array if the object is not contained in the array", ->
expect([1, 2, 3].indexes_of 4).to_equal []
describe "#add()", ->
it "appends one object", ->
array = [1, 2, 3]
array.add 4
expect(array).to_equal [1, 2, 3, 4]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.add 4).to_be array
describe "#add_many()", ->
it "appends many objects", ->
array = [1, 2, 3]
array.add_many [4, 5, 6]
expect(array).to_equal [1, 2, 3, 4, 5, 6]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.add_many [4]).to_be array
describe "#insert_at()", ->
it "inserts the object at a specified index", ->
array = [1, 2, 4]
array.insert_at 3, 2
expect(array).to_equal [1, 2, 3, 4]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.insert_at 4, 0).to_be array
describe "#insert_many_at()", ->
it "inserts the objects at a specified index", ->
array = [1, 2, 5]
array.insert_many_at [3, 4], 2
expect(array).to_equal [1, 2, 3, 4, 5]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.insert_many_at [4], 0).to_be array
describe "#insert_before()", ->
it "inserts the object before the first occurence of a specific object", ->
array = [1, 2, 4, 5, 4, 3, 2, 1]
array.insert_before 3, 4
expect(array).to_equal [1, 2, 3, 4, 5, 4, 3, 2, 1]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.insert_before 0, 1).to_be array
describe "#insert_many_before()", ->
it "inserts objects before the first occurence of a specific object", ->
array = [1, 4, 5, 4, 3, 2, 1]
array.insert_many_before [2, 3], 4
expect(array).to_equal [1, 2, 3, 4, 5, 4, 3, 2, 1]
it "returns the receiver", ->
array = [1, 4, 5, 4, 3, 2, 1]
expect(array.insert_many_before [2, 3], 4).to_be array
describe "#insert_after()", ->
it "inserts the object after the last occurence of a specific object", ->
array = [1, 2, 3, 4, 5, 4, 3, 2]
array.insert_after 1, 2
expect(array).to_equal [1, 2, 3, 4, 5, 4, 3, 2, 1]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.insert_after [4], 3).to_be array
describe "#insert_many_after()", ->
it "inserts the objects before the first occurence of a specific object", ->
array = [1, 2, 3, 4, 5, 4, 3]
array.insert_many_after [2, 1], 3
expect(array).to_equal [1, 2, 3, 4, 5, 4, 3, 2, 1]
it "returns the receiver", ->
array = [1, 2, 3, 4, 5, 4, 3]
expect(array.insert_many_after [2, 1], 3).to_be array
describe "#remove()", ->
it "removes first occurence of one object", ->
array = [1, 2, 3, 2]
array.remove 2
expect(array).to_equal [1, 3, 2]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.remove 3).to_be array
describe "#remove_many()", ->
it "removes first occurences of objects", ->
array = [1, 2, 3, 2]
array.remove_many [2, 3, 2]
expect(array).to_equal [1]
it "removes nothing if no object from the collection is not in the array", ->
array = [1, 2, 3]
array.remove_many [4, 5]
expect(array).to_equal [1, 2, 3]
it "removes nothing if collection is empty", ->
array = [1, 2, 3]
array.remove_many([])
expect(array).to_equal [1, 2, 3]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.remove_many [3]).to_be array
describe "#remove_at()", ->
it "removes object at specified index", ->
array = [1, 2, 3]
array.remove_at 1
expect(array).to_equal [1, 3]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.remove_at 0).to_be array
describe "#replace_with()", ->
it "replaces first occurence of object with replacement", ->
array = [1, 4, 3, 4]
array.replace_with 4, 2
expect(array).to_equal [1, 2, 3, 4]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.replace_with 1, 5).to_be array
describe "#replace_with_many()", ->
it "replaces first occurence of object with many objects", ->
array = [1, 4, 3, 4]
array.replace_with_many 4, [2, 2]
expect(array).to_equal [1, 2, 2, 3, 4]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.replace_with_many 1, [5, 6]).to_be array
describe "#replace_at_with()", ->
it "replaces object at index with object", ->
array = [1, 4, 3]
array.replace_at_with 1, 2
expect(array).to_equal [1, 2, 3]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.replace_at_with 0, 5).to_be array
describe "#replace_at_with_many()", ->
it "replaces object at index with many objects", ->
array = [1, 4, 4]
array.replace_at_with_many 1, [2, 3]
expect(array).to_equal [1, 2, 3, 4]
it "returns the receiver", ->
array = [1, 2, 3]
expect(array.replace_at_with 0, 5).to_be array
describe "#sort_by()", ->
PI:NAME:<NAME>END_PI = name: PI:NAME:<NAME>END_PI", age: 23
maxim = name: PI:NAME:<NAME>END_PI", age: 40
jessi = name: PI:NAME:<NAME>END_PI", age: 54
kevin = name: PI:NAME:<NAME>END_PI", age: 33
inna1 = name: PI:NAME:<NAME>END_PI", age: 36
inna2 = name: PI:NAME:<NAME>END_PI", age: 33
people = null
before ->
people = [PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI, inna1, inna2]
after ->
people = null
it "sorts by one property name", ->
people.sort_by ['name']
expect(people).to_equal [inna1, inna2, PI:NAME:<NAME>END_PIi, PI:NAME:<NAME>END_PI, maxim, peter]
it "sorts by many property names", ->
people.sort_by ['name', 'age']
expect(people).to_equal [inna2, inna1, jessi, PI:NAME:<NAME>END_PIvin, maxim, peter]
it "returns the receiver", ->
expect(people.sort_by ['name']).to_be people
describe "#copy()", ->
it "copys an array", ->
array = [1, 2, 3]
copy = array.copy()
expect(copy).not.to_be array
expect(copy).to_equal array
describe "#equals()", ->
it "returns true for an array with the same objects", ->
expect([1, 2, 3].equals [1, 2, 3]).to_be true
it "returns false for undefined or null", ->
expect([1, 2, 3].equals null).to_be false
expect([1, 2, 3].equals()).to_be false
it "returns false for an array with the same objects but in a different order", ->
expect([1, 2, 3].equals [1, 3, 2]).to_be false
it "returns false when passing something else than an array (for example an object)", ->
expect([1, 2, 3].equals {}).to_be false
|
[
{
"context": "istribution: someDebian(distros)\n password: genpass()\n metadata:\n purpose: \"testing\"\n ",
"end": 1635,
"score": 0.9278811812400818,
"start": 1628,
"tag": "PASSWORD",
"value": "genpass"
}
] | src/test.coffee | typeduck/jiffybox | 0 | ###############################################################################
# Tests for JiffyBox Client Library
###############################################################################
should = require("should")
Promise = require("bluebird")
Moment = require("moment")
jiffybox = require("./JiffyBoxClient")
CONFIG = require("convig").env({
APIKEY: () -> throw new Error("set env var APIKEY for tests!")
})
describe "JiffBoxClient", () ->
client = null
before () -> client = jiffybox(CONFIG.APIKEY)
genpass = () ->
lower = "abcdefghijklmnopqrstuvwxyz".split("")
upper = "ABCDEFGHIJKLMNOPQRSTUVWXYZ".split("")
numbers = "0123456789".split("")
special = "!@#$%^&*()_-".split("")
pick = (a, num) ->
s = []
i = 0
while i++ < num
s.push( a[Math.floor(Math.random() * a.length)] )
return s
p = pick(lower, 4)
p = p.concat(pick(upper, 4))
p = p.concat(pick(numbers, 3))
p = p.concat(pick(special, 3))
return p.join("")
distros = null
plans = null
it "should be able to access Distros", () ->
@timeout(10000)
Promise.try(() -> client.getDistros() )
.then((data) -> distros = data)
it "should be able to access Plans", () ->
@timeout(10000)
Promise.try(() -> client.getPlans())
.then((data) -> plans = data)
it "should create, wait for READY, and delete a JiffyBox", () ->
@timeout(600000)
Promise.bind({}).then(() ->
params =
name: "auto-" + Moment().format("YYYY-MM-DD[T]HH.mm.ss")
planid: cheapestPlanId(plans)
distribution: someDebian(distros)
password: genpass()
metadata:
purpose: "testing"
created: new Date()
client.createJiffyBox(params)
)
.then((@box) ->
isReady = (box) -> box.running and box.status is "READY"
client.waitForStatus(@box.id, isReady)
)
.then(() ->
canShutdown = (box) -> box.status is "READY" and not box.running
client.setStatusAndWait(@box.id, "SHUTDOWN", canShutdown)
)
.then(() ->
client.deleteJiffyBox(@box.id)
)
# finds the cheapest plan from the list
cheapestPlanId = (plans) ->
plans.sort (a, b) -> 100 * (a.pricePerHour - b.pricePerHour)
plans[0].id
# Finds some flavour of Debian to install
someDebian = (distros) ->
for distro in distros when (/debian/i).test(distro.name)
return distro.key
| 98133 | ###############################################################################
# Tests for JiffyBox Client Library
###############################################################################
should = require("should")
Promise = require("bluebird")
Moment = require("moment")
jiffybox = require("./JiffyBoxClient")
CONFIG = require("convig").env({
APIKEY: () -> throw new Error("set env var APIKEY for tests!")
})
describe "JiffBoxClient", () ->
client = null
before () -> client = jiffybox(CONFIG.APIKEY)
genpass = () ->
lower = "abcdefghijklmnopqrstuvwxyz".split("")
upper = "ABCDEFGHIJKLMNOPQRSTUVWXYZ".split("")
numbers = "0123456789".split("")
special = "!@#$%^&*()_-".split("")
pick = (a, num) ->
s = []
i = 0
while i++ < num
s.push( a[Math.floor(Math.random() * a.length)] )
return s
p = pick(lower, 4)
p = p.concat(pick(upper, 4))
p = p.concat(pick(numbers, 3))
p = p.concat(pick(special, 3))
return p.join("")
distros = null
plans = null
it "should be able to access Distros", () ->
@timeout(10000)
Promise.try(() -> client.getDistros() )
.then((data) -> distros = data)
it "should be able to access Plans", () ->
@timeout(10000)
Promise.try(() -> client.getPlans())
.then((data) -> plans = data)
it "should create, wait for READY, and delete a JiffyBox", () ->
@timeout(600000)
Promise.bind({}).then(() ->
params =
name: "auto-" + Moment().format("YYYY-MM-DD[T]HH.mm.ss")
planid: cheapestPlanId(plans)
distribution: someDebian(distros)
password: <PASSWORD>()
metadata:
purpose: "testing"
created: new Date()
client.createJiffyBox(params)
)
.then((@box) ->
isReady = (box) -> box.running and box.status is "READY"
client.waitForStatus(@box.id, isReady)
)
.then(() ->
canShutdown = (box) -> box.status is "READY" and not box.running
client.setStatusAndWait(@box.id, "SHUTDOWN", canShutdown)
)
.then(() ->
client.deleteJiffyBox(@box.id)
)
# finds the cheapest plan from the list
cheapestPlanId = (plans) ->
plans.sort (a, b) -> 100 * (a.pricePerHour - b.pricePerHour)
plans[0].id
# Finds some flavour of Debian to install
someDebian = (distros) ->
for distro in distros when (/debian/i).test(distro.name)
return distro.key
| true | ###############################################################################
# Tests for JiffyBox Client Library
###############################################################################
should = require("should")
Promise = require("bluebird")
Moment = require("moment")
jiffybox = require("./JiffyBoxClient")
CONFIG = require("convig").env({
APIKEY: () -> throw new Error("set env var APIKEY for tests!")
})
describe "JiffBoxClient", () ->
client = null
before () -> client = jiffybox(CONFIG.APIKEY)
genpass = () ->
lower = "abcdefghijklmnopqrstuvwxyz".split("")
upper = "ABCDEFGHIJKLMNOPQRSTUVWXYZ".split("")
numbers = "0123456789".split("")
special = "!@#$%^&*()_-".split("")
pick = (a, num) ->
s = []
i = 0
while i++ < num
s.push( a[Math.floor(Math.random() * a.length)] )
return s
p = pick(lower, 4)
p = p.concat(pick(upper, 4))
p = p.concat(pick(numbers, 3))
p = p.concat(pick(special, 3))
return p.join("")
distros = null
plans = null
it "should be able to access Distros", () ->
@timeout(10000)
Promise.try(() -> client.getDistros() )
.then((data) -> distros = data)
it "should be able to access Plans", () ->
@timeout(10000)
Promise.try(() -> client.getPlans())
.then((data) -> plans = data)
it "should create, wait for READY, and delete a JiffyBox", () ->
@timeout(600000)
Promise.bind({}).then(() ->
params =
name: "auto-" + Moment().format("YYYY-MM-DD[T]HH.mm.ss")
planid: cheapestPlanId(plans)
distribution: someDebian(distros)
password: PI:PASSWORD:<PASSWORD>END_PI()
metadata:
purpose: "testing"
created: new Date()
client.createJiffyBox(params)
)
.then((@box) ->
isReady = (box) -> box.running and box.status is "READY"
client.waitForStatus(@box.id, isReady)
)
.then(() ->
canShutdown = (box) -> box.status is "READY" and not box.running
client.setStatusAndWait(@box.id, "SHUTDOWN", canShutdown)
)
.then(() ->
client.deleteJiffyBox(@box.id)
)
# finds the cheapest plan from the list
cheapestPlanId = (plans) ->
plans.sort (a, b) -> 100 * (a.pricePerHour - b.pricePerHour)
plans[0].id
# Finds some flavour of Debian to install
someDebian = (distros) ->
for distro in distros when (/debian/i).test(distro.name)
return distro.key
|
[
{
"context": "#\n# Copyright 2014 Carsten Klein\n#\n# Licensed under the Apache License, Version 2.",
"end": 32,
"score": 0.9998637437820435,
"start": 19,
"tag": "NAME",
"value": "Carsten Klein"
}
] | test/monkeypatch-test.coffee | vibejs/vibejs-subclassof | 0 | #
# Copyright 2014 Carsten Klein
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
vows = require 'vows'
assert = require 'assert'
require '../src/macros'
vows.describe 'monkey patches'
.addBatch
'Standard Errors are subclasses of Error' :
'EvalError' : ->
assert.subclassOf EvalError, Error
'RangeError' : ->
assert.subclassOf RangeError, Error
'ReferenceError' : ->
assert.subclassOf ReferenceError, Error
'SyntaxError' : ->
assert.subclassOf SyntaxError, Error
'TypeError' : ->
assert.subclassOf TypeError, Error
'URIError' : ->
assert.subclassOf URIError, Error
.export module
| 49034 | #
# Copyright 2014 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
vows = require 'vows'
assert = require 'assert'
require '../src/macros'
vows.describe 'monkey patches'
.addBatch
'Standard Errors are subclasses of Error' :
'EvalError' : ->
assert.subclassOf EvalError, Error
'RangeError' : ->
assert.subclassOf RangeError, Error
'ReferenceError' : ->
assert.subclassOf ReferenceError, Error
'SyntaxError' : ->
assert.subclassOf SyntaxError, Error
'TypeError' : ->
assert.subclassOf TypeError, Error
'URIError' : ->
assert.subclassOf URIError, Error
.export module
| true | #
# Copyright 2014 PI:NAME:<NAME>END_PI
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
vows = require 'vows'
assert = require 'assert'
require '../src/macros'
vows.describe 'monkey patches'
.addBatch
'Standard Errors are subclasses of Error' :
'EvalError' : ->
assert.subclassOf EvalError, Error
'RangeError' : ->
assert.subclassOf RangeError, Error
'ReferenceError' : ->
assert.subclassOf ReferenceError, Error
'SyntaxError' : ->
assert.subclassOf SyntaxError, Error
'TypeError' : ->
assert.subclassOf TypeError, Error
'URIError' : ->
assert.subclassOf URIError, Error
.export module
|
[
{
"context": "UserContext= (req)->\n uctx=\n ownerRef: \"email:magma.test.u1.77225432@flarebyte.com\"\n universeId: \"magma:f/universe/testing\",\n ",
"end": 188,
"score": 0.9999216794967651,
"start": 152,
"tag": "EMAIL",
"value": "magma.test.u1.77225432@flarebyte.com"
}
] | nodejs/flarebyte.net/0.8/node/flaming/routes/apiOld.coffee | flarebyte/wonderful-bazar | 0 | #
# * Serve JSON to our AngularJS client
#
'use strict'
client = require('flaming-magma-client')
getUserContext= (req)->
uctx=
ownerRef: "email:magma.test.u1.77225432@flarebyte.com"
universeId: "magma:f/universe/testing",
verified: true
return uctx
exports.name = (req, res) ->
viewModel= client.g.lookup('magma:f/view.get/744550486')
ctx=
my: getUserContext req
view: viewModel
qs:
path: "contacts"
client.httpRequest ctx, (err, data)->
res.send data
| 153694 | #
# * Serve JSON to our AngularJS client
#
'use strict'
client = require('flaming-magma-client')
getUserContext= (req)->
uctx=
ownerRef: "email:<EMAIL>"
universeId: "magma:f/universe/testing",
verified: true
return uctx
exports.name = (req, res) ->
viewModel= client.g.lookup('magma:f/view.get/744550486')
ctx=
my: getUserContext req
view: viewModel
qs:
path: "contacts"
client.httpRequest ctx, (err, data)->
res.send data
| true | #
# * Serve JSON to our AngularJS client
#
'use strict'
client = require('flaming-magma-client')
getUserContext= (req)->
uctx=
ownerRef: "email:PI:EMAIL:<EMAIL>END_PI"
universeId: "magma:f/universe/testing",
verified: true
return uctx
exports.name = (req, res) ->
viewModel= client.g.lookup('magma:f/view.get/744550486')
ctx=
my: getUserContext req
view: viewModel
qs:
path: "contacts"
client.httpRequest ctx, (err, data)->
res.send data
|
[
{
"context": "s\",\"Prov\",\"Eccl\",\"Song\",\"Isa\",\"Jer\",\"Lam\",\"Ezek\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"",
"end": 505,
"score": 0.8025781512260437,
"start": 502,
"tag": "NAME",
"value": "Dan"
},
{
"context": "ov\",\"Eccl\",\"Song\",\"Isa\"... | src/de/spec.coffee | phillipb/Bible-Passage-Reference-Parser | 149 | bcv_parser = require("../../js/de_bcv_parser.js").bcv_parser
describe "Parsing", ->
p = {}
beforeEach ->
p = new bcv_parser
p.options.osis_compaction_strategy = "b"
p.options.sequence_combination_strategy = "combine"
it "should round-trip OSIS references", ->
p.set_options osis_compaction_strategy: "bc"
books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
for book in books
bc = book + ".1"
bcv = bc + ".1"
bcv_range = bcv + "-" + bc + ".2"
expect(p.parse(bc).osis()).toEqual bc
expect(p.parse(bcv).osis()).toEqual bcv
expect(p.parse(bcv_range).osis()).toEqual bcv_range
it "should round-trip OSIS Apocrypha references", ->
p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
p.include_apocrypha true
books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
for book in books
bc = book + ".1"
bcv = bc + ".1"
bcv_range = bcv + "-" + bc + ".2"
expect(p.parse(bc).osis()).toEqual bc
expect(p.parse(bcv).osis()).toEqual bcv
expect(p.parse(bcv_range).osis()).toEqual bcv_range
p.set_options ps151_strategy: "bc"
expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
p.include_apocrypha false
for book in books
bc = book + ".1"
expect(p.parse(bc).osis()).toEqual ""
it "should handle a preceding character", ->
expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
expect(p.parse("1Ps 1").osis()).toEqual ""
expect(p.parse("11Sam 1").osis()).toEqual ""
describe "Localized book Gen (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gen (de)", ->
`
expect(p.parse("Erste Buch Mose 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1. Buch Mose 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 Buch Mose 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Erste Mose 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1. Mose 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Genesis 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 Mose 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1. Mos 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 Mos 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1. Mo 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 Mo 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
p.include_apocrypha(false)
expect(p.parse("ERSTE BUCH MOSE 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1. BUCH MOSE 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 BUCH MOSE 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("ERSTE MOSE 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1. MOSE 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("GENESIS 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 MOSE 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1. MOS 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 MOS 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1. MO 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 MO 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
`
true
describe "Localized book Exod (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Exod (de)", ->
`
expect(p.parse("Zweite Buch Mose 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2. Buch Mose 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 Buch Mose 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Zweite Mose 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2. Mose 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 Mose 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2. Mos 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Exodus 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 Mos 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2. Mo 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 Mo 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Ex 1:1").osis()).toEqual("Exod.1.1")
p.include_apocrypha(false)
expect(p.parse("ZWEITE BUCH MOSE 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2. BUCH MOSE 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 BUCH MOSE 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("ZWEITE MOSE 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2. MOSE 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 MOSE 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2. MOS 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EXODUS 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 MOS 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2. MO 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 MO 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EX 1:1").osis()).toEqual("Exod.1.1")
`
true
describe "Localized book Bel (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bel (de)", ->
`
expect(p.parse("Bel und Vom Drachen 1:1").osis()).toEqual("Bel.1.1")
expect(p.parse("Bel 1:1").osis()).toEqual("Bel.1.1")
`
true
describe "Localized book Lev (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lev (de)", ->
`
expect(p.parse("Dritte Buch Mose 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3. Buch Mose 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 Buch Mose 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Dritte Mose 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Levitikus 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3. Mose 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 Mose 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3. Mos 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 Mos 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3. Mo 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 Mo 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
p.include_apocrypha(false)
expect(p.parse("DRITTE BUCH MOSE 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3. BUCH MOSE 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 BUCH MOSE 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("DRITTE MOSE 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEVITIKUS 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3. MOSE 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 MOSE 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3. MOS 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 MOS 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3. MO 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 MO 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
`
true
describe "Localized book Num (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Num (de)", ->
`
expect(p.parse("Vierte Buch Mose 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4. Buch Mose 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 Buch Mose 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Vierte Mose 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4. Mose 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 Mose 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4. Mos 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Numeri 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 Mos 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4. Mo 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 Mo 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
p.include_apocrypha(false)
expect(p.parse("VIERTE BUCH MOSE 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4. BUCH MOSE 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 BUCH MOSE 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("VIERTE MOSE 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4. MOSE 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 MOSE 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4. MOS 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NUMERI 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 MOS 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4. MO 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 MO 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
`
true
describe "Localized book Sir (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sir (de)", ->
`
expect(p.parse("Ecclesiasticus 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Jesus Sirach 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Sir 1:1").osis()).toEqual("Sir.1.1")
`
true
describe "Localized book Wis (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Wis (de)", ->
`
expect(p.parse("Weisheit Salomos 1:1").osis()).toEqual("Wis.1.1")
expect(p.parse("Weisheit 1:1").osis()).toEqual("Wis.1.1")
expect(p.parse("Weish 1:1").osis()).toEqual("Wis.1.1")
expect(p.parse("Wis 1:1").osis()).toEqual("Wis.1.1")
`
true
describe "Localized book Lam (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lam (de)", ->
`
expect(p.parse("Klagelieder Jeremias 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Klagelieder 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Klag 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Klgl 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
p.include_apocrypha(false)
expect(p.parse("KLAGELIEDER JEREMIAS 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("KLAGELIEDER 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("KLAG 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("KLGL 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
`
true
describe "Localized book EpJer (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: EpJer (de)", ->
`
expect(p.parse("Brief des Jeremia 1:1").osis()).toEqual("EpJer.1.1")
expect(p.parse("Br Jer 1:1").osis()).toEqual("EpJer.1.1")
expect(p.parse("EpJer 1:1").osis()).toEqual("EpJer.1.1")
`
true
describe "Localized book Rev (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Rev (de)", ->
`
expect(p.parse("Offenbarung 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Offb 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
p.include_apocrypha(false)
expect(p.parse("OFFENBARUNG 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("OFFB 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
`
true
describe "Localized book PrMan (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PrMan (de)", ->
`
expect(p.parse("Gebet des Manasse 1:1").osis()).toEqual("PrMan.1.1")
expect(p.parse("Gebet Manasses 1:1").osis()).toEqual("PrMan.1.1")
expect(p.parse("Gebet Manasse 1:1").osis()).toEqual("PrMan.1.1")
expect(p.parse("Geb Man 1:1").osis()).toEqual("PrMan.1.1")
expect(p.parse("Or Man 1:1").osis()).toEqual("PrMan.1.1")
expect(p.parse("PrMan 1:1").osis()).toEqual("PrMan.1.1")
`
true
describe "Localized book Deut (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Deut (de)", ->
`
expect(p.parse("Funfte Buch Mose 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Fünfte Buch Mose 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Deuteronomium 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. Buch Mose 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 Buch Mose 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Funfte Mose 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Fünfte Mose 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. Mose 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 Mose 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. Mos 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 Mos 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. Mo 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 Mo 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Dtn 1:1").osis()).toEqual("Deut.1.1")
p.include_apocrypha(false)
expect(p.parse("FUNFTE BUCH MOSE 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("FÜNFTE BUCH MOSE 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DEUTERONOMIUM 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. BUCH MOSE 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 BUCH MOSE 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("FUNFTE MOSE 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("FÜNFTE MOSE 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. MOSE 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 MOSE 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. MOS 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 MOS 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. MO 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 MO 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DTN 1:1").osis()).toEqual("Deut.1.1")
`
true
describe "Localized book Josh (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Josh (de)", ->
`
expect(p.parse("Josua 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("Josh 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("Jos 1:1").osis()).toEqual("Josh.1.1")
p.include_apocrypha(false)
expect(p.parse("JOSUA 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOS 1:1").osis()).toEqual("Josh.1.1")
`
true
describe "Localized book Judg (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Judg (de)", ->
`
expect(p.parse("Richter 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Ri 1:1").osis()).toEqual("Judg.1.1")
p.include_apocrypha(false)
expect(p.parse("RICHTER 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("RI 1:1").osis()).toEqual("Judg.1.1")
`
true
describe "Localized book Ruth (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ruth (de)", ->
`
expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("Rut 1:1").osis()).toEqual("Ruth.1.1")
p.include_apocrypha(false)
expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("RUT 1:1").osis()).toEqual("Ruth.1.1")
`
true
describe "Localized book 1Esd (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Esd (de)", ->
`
expect(p.parse("Erste Esra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1. Esra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1 Esra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1. Esr 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1 Esr 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1. Es 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1 Es 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1Esd 1:1").osis()).toEqual("1Esd.1.1")
`
true
describe "Localized book 2Esd (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Esd (de)", ->
`
expect(p.parse("Zweite Esra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2. Esra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2 Esra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2. Esr 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2 Esr 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2. Es 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2 Es 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2Esd 1:1").osis()).toEqual("2Esd.1.1")
`
true
describe "Localized book Isa (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Isa (de)", ->
`
expect(p.parse("Isaias 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Jesaja 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Jes 1:1").osis()).toEqual("Isa.1.1")
p.include_apocrypha(false)
expect(p.parse("ISAIAS 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("JESAJA 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("JES 1:1").osis()).toEqual("Isa.1.1")
`
true
# Data-driven form of the generated per-book specs for German (de) book names.
# Each entry is [OSIS id, parseable book-name variants, checkUpperCase?].
# For every book a describe/it pair is registered (same descriptions, same
# order as the expanded blocks) asserting that "<variant> 1:1" parses to
# "<osis>.1.1". When checkUpperCase is true (the default), the spec then
# turns the Apocrypha off and re-parses every variant upper-cased — the
# upper-case lists in the expanded blocks were exactly the mixed-case lists
# passed through toUpperCase(), in the same order. The deuterocanonical
# entries (GkEsth, PrAzar, SgThree) only tested mixed case, so they carry an
# explicit `false`.
germanBookTests = [
  ["2Sam", ["Zweite Samuel", "2. Samuel", "2 Samuel", "2. Sam", "2 Sam", "2Sam"]]
  ["1Sam", ["Erste Samuel", "1. Samuel", "1 Samuel", "1. Sam", "1 Sam", "1Sam"]]
  ["2Kgs", ["Zweite Koenige", "Zweite Konige", "Zweite Könige", "2. Koenige", "2 Koenige", "2. Konige", "2. Könige", "2 Konige", "2 Könige", "2. Kon", "2. Kön", "2 Kon", "2 Kön", "2Kgs"]]
  ["1Kgs", ["Erste Koenige", "Erste Konige", "Erste Könige", "1. Koenige", "1 Koenige", "1. Konige", "1. Könige", "1 Konige", "1 Könige", "1. Kon", "1. Kön", "1 Kon", "1 Kön", "1Kgs"]]
  ["2Chr", ["Zweite Chronik", "2. Chronik", "2 Chronik", "2. Chr", "2 Chr", "2Chr"]]
  ["1Chr", ["Erste Chronik", "1. Chronik", "1 Chronik", "1. Chr", "1 Chr", "1Chr"]]
  ["Ezra", ["Esra", "Ezra", "Esr"]]
  ["Neh", ["Nehemia", "Neh"]]
  # "Ester (Griechisch)" appears twice on purpose: the generated file asserted
  # it once with JS no-op backslash escapes ("Ester \(Griechisch\)") and once
  # without — both denote the same characters, and both assertions are kept.
  ["GkEsth", ["Ester (Griechisch)", "Ester (Griechisch)", "GkEsth", "Gr Est"], false]
  ["Esth", ["Esther", "Ester", "Esth", "Est"]]
  ["Job", ["Hiob", "Ijob", "Job", "Hi"]]
  ["Ps", ["Psalmen", "Psalm", "Ps"]]
  ["PrAzar", ["Gebet des Asarja", "Geb As", "PrAzar"], false]
  ["Prov", ["Sprichworter", "Sprichwörter", "Sprueche", "Spruche", "Sprüche", "Prov", "Spr"]]
  ["Eccl", ["Ecclesiastes", "Ekklesiastes", "Prediger", "Kohelet", "Eccl", "Pred", "Koh"]]
  ["SgThree", ["Lobgesang der drei jungen Manner im Feuerofen", "Lobgesang der drei jungen Männer im Feuerofen", "Der Gesang der Drei Manner im feurigen Ofen", "Der Gesang der Drei Männer im feurigen Ofen", "Lobgesang der drei jungen Manner", "Lobgesang der drei jungen Männer", "Lobgesang der 3 jungen Manner", "Lobgesang der 3 jungen Männer", "Der Gesang der Drei", "Gesang der Drei", "SgThree", "L3J"], false]
  ["Song", ["Hohelied Salomonis", "Hoheslied Salomos", "Hohes Lied", "Hoheslied", "Hohelied", "Song", "Hld"]]
  ["Jer", ["Jeremias", "Jeremia", "Jer"]]
  ["Ezek", ["Ezechiel", "Hesekiel", "Ezek", "Hes", "Ez"]]
  ["Dan", ["Daniel", "Dan"]]
  ["Hos", ["Hosea", "Osee", "Hos"]]
  ["Joel", ["Joel"]]
  ["Amos", ["Amos", "Am"]]
  ["Obad", ["Abdias", "Obadja", "Obad", "Obd"]]
  ["Jonah", ["Jonah", "Jonas", "Jona"]]
  ["Mic", ["Michaas", "Michäas", "Micha", "Mic", "Mi"]]
  ["Nah", ["Nahum", "Nah"]]
  ["Hab", ["Habakuk", "Hab"]]
  ["Zeph", ["Sophonias", "Zephanja", "Zefanja", "Soph", "Zeph", "Zef"]]
  ["Hag", ["Aggaus", "Aggäus", "Haggai", "Agg", "Hag", "Ag"]]
  ["Zech", ["Zacharias", "Sacharja", "Sach", "Zech"]]
  ["Mal", ["Malachias", "Maleachi", "Mal"]]
  ["Matt", ["Matthaus", "Matthäus", "Matt", "Mt"]]
  ["Mark", ["Markus", "Mark", "Mk"]]
  ["Luke", ["Lukas", "Luke", "Lk"]]
  ["1John", ["Erste Johannes", "1. Johannes", "1 Johannes", "1. Joh", "1 Joh", "1John"]]
  ["2John", ["Zweite Johannes", "2. Johannes", "2 Johannes", "2. Joh", "2 Joh", "2John"]]
  ["3John", ["Dritte Johannes", "3. Johannes", "3 Johannes", "3. Joh", "3 Joh", "3John"]]
  ["John", ["Johannes", "John", "Joh"]]
  ["Acts", ["Apostelgeschichte", "Acts", "Apg"]]
  ["Rom", ["Roemer", "Romer", "Römer", "Rom", "Röm"]]
]

# forEach (rather than a CoffeeScript `for` loop) gives each registered
# describe its own closure over `osis`/`names` without needing `do`.
germanBookTests.forEach (bookTest) ->
  [osis, names, checkUpperCase] = bookTest
  checkUpperCase ?= true
  describe "Localized book #{osis} (de)", ->
    p = {}
    beforeEach ->
      p = new bcv_parser
      p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
      p.include_apocrypha true
    it "should handle book: #{osis} (de)", ->
      expect(p.parse("#{name} 1:1").osis()).toEqual("#{osis}.1.1") for name in names
      if checkUpperCase
        # The expanded specs switched the Apocrypha off before the
        # upper-case round; preserved verbatim here.
        p.include_apocrypha false
        expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("#{osis}.1.1") for name in names
      true
describe "Localized book 2Cor (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Cor (de)", ->
`
expect(p.parse("Zweite Korinther 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korinther 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korinther 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2Cor 1:1").osis()).toEqual("2Cor.1.1")
p.include_apocrypha(false)
expect(p.parse("ZWEITE KORINTHER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTHER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTHER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2COR 1:1").osis()).toEqual("2Cor.1.1")
`
true
describe "Localized book 1Cor (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Cor (de)", ->
`
expect(p.parse("Erste Korinther 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Korinther 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korinther 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1Cor 1:1").osis()).toEqual("1Cor.1.1")
p.include_apocrypha(false)
expect(p.parse("ERSTE KORINTHER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTHER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTHER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1COR 1:1").osis()).toEqual("1Cor.1.1")
`
true
# Auto-generated locale fixtures: German (de) spellings for Gal, Eph, Phil,
# Col, 2Thess and 1Thess. Same shape as the surrounding blocks: mixed-case
# forms first, then all-caps forms after disabling the Apocrypha.
describe "Localized book Gal (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Gal (de)", ->
		`
		expect(p.parse("Galater 1:1").osis()).toEqual("Gal.1.1")
		expect(p.parse("Gal 1:1").osis()).toEqual("Gal.1.1")
		p.include_apocrypha(false)
		expect(p.parse("GALATER 1:1").osis()).toEqual("Gal.1.1")
		expect(p.parse("GAL 1:1").osis()).toEqual("Gal.1.1")
		`
		true
describe "Localized book Eph (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Eph (de)", ->
		`
		expect(p.parse("Epheser 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("Eph 1:1").osis()).toEqual("Eph.1.1")
		p.include_apocrypha(false)
		expect(p.parse("EPHESER 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPH 1:1").osis()).toEqual("Eph.1.1")
		`
		true
describe "Localized book Phil (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phil (de)", ->
		`
		expect(p.parse("Philipper 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Phil 1:1").osis()).toEqual("Phil.1.1")
		p.include_apocrypha(false)
		expect(p.parse("PHILIPPER 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PHIL 1:1").osis()).toEqual("Phil.1.1")
		`
		true
describe "Localized book Col (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Col (de)", ->
		`
		expect(p.parse("Kolosser 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("Col 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("Kol 1:1").osis()).toEqual("Col.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KOLOSSER 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("COL 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KOL 1:1").osis()).toEqual("Col.1.1")
		`
		true
describe "Localized book 2Thess (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Thess (de)", ->
		`
		expect(p.parse("Zweite Thessalonicher 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. Thessalonicher 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 Thessalonicher 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. Thess 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 Thess 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2Thess 1:1").osis()).toEqual("2Thess.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ZWEITE THESSALONICHER 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. THESSALONICHER 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 THESSALONICHER 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. THESS 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 THESS 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2THESS 1:1").osis()).toEqual("2Thess.1.1")
		`
		true
describe "Localized book 1Thess (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Thess (de)", ->
		`
		expect(p.parse("Erste Thessalonicher 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. Thessalonicher 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 Thessalonicher 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. Thess 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 Thess 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1Thess 1:1").osis()).toEqual("1Thess.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ERSTE THESSALONICHER 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. THESSALONICHER 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 THESSALONICHER 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. THESS 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 THESS 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1THESS 1:1").osis()).toEqual("1Thess.1.1")
		`
		true
# Auto-generated locale fixtures: German (de) spellings for 2Tim, 1Tim, Titus,
# Phlm, Heb and Jas. Mixed-case forms first, then all-caps forms after
# disabling the Apocrypha.
describe "Localized book 2Tim (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Tim (de)", ->
		`
		expect(p.parse("Zweite Timotheus 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. Timotheus 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 Timotheus 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. Tim 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 Tim 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2Tim 1:1").osis()).toEqual("2Tim.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ZWEITE TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. TIM 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIM 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2TIM 1:1").osis()).toEqual("2Tim.1.1")
		`
		true
describe "Localized book 1Tim (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Tim (de)", ->
		`
		expect(p.parse("Erste Timotheus 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. Timotheus 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 Timotheus 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. Tim 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 Tim 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1Tim 1:1").osis()).toEqual("1Tim.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ERSTE TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. TIM 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIM 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1TIM 1:1").osis()).toEqual("1Tim.1.1")
		`
		true
describe "Localized book Titus (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Titus (de)", ->
		`
		expect(p.parse("Titus 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("Tit 1:1").osis()).toEqual("Titus.1.1")
		p.include_apocrypha(false)
		expect(p.parse("TITUS 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TIT 1:1").osis()).toEqual("Titus.1.1")
		`
		true
describe "Localized book Phlm (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phlm (de)", ->
		`
		expect(p.parse("Philemon 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("Phlm 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("Phm 1:1").osis()).toEqual("Phlm.1.1")
		p.include_apocrypha(false)
		expect(p.parse("PHILEMON 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("PHLM 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("PHM 1:1").osis()).toEqual("Phlm.1.1")
		`
		true
describe "Localized book Heb (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Heb (de)", ->
		`
		expect(p.parse("Hebraeer 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("Hebraer 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("Hebräer 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("Hebr 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("Heb 1:1").osis()).toEqual("Heb.1.1")
		p.include_apocrypha(false)
		expect(p.parse("HEBRAEER 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HEBRAER 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HEBRÄER 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HEBR 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HEB 1:1").osis()).toEqual("Heb.1.1")
		`
		true
describe "Localized book Jas (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jas (de)", ->
		`
		expect(p.parse("Jakobusbrief 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("Jakobus 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("Jak 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("Jas 1:1").osis()).toEqual("Jas.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JAKOBUSBRIEF 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("JAKOBUS 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("JAK 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("JAS 1:1").osis()).toEqual("Jas.1.1")
		`
		true
# Auto-generated locale fixtures: German (de) spellings for 2Pet, 1Pet and
# Jude. Mixed-case forms first, then all-caps forms after disabling the
# Apocrypha.
describe "Localized book 2Pet (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Pet (de)", ->
		`
		expect(p.parse("Zweite Petrus 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2. Petrus 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 Petrus 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2. Petr 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 Petr 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2Pet 1:1").osis()).toEqual("2Pet.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ZWEITE PETRUS 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2. PETRUS 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 PETRUS 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2. PETR 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 PETR 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2PET 1:1").osis()).toEqual("2Pet.1.1")
		`
		true
describe "Localized book 1Pet (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Pet (de)", ->
		`
		expect(p.parse("Erste Petrus 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1. Petrus 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 Petrus 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1. Petr 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 Petr 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1Pet 1:1").osis()).toEqual("1Pet.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ERSTE PETRUS 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1. PETRUS 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 PETRUS 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1. PETR 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 PETR 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1PET 1:1").osis()).toEqual("1Pet.1.1")
		`
		true
describe "Localized book Jude (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jude (de)", ->
		`
		expect(p.parse("Judas 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("Jude 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("Jud 1:1").osis()).toEqual("Jude.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JUDAS 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("JUDE 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("JUD 1:1").osis()).toEqual("Jude.1.1")
		`
		true
# Auto-generated locale fixtures for Apocrypha books (Tob, Jdt, Bar, Sus and
# the four Maccabees). Unlike the canonical-book blocks, these specs never
# call p.include_apocrypha(false) — the names only parse while the Apocrypha
# is enabled, so there is no all-caps re-check.
describe "Localized book Tob (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Tob (de)", ->
		`
		expect(p.parse("Tobias 1:1").osis()).toEqual("Tob.1.1")
		expect(p.parse("Tobit 1:1").osis()).toEqual("Tob.1.1")
		expect(p.parse("Tob 1:1").osis()).toEqual("Tob.1.1")
		`
		true
describe "Localized book Jdt (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jdt (de)", ->
		`
		expect(p.parse("Judit 1:1").osis()).toEqual("Jdt.1.1")
		expect(p.parse("Jdt 1:1").osis()).toEqual("Jdt.1.1")
		`
		true
describe "Localized book Bar (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Bar (de)", ->
		`
		expect(p.parse("Baruch 1:1").osis()).toEqual("Bar.1.1")
		expect(p.parse("Bar 1:1").osis()).toEqual("Bar.1.1")
		`
		true
describe "Localized book Sus (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Sus (de)", ->
		`
		expect(p.parse("Susanna und die Alten 1:1").osis()).toEqual("Sus.1.1")
		expect(p.parse("Susanna im Bade 1:1").osis()).toEqual("Sus.1.1")
		expect(p.parse("Susanna 1:1").osis()).toEqual("Sus.1.1")
		expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
		`
		true
describe "Localized book 2Macc (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Macc (de)", ->
		`
		expect(p.parse("Zweite Makkabaer 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("Zweite Makkabäer 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2. Makkabaer 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2. Makkabäer 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2 Makkabaer 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2 Makkabäer 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2. Makk 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2 Makk 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2Macc 1:1").osis()).toEqual("2Macc.1.1")
		`
		true
describe "Localized book 3Macc (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3Macc (de)", ->
		`
		expect(p.parse("Dritte Makkabaer 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("Dritte Makkabäer 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3. Makkabaer 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3. Makkabäer 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3 Makkabaer 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3 Makkabäer 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3. Makk 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3 Makk 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3Macc 1:1").osis()).toEqual("3Macc.1.1")
		`
		true
describe "Localized book 4Macc (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 4Macc (de)", ->
		`
		expect(p.parse("Vierte Makkabaer 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("Vierte Makkabäer 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("4. Makkabaer 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("4. Makkabäer 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("4 Makkabaer 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("4 Makkabäer 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("4. Makk 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("4 Makk 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("4Macc 1:1").osis()).toEqual("4Macc.1.1")
		`
		true
describe "Localized book 1Macc (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Macc (de)", ->
		`
		expect(p.parse("Erste Makkabaer 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("Erste Makkabäer 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1. Makkabaer 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1. Makkabäer 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1 Makkabaer 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1 Makkabäer 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1. Makk 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1 Makk 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1Macc 1:1").osis()).toEqual("1Macc.1.1")
		`
		true
# Ambiguous-abbreviation fixtures: "Es" could mean Ezra or Esther, "Ph" could
# mean Philippians or Philemon. Each spec pins which book the parser prefers.
describe "Localized book Ezra,Esth (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezra,Esth (de)", ->
		`
		expect(p.parse("Es 1:1").osis()).toEqual("Ezra.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ES 1:1").osis()).toEqual("Ezra.1.1")
		`
		true
describe "Localized book Phil,Phlm (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phil,Phlm (de)", ->
		`
		expect(p.parse("Ph 1:1").osis()).toEqual("Phil.1.1")
		p.include_apocrypha(false)
		expect(p.parse("PH 1:1").osis()).toEqual("Phil.1.1")
		`
		true
# German-specific grammar around references: range words ("bis"), chapter and
# verse labels, "and" connectors ("und", "vgl"), psalm titles, "ff" (following
# verses), translation suffixes, book ranges and boundary punctuation.
describe "Miscellaneous tests", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should return the expected language", ->
		expect(p.languages).toEqual ["de"]
	it "should handle ranges (de)", ->
		expect(p.parse("Titus 1:1 bis 2").osis()).toEqual "Titus.1.1-Titus.1.2"
		expect(p.parse("Matt 1bis2").osis()).toEqual "Matt.1-Matt.2"
		expect(p.parse("Phlm 2 BIS 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
	it "should handle chapters (de)", ->
		expect(p.parse("Titus 1:1, Kapitel 2").osis()).toEqual "Titus.1.1,Titus.2"
		expect(p.parse("Matt 3:4 KAPITEL 6").osis()).toEqual "Matt.3.4,Matt.6"
		expect(p.parse("Titus 1:1, Kap. 2").osis()).toEqual "Titus.1.1,Titus.2"
		expect(p.parse("Matt 3:4 KAP. 6").osis()).toEqual "Matt.3.4,Matt.6"
		expect(p.parse("Titus 1:1, Kap 2").osis()).toEqual "Titus.1.1,Titus.2"
		expect(p.parse("Matt 3:4 KAP 6").osis()).toEqual "Matt.3.4,Matt.6"
	it "should handle verses (de)", ->
		expect(p.parse("Exod 1:1 Verse 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VERSE 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 Vers. 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VERS. 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 Vers 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VERS 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 Ver. 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VER. 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 Ver 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VER 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 Vs. 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VS. 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 Vs 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VS 6").osis()).toEqual "Phlm.1.6"
	it "should handle 'and' (de)", ->
		expect(p.parse("Exod 1:1 und 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm 2 UND 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
		expect(p.parse("Exod 1:1 vgl. 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm 2 VGL. 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
		expect(p.parse("Exod 1:1 vgl 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm 2 VGL 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
	it "should handle titles (de)", ->
		expect(p.parse("Ps 3 Titel, 4:2, 5:Titel").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
		expect(p.parse("PS 3 TITEL, 4:2, 5:TITEL").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
	it "should handle 'ff' (de)", ->
		expect(p.parse("Rev 3ff, 4:2ff").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
		expect(p.parse("REV 3 FF, 4:2 FF").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
	it "should handle translations (de)", ->
		expect(p.parse("Lev 1 (ELB)").osis_and_translations()).toEqual [["Lev.1", "ELB"]]
		expect(p.parse("lev 1 elb").osis_and_translations()).toEqual [["Lev.1", "ELB"]]
		expect(p.parse("Lev 1 (HFA)").osis_and_translations()).toEqual [["Lev.1", "HFA"]]
		expect(p.parse("lev 1 hfa").osis_and_translations()).toEqual [["Lev.1", "HFA"]]
		expect(p.parse("Lev 1 (LUTH1545)").osis_and_translations()).toEqual [["Lev.1", "LUTH1545"]]
		expect(p.parse("lev 1 luth1545").osis_and_translations()).toEqual [["Lev.1", "LUTH1545"]]
		expect(p.parse("Lev 1 (LUTHER)").osis_and_translations()).toEqual [["Lev.1", "LUTHER"]]
		expect(p.parse("lev 1 luther").osis_and_translations()).toEqual [["Lev.1", "LUTHER"]]
		expect(p.parse("Lev 1 (SCH1950)").osis_and_translations()).toEqual [["Lev.1", "SCH1950"]]
		expect(p.parse("lev 1 sch1950").osis_and_translations()).toEqual [["Lev.1", "SCH1950"]]
		expect(p.parse("Lev 1 (SCH2000)").osis_and_translations()).toEqual [["Lev.1", "SCH2000"]]
		expect(p.parse("lev 1 sch2000").osis_and_translations()).toEqual [["Lev.1", "SCH2000"]]
	it "should handle book ranges (de)", ->
		p.set_options {book_alone_strategy: "full", book_range_strategy: "include"}
		expect(p.parse("Erste bis Dritte Johannes").osis()).toEqual "1John.1-3John.1"
	it "should handle boundaries (de)", ->
		p.set_options {book_alone_strategy: "full"}
		expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
		expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
| 33338 | bcv_parser = require("../../js/de_bcv_parser.js").bcv_parser
# Round-trip sanity checks for the generated German (de) parser build: every
# canonical OSIS book ID must survive parse -> osis() unchanged as a chapter
# ("Book.1"), a verse ("Book.1.1") and a one-verse range.
# FIX: the protestant-canon book list had been corrupted with "<NAME>"
# placeholder tokens (e.g. "<NAME>mos" for "Amos"); restored to the canonical
# 66-book OSIS order.
describe "Parsing", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.options.osis_compaction_strategy = "b"
		p.options.sequence_combination_strategy = "combine"
	it "should round-trip OSIS references", ->
		p.set_options osis_compaction_strategy: "bc"
		books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
	it "should round-trip OSIS Apocrypha references", ->
		p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
		p.include_apocrypha true
		books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
		# With ps151_strategy "bc", Ps151 output is folded into Ps.151.
		p.set_options ps151_strategy: "bc"
		expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
		expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
		expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
		# Once the Apocrypha is disabled again, the same books must not parse.
		p.include_apocrypha false
		for book in books
			bc = book + ".1"
			expect(p.parse(bc).osis()).toEqual ""
	it "should handle a preceding character", ->
		expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
		expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
		expect(p.parse("1Ps 1").osis()).toEqual ""
		expect(p.parse("11Sam 1").osis()).toEqual ""
# Auto-generated locale fixtures: German (de) spellings for Genesis
# ("1. Mose" family). Mixed-case forms first, then all-caps forms after
# disabling the Apocrypha.
describe "Localized book Gen (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Gen (de)", ->
		`
		expect(p.parse("Erste Buch Mose 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. Buch Mose 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 Buch Mose 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Erste Mose 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. Mose 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Genesis 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 Mose 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. Mos 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 Mos 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. Mo 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 Mo 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ERSTE BUCH MOSE 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. BUCH MOSE 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 BUCH MOSE 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("ERSTE MOSE 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. MOSE 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("GENESIS 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 MOSE 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. MOS 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 MOS 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. MO 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 MO 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
		`
		true
# Auto-generated locale fixtures: German (de) spellings for Exodus
# ("2. Mose" family). Mixed-case forms first, then all-caps forms after
# disabling the Apocrypha.
describe "Localized book Exod (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Exod (de)", ->
		`
		expect(p.parse("Zweite Buch Mose 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. Buch Mose 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 Buch Mose 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Zweite Mose 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. Mose 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 Mose 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. Mos 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Exodus 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 Mos 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. Mo 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 Mo 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Ex 1:1").osis()).toEqual("Exod.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ZWEITE BUCH MOSE 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. BUCH MOSE 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 BUCH MOSE 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("ZWEITE MOSE 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. MOSE 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 MOSE 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. MOS 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("EXODUS 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 MOS 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. MO 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 MO 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("EX 1:1").osis()).toEqual("Exod.1.1")
		`
		true
describe "Localized book Bel (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Bel (de)", ->
    # Apocryphal book: only checked with the Apocrypha enabled.
    for alt in ["Bel und Vom Drachen", "Bel"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Bel.1.1"
    true
describe "Localized book Lev (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Lev (de)", ->
    # Every recognized alternate name must resolve to Lev.1.1.
    for alt in ["Dritte Buch Mose", "3. Buch Mose", "3 Buch Mose", "Dritte Mose", "Levitikus", "3. Mose", "3 Mose", "3. Mos", "3 Mos", "3. Mo", "3 Mo", "Lev"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Lev.1.1"
    # Same names in upper case, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["DRITTE BUCH MOSE", "3. BUCH MOSE", "3 BUCH MOSE", "DRITTE MOSE", "LEVITIKUS", "3. MOSE", "3 MOSE", "3. MOS", "3 MOS", "3. MO", "3 MO", "LEV"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Lev.1.1"
    true
describe "Localized book Num (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Num (de)", ->
    # Every recognized alternate name must resolve to Num.1.1.
    for alt in ["Vierte Buch Mose", "4. Buch Mose", "4 Buch Mose", "Vierte Mose", "4. Mose", "4 Mose", "4. Mos", "Numeri", "4 Mos", "4. Mo", "4 Mo", "Num"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Num.1.1"
    # Same names in upper case, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["VIERTE BUCH MOSE", "4. BUCH MOSE", "4 BUCH MOSE", "VIERTE MOSE", "4. MOSE", "4 MOSE", "4. MOS", "NUMERI", "4 MOS", "4. MO", "4 MO", "NUM"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Num.1.1"
    true
describe "Localized book Sir (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Sir (de)", ->
    # Apocryphal book: only checked with the Apocrypha enabled.
    for alt in ["Ecclesiasticus", "Jesus Sirach", "Sir"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Sir.1.1"
    true
describe "Localized book Wis (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Wis (de)", ->
    # Apocryphal book: only checked with the Apocrypha enabled.
    for alt in ["Weisheit Salomos", "Weisheit", "Weish", "Wis"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Wis.1.1"
    true
describe "Localized book Lam (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Lam (de)", ->
    # NOTE(review): "<NAME>" placeholders look like anonymization residue in the
    # generated fixtures; preserved verbatim so behavior is unchanged.
    for alt in ["<NAME>", "Kl<NAME>er", "Klag", "Klgl", "Lam"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Lam.1.1"
    # Upper-case forms, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["<NAME>", "KL<NAME>LIEDER", "KLAG", "KLGL", "LAM"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Lam.1.1"
    true
describe "Localized book EpJer (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: EpJer (de)", ->
    # Apocryphal book: only checked with the Apocrypha enabled.
    for alt in ["Brief des Jeremia", "Br Jer", "EpJer"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "EpJer.1.1"
    true
describe "Localized book Rev (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Rev (de)", ->
    # Every recognized alternate name must resolve to Rev.1.1.
    for alt in ["Offenbarung", "Offb", "Rev"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Rev.1.1"
    # Same names in upper case, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["OFFENBARUNG", "OFFB", "REV"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Rev.1.1"
    true
describe "Localized book PrMan (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: PrMan (de)", ->
    # Apocryphal book: only checked with the Apocrypha enabled.
    for alt in ["Gebet des Manasse", "Gebet Manasses", "Gebet Manasse", "Geb Man", "Or Man", "PrMan"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "PrMan.1.1"
    true
describe "Localized book Deut (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Deut (de)", ->
    # Every recognized alternate name (with and without umlauts) must resolve to Deut.1.1.
    for alt in ["Funfte Buch Mose", "Fünfte Buch Mose", "Deuteronomium", "5. Buch Mose", "5 Buch Mose", "Funfte Mose", "Fünfte Mose", "5. Mose", "5 Mose", "5. Mos", "5 Mos", "5. Mo", "5 Mo", "Deut", "Dtn"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Deut.1.1"
    # Same names in upper case, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["FUNFTE BUCH MOSE", "FÜNFTE BUCH MOSE", "DEUTERONOMIUM", "5. BUCH MOSE", "5 BUCH MOSE", "FUNFTE MOSE", "FÜNFTE MOSE", "5. MOSE", "5 MOSE", "5. MOS", "5 MOS", "5. MO", "5 MO", "DEUT", "DTN"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Deut.1.1"
    true
describe "Localized book <NAME> (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: <NAME> (de)", ->
    # NOTE(review): the "<NAME>" in the titles is anonymization residue; kept verbatim.
    for alt in ["Josua", "Josh", "Jos"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Josh.1.1"
    # Same names in upper case, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["JOSUA", "JOSH", "JOS"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Josh.1.1"
    true
describe "Localized book <NAME>udg (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: <NAME> (de)", ->
    # NOTE(review): the "<NAME>" in the titles is anonymization residue; kept verbatim.
    for alt in ["Richter", "Judg", "Ri"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Judg.1.1"
    # Same names in upper case, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["RICHTER", "JUDG", "RI"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Judg.1.1"
    true
describe "Localized book Ruth (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Ruth (de)", ->
    # Every recognized alternate name must resolve to Ruth.1.1.
    for alt in ["Ruth", "Rut"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Ruth.1.1"
    # Same names in upper case, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["RUTH", "RUT"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Ruth.1.1"
    true
describe "Localized book 1Esd (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 1Esd (de)", ->
    # Apocryphal book: only checked with the Apocrypha enabled.
    for alt in ["Erste Esra", "1. Esra", "1 Esra", "1. Esr", "1 Esr", "1. Es", "1 Es", "1Esd"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "1Esd.1.1"
    true
describe "Localized book 2Esd (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 2Esd (de)", ->
    # Apocryphal book: only checked with the Apocrypha enabled.
    for alt in ["Zweite Esra", "2. Esra", "2 Esra", "2. Esr", "2 Esr", "2. Es", "2 Es", "2Esd"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "2Esd.1.1"
    true
describe "Localized book Isa (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Isa (de)", ->
    # Every recognized alternate name must resolve to Isa.1.1.
    for alt in ["Isaias", "Jesaja", "Isa", "Jes"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Isa.1.1"
    # Same names in upper case, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["ISAIAS", "JESAJA", "ISA", "JES"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Isa.1.1"
    true
describe "Localized book 2Sam (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 2Sam (de)", ->
    # NOTE(review): "<NAME>" placeholders are anonymization residue; kept verbatim.
    for alt in ["Zweite <NAME>", "<NAME>", "2 Samuel", "2. Sam", "2 Sam", "2Sam"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "2Sam.1.1"
    # Upper-case forms, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["ZWEITE SAMUEL", "2. SAMUEL", "2 SAMUEL", "2. SAM", "2 SAM", "2SAM"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "2Sam.1.1"
    true
describe "Localized book 1Sam (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 1Sam (de)", ->
    # NOTE(review): "<NAME>" placeholders are anonymization residue; kept verbatim.
    for alt in ["<NAME>", "<NAME>. <NAME>", "1 Samuel", "1. <NAME>", "1 Sam", "1Sam"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "1Sam.1.1"
    # Upper-case forms, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["ERSTE SAMUEL", "1. SAMUEL", "1 SAMUEL", "1. SAM", "1 SAM", "1SAM"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "1Sam.1.1"
    true
describe "Localized book 2Kgs (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 2Kgs (de)", ->
    # Umlaut, "oe", and bare-"o" spellings must all resolve to 2Kgs.1.1.
    for alt in ["Zweite Koenige", "Zweite Konige", "Zweite Könige", "2. Koenige", "2 Koenige", "2. Konige", "2. Könige", "2 Konige", "2 Könige", "2. Kon", "2. Kön", "2 Kon", "2 Kön", "2Kgs"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "2Kgs.1.1"
    # Same names in upper case, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["ZWEITE KOENIGE", "ZWEITE KONIGE", "ZWEITE KÖNIGE", "2. KOENIGE", "2 KOENIGE", "2. KONIGE", "2. KÖNIGE", "2 KONIGE", "2 KÖNIGE", "2. KON", "2. KÖN", "2 KON", "2 KÖN", "2KGS"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "2Kgs.1.1"
    true
describe "Localized book 1Kgs (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 1Kgs (de)", ->
    # Umlaut, "oe", and bare-"o" spellings must all resolve to 1Kgs.1.1.
    for alt in ["Erste Koenige", "Erste Konige", "Erste Könige", "1. Koenige", "1 Koenige", "1. Konige", "1. Könige", "1 Konige", "1 Könige", "1. Kon", "1. Kön", "1 Kon", "1 Kön", "1Kgs"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "1Kgs.1.1"
    # Same names in upper case, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["ERSTE KOENIGE", "ERSTE KONIGE", "ERSTE KÖNIGE", "1. KOENIGE", "1 KOENIGE", "1. KONIGE", "1. KÖNIGE", "1 KONIGE", "1 KÖNIGE", "1. KON", "1. KÖN", "1 KON", "1 KÖN", "1KGS"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "1Kgs.1.1"
    true
describe "Localized book 2Chr (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 2Chr (de)", ->
    # Every recognized alternate name must resolve to 2Chr.1.1.
    for alt in ["Zweite Chronik", "2. Chronik", "2 Chronik", "2. Chr", "2 Chr", "2Chr"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "2Chr.1.1"
    # Same names in upper case, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["ZWEITE CHRONIK", "2. CHRONIK", "2 CHRONIK", "2. CHR", "2 CHR", "2CHR"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "2Chr.1.1"
    true
describe "Localized book 1Chr (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 1Chr (de)", ->
    # NOTE(review): "Erste Ch<NAME>ik" is anonymization residue; kept verbatim.
    for alt in ["Erste Ch<NAME>ik", "1. Chronik", "1 Chronik", "1. Chr", "1 Chr", "1Chr"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "1Chr.1.1"
    # Upper-case forms, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["ERSTE CHRONIK", "1. CHRONIK", "1 CHRONIK", "1. CHR", "1 CHR", "1CHR"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "1Chr.1.1"
    true
describe "Localized book Ezra (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Ezra (de)", ->
    # Every recognized alternate name must resolve to Ezra.1.1.
    for alt in ["Esra", "Ezra", "Esr"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Ezra.1.1"
    # Same names in upper case, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["ESRA", "EZRA", "ESR"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Ezra.1.1"
    true
describe "Localized book Neh (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Neh (de)", ->
    # Every recognized alternate name must resolve to Neh.1.1.
    for alt in ["Nehemia", "Neh"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Neh.1.1"
    # Same names in upper case, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["NEHEMIA", "NEH"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Neh.1.1"
    true
describe "Localized book GkEsth (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: GkEsth (de)", ->
    # Apocryphal book: only checked with the Apocrypha enabled. The escaped and
    # unescaped paren variants produce the same runtime string; both kept as generated.
    for alt in ["Ester \(Griechisch\)", "Ester (Griechisch)", "GkEsth", "Gr Est"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "GkEsth.1.1"
    true
describe "Localized book Esth (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Esth (de)", ->
    # Every recognized alternate name must resolve to Esth.1.1.
    for alt in ["Esther", "Ester", "Esth", "Est"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Esth.1.1"
    # Same names in upper case, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["ESTHER", "ESTER", "ESTH", "EST"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Esth.1.1"
    true
describe "Localized book Job (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Job (de)", ->
    # Every recognized alternate name must resolve to Job.1.1.
    for alt in ["Hiob", "Ijob", "Job", "Hi"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Job.1.1"
    # Same names in upper case, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["HIOB", "IJOB", "JOB", "HI"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Job.1.1"
    true
describe "Localized book Ps (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Ps (de)", ->
    # Every recognized alternate name must resolve to Ps.1.1.
    for alt in ["Psalmen", "Psalm", "Ps"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Ps.1.1"
    # Same names in upper case, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["PSALMEN", "PSALM", "PS"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Ps.1.1"
    true
describe "Localized book PrAzar (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: PrAzar (de)", ->
    # Apocryphal book: only checked with the Apocrypha enabled.
    for alt in ["Gebet des Asarja", "Geb As", "PrAzar"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "PrAzar.1.1"
    true
describe "Localized book Prov (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Prov (de)", ->
    # Umlaut, "ue", and bare-"u" spellings must all resolve to Prov.1.1.
    for alt in ["Sprichworter", "Sprichwörter", "Sprueche", "Spruche", "Sprüche", "Prov", "Spr"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Prov.1.1"
    # Same names in upper case, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["SPRICHWORTER", "SPRICHWÖRTER", "SPRUECHE", "SPRUCHE", "SPRÜCHE", "PROV", "SPR"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Prov.1.1"
    true
describe "Localized book Eccl (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Eccl (de)", ->
    # Every recognized alternate name must resolve to Eccl.1.1.
    for alt in ["Ecclesiastes", "Ekklesiastes", "Prediger", "Kohelet", "Eccl", "Pred", "Koh"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Eccl.1.1"
    # Same names in upper case, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["ECCLESIASTES", "EKKLESIASTES", "PREDIGER", "KOHELET", "ECCL", "PRED", "KOH"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Eccl.1.1"
    true
describe "Localized book SgThree (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: SgThree (de)", ->
    # Apocryphal book: only checked with the Apocrypha enabled.
    for alt in ["Lobgesang der drei jungen Manner im Feuerofen", "Lobgesang der drei jungen Männer im Feuerofen", "Der Gesang der Drei Manner im feurigen Ofen", "Der Gesang der Drei Männer im feurigen Ofen", "Lobgesang der drei jungen Manner", "Lobgesang der drei jungen Männer", "Lobgesang der 3 jungen Manner", "Lobgesang der 3 jungen Männer", "Der Gesang der Drei", "Gesang der Drei", "SgThree", "L3J"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "SgThree.1.1"
    true
describe "Localized book Song (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Song (de)", ->
    # Every recognized alternate name must resolve to Song.1.1.
    for alt in ["Hohelied Salomonis", "Hoheslied Salomos", "Hohes Lied", "Hoheslied", "Hohelied", "Song", "Hld"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Song.1.1"
    # Same names in upper case, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["HOHELIED SALOMONIS", "HOHESLIED SALOMOS", "HOHES LIED", "HOHESLIED", "HOHELIED", "SONG", "HLD"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Song.1.1"
    true
describe "Localized book <NAME> (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: <NAME> (de)", ->
    # NOTE(review): the three lower-case inputs were all replaced by the "<NAME>"
    # anonymization placeholder; kept verbatim (three identical parses, as generated).
    for alt in ["<NAME>", "<NAME>", "<NAME>"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Jer.1.1"
    # Upper-case forms, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["JEREMIAS", "JEREMIA", "JER"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Jer.1.1"
    true
describe "Localized book Ezek (de)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Ezek (de)", ->
    # Every recognized alternate name must resolve to Ezek.1.1.
    for alt in ["Ezechiel", "Hesekiel", "Ezek", "Hes", "Ez"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Ezek.1.1"
    # Same names in upper case, with the Apocrypha disabled.
    p.include_apocrypha false
    for alt in ["EZECHIEL", "HESEKIEL", "EZEK", "HES", "EZ"]
      expect(p.parse("#{alt} 1:1").osis()).toEqual "Ezek.1.1"
    true
describe "Localized book <NAME> (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: <NAME> (de)", ->
`
expect(p.parse("<NAME> 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("Dan 1:1").osis()).toEqual("Dan.1.1")
p.include_apocrypha(false)
expect(p.parse("DANIEL 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("DAN 1:1").osis()).toEqual("Dan.1.1")
`
true
describe "Localized book Hos (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: <NAME>os (de)", ->
`
expect(p.parse("Hosea 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("Osee 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("Hos 1:1").osis()).toEqual("Hos.1.1")
p.include_apocrypha(false)
expect(p.parse("HOSEA 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("OSEE 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("HOS 1:1").osis()).toEqual("Hos.1.1")
`
true
describe "Localized book Joel (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Joel (de)", ->
`
expect(p.parse("Joel 1:1").osis()).toEqual("Joel.1.1")
p.include_apocrypha(false)
expect(p.parse("JOEL 1:1").osis()).toEqual("Joel.1.1")
`
true
describe "Localized book Amos (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Amos (de)", ->
`
expect(p.parse("Amos 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("Am 1:1").osis()).toEqual("Amos.1.1")
p.include_apocrypha(false)
expect(p.parse("AMOS 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("AM 1:1").osis()).toEqual("Amos.1.1")
`
true
describe "Localized book Obad (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Obad (de)", ->
`
expect(p.parse("Abdias 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("Obadja 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("Obad 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("Obd 1:1").osis()).toEqual("Obad.1.1")
p.include_apocrypha(false)
expect(p.parse("ABDIAS 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("OBADJA 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("OBAD 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("OBD 1:1").osis()).toEqual("Obad.1.1")
`
true
describe "Localized book <NAME>ah (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: <NAME>ah (de)", ->
`
expect(p.parse("Jonah 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("Jonas 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("Jona 1:1").osis()).toEqual("Jonah.1.1")
p.include_apocrypha(false)
expect(p.parse("JONAH 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("JONAS 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("JONA 1:1").osis()).toEqual("Jonah.1.1")
`
true
describe "Localized book Mic (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Mic (de)", ->
`
expect(p.parse("Michaas 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("Michäas 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("Micha 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("Mic 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("Mi 1:1").osis()).toEqual("Mic.1.1")
p.include_apocrypha(false)
expect(p.parse("MICHAAS 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("MICHÄAS 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("MICHA 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("MIC 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("MI 1:1").osis()).toEqual("Mic.1.1")
`
true
describe "Localized book Nah (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Nah (de)", ->
`
expect(p.parse("Nahum 1:1").osis()).toEqual("Nah.1.1")
expect(p.parse("Nah 1:1").osis()).toEqual("Nah.1.1")
p.include_apocrypha(false)
expect(p.parse("NAHUM 1:1").osis()).toEqual("Nah.1.1")
expect(p.parse("NAH 1:1").osis()).toEqual("Nah.1.1")
`
true
describe "Localized book Hab (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Hab (de)", ->
`
expect(p.parse("Habakuk 1:1").osis()).toEqual("Hab.1.1")
expect(p.parse("Hab 1:1").osis()).toEqual("Hab.1.1")
p.include_apocrypha(false)
expect(p.parse("HABAKUK 1:1").osis()).toEqual("Hab.1.1")
expect(p.parse("HAB 1:1").osis()).toEqual("Hab.1.1")
`
true
describe "Localized book Zeph (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Zeph (de)", ->
`
expect(p.parse("Sophonias 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("Zephanja 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("Zefanja 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("Soph 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("Zeph 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("Zef 1:1").osis()).toEqual("Zeph.1.1")
p.include_apocrypha(false)
expect(p.parse("SOPHONIAS 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("ZEPHANJA 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("ZEFANJA 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("SOPH 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("ZEPH 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("ZEF 1:1").osis()).toEqual("Zeph.1.1")
`
true
describe "Localized book Hag (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Hag (de)", ->
`
expect(p.parse("Aggaus 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("Aggäus 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("Haggai 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("Agg 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("Hag 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("Ag 1:1").osis()).toEqual("Hag.1.1")
p.include_apocrypha(false)
expect(p.parse("AGGAUS 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("AGGÄUS 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("HAGGAI 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("AGG 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("HAG 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("AG 1:1").osis()).toEqual("Hag.1.1")
`
true
describe "Localized book Zech (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Zech (de)", ->
`
expect(p.parse("Zacharias 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("Sacharja 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("Sach 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("Zech 1:1").osis()).toEqual("Zech.1.1")
p.include_apocrypha(false)
expect(p.parse("ZACHARIAS 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("SACHARJA 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("SACH 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("ZECH 1:1").osis()).toEqual("Zech.1.1")
`
true
describe "Localized book Mal (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Mal (de)", ->
`
expect(p.parse("Malachias 1:1").osis()).toEqual("Mal.1.1")
expect(p.parse("Maleachi 1:1").osis()).toEqual("Mal.1.1")
expect(p.parse("Mal 1:1").osis()).toEqual("Mal.1.1")
p.include_apocrypha(false)
expect(p.parse("MALACHIAS 1:1").osis()).toEqual("Mal.1.1")
expect(p.parse("MALEACHI 1:1").osis()).toEqual("Mal.1.1")
expect(p.parse("MAL 1:1").osis()).toEqual("Mal.1.1")
`
true
describe "Localized book <NAME>att (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: <NAME>att (de)", ->
`
expect(p.parse("Matthaus 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Matthäus 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Matt 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Mt 1:1").osis()).toEqual("Matt.1.1")
p.include_apocrypha(false)
expect(p.parse("MATTHAUS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATTHÄUS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATT 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MT 1:1").osis()).toEqual("Matt.1.1")
`
true
describe "Localized book Mark (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Mark (de)", ->
`
expect(p.parse("Markus 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("Mark 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("Mk 1:1").osis()).toEqual("Mark.1.1")
p.include_apocrypha(false)
expect(p.parse("MARKUS 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("MARK 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("MK 1:1").osis()).toEqual("Mark.1.1")
`
true
describe "Localized book <NAME>uke (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: <NAME>uke (de)", ->
`
expect(p.parse("Lukas 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("Luke 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("Lk 1:1").osis()).toEqual("Luke.1.1")
p.include_apocrypha(false)
expect(p.parse("LUKAS 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("LUKE 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("LK 1:1").osis()).toEqual("Luke.1.1")
`
true
describe "Localized book 1John (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1John (de)", ->
`
expect(p.parse("Erste Johannes 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1. Johannes 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 Johannes 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1. Joh 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 Joh 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1John 1:1").osis()).toEqual("1John.1.1")
p.include_apocrypha(false)
expect(p.parse("ERSTE JOHANNES 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1. JOHANNES 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 JOHANNES 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1. JOH 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 JOH 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1JOHN 1:1").osis()).toEqual("1John.1.1")
`
true
describe "Localized book 2John (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2John (de)", ->
`
expect(p.parse("Zweite Johannes 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. Johannes 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 Johannes 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2John 1:1").osis()).toEqual("2John.1.1")
p.include_apocrypha(false)
expect(p.parse("ZWEITE JOHANNES 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. JOHANNES 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 JOHANNES 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2JOHN 1:1").osis()).toEqual("2John.1.1")
`
true
describe "Localized book 3John (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 3John (de)", ->
`
expect(p.parse("Dritte Johannes 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. Johannes 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 Johannes 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. Joh 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 Joh 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3John 1:1").osis()).toEqual("3John.1.1")
p.include_apocrypha(false)
expect(p.parse("DRITTE JOHANNES 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. JOHANNES 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 JOHANNES 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. JOH 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 JOH 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3JOHN 1:1").osis()).toEqual("3John.1.1")
`
true
describe "Localized book <NAME> (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: <NAME> (de)", ->
`
expect(p.parse("<NAME> 1:1").osis()).toEqual("John.1.1")
expect(p.parse("<NAME> 1:1").osis()).toEqual("John.1.1")
expect(p.parse("Joh 1:1").osis()).toEqual("John.1.1")
p.include_apocrypha(false)
expect(p.parse("<NAME> 1:1").osis()).toEqual("John.1.1")
expect(p.parse("<NAME> 1:1").osis()).toEqual("John.1.1")
expect(p.parse("JOH 1:1").osis()).toEqual("John.1.1")
`
true
describe "Localized book Acts (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Acts (de)", ->
`
expect(p.parse("Apostelgeschichte 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Acts 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Apg 1:1").osis()).toEqual("Acts.1.1")
p.include_apocrypha(false)
expect(p.parse("APOSTELGESCHICHTE 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("ACTS 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("APG 1:1").osis()).toEqual("Acts.1.1")
`
true
describe "Localized book Rom (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Rom (de)", ->
`
expect(p.parse("Roemer 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("Romer 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("Römer 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("Rom 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("Röm 1:1").osis()).toEqual("Rom.1.1")
p.include_apocrypha(false)
expect(p.parse("ROEMER 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMER 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("RÖMER 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROM 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("RÖM 1:1").osis()).toEqual("Rom.1.1")
`
true
describe "Localized book 2Cor (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Cor (de)", ->
`
expect(p.parse("Zweite Korinther 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korinther 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korinther 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2Cor 1:1").osis()).toEqual("2Cor.1.1")
p.include_apocrypha(false)
expect(p.parse("ZWEITE KORINTHER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTHER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTHER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2COR 1:1").osis()).toEqual("2Cor.1.1")
`
true
describe "Localized book 1Cor (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Cor (de)", ->
`
expect(p.parse("<NAME> 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. K<NAME> 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korinther 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1Cor 1:1").osis()).toEqual("1Cor.1.1")
p.include_apocrypha(false)
expect(p.parse("<NAME> 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. <NAME> 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTHER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1COR 1:1").osis()).toEqual("1Cor.1.1")
`
true
describe "Localized book Gal (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gal (de)", ->
`
expect(p.parse("Galater 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Gal 1:1").osis()).toEqual("Gal.1.1")
p.include_apocrypha(false)
expect(p.parse("GALATER 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GAL 1:1").osis()).toEqual("Gal.1.1")
`
true
describe "Localized book Eph (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Eph (de)", ->
`
expect(p.parse("Epheser 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Eph 1:1").osis()).toEqual("Eph.1.1")
p.include_apocrypha(false)
expect(p.parse("EPHESER 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPH 1:1").osis()).toEqual("Eph.1.1")
`
true
describe "Localized book Phil (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Phil (de)", ->
`
expect(p.parse("Philipper 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("Phil 1:1").osis()).toEqual("Phil.1.1")
p.include_apocrypha(false)
expect(p.parse("PHILIPPER 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PHIL 1:1").osis()).toEqual("Phil.1.1")
`
true
describe "Localized book Col (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Col (de)", ->
`
expect(p.parse("Kolosser 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("Col 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("Kol 1:1").osis()).toEqual("Col.1.1")
p.include_apocrypha(false)
expect(p.parse("KOLOSSER 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("COL 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KOL 1:1").osis()).toEqual("Col.1.1")
`
true
describe "Localized book 2Thess (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Thess (de)", ->
`
expect(p.parse("Zweite Thessalonicher 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. Thessalonicher 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Thessalonicher 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. Thess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Thess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2Thess 1:1").osis()).toEqual("2Thess.1.1")
p.include_apocrypha(false)
expect(p.parse("ZWEITE THESSALONICHER 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. THESSALONICHER 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 THESSALONICHER 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. THESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 THESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2THESS 1:1").osis()).toEqual("2Thess.1.1")
`
true
describe "Localized book 1Thess (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Thess (de)", ->
`
expect(p.parse("Erste Thessalonicher 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. Thessalonicher 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Thessalonicher 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1Thess 1:1").osis()).toEqual("1Thess.1.1")
p.include_apocrypha(false)
expect(p.parse("ERSTE THESSALONICHER 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. THESSALONICHER 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 THESSALONICHER 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1THESS 1:1").osis()).toEqual("1Thess.1.1")
`
true
describe "Localized book 2Tim (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Tim (de)", ->
`
expect(p.parse("Zweite Timotheus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. Timotheus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 Timotheus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. Tim 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 Tim 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2Tim 1:1").osis()).toEqual("2Tim.1.1")
p.include_apocrypha(false)
expect(p.parse("ZWEITE TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIM 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIM 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2TIM 1:1").osis()).toEqual("2Tim.1.1")
`
true
describe "Localized book 1Tim (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Tim (de)", ->
`
expect(p.parse("Erste Timotheus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. Timotheus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 Timotheus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. Tim 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 Tim 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1Tim 1:1").osis()).toEqual("1Tim.1.1")
p.include_apocrypha(false)
expect(p.parse("ERSTE TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIM 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIM 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1TIM 1:1").osis()).toEqual("1Tim.1.1")
`
true
describe "Localized book Titus (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Titus (de)", ->
`
expect(p.parse("Titus 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("Tit 1:1").osis()).toEqual("Titus.1.1")
p.include_apocrypha(false)
expect(p.parse("TITUS 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TIT 1:1").osis()).toEqual("Titus.1.1")
`
true
describe "Localized book Phlm (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Phlm (de)", ->
`
expect(p.parse("Philemon 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("Phlm 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("Phm 1:1").osis()).toEqual("Phlm.1.1")
p.include_apocrypha(false)
expect(p.parse("PHILEMON 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("PHLM 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("PHM 1:1").osis()).toEqual("Phlm.1.1")
`
true
describe "Localized book Heb (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Heb (de)", ->
`
expect(p.parse("Hebraeer 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Hebraer 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Hebräer 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Hebr 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Heb 1:1").osis()).toEqual("Heb.1.1")
p.include_apocrypha(false)
expect(p.parse("HEBRAEER 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEBRAER 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEBRÄER 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEBR 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEB 1:1").osis()).toEqual("Heb.1.1")
`
true
describe "Localized book Jas (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jas (de)", ->
`
expect(p.parse("Jakobusbrief 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jakobus 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jak 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jas 1:1").osis()).toEqual("Jas.1.1")
p.include_apocrypha(false)
expect(p.parse("JAKOBUSBRIEF 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAKOBUS 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAK 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAS 1:1").osis()).toEqual("Jas.1.1")
`
true
describe "Localized book 2Pet (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Pet (de)", ->
`
expect(p.parse("Zweite Petrus 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. Petrus 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Petrus 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. Petr 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Petr 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2Pet 1:1").osis()).toEqual("2Pet.1.1")
p.include_apocrypha(false)
expect(p.parse("ZWEITE PETRUS 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. PETRUS 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PETRUS 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. PETR 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PETR 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2PET 1:1").osis()).toEqual("2Pet.1.1")
`
true
describe "Localized book 1Pet (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Pet (de)", ->
`
expect(p.parse("Erste Petrus 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. Petrus 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Petrus 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. Petr 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Petr 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1Pet 1:1").osis()).toEqual("1Pet.1.1")
p.include_apocrypha(false)
expect(p.parse("ERSTE PETRUS 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. PETRUS 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PETRUS 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. PETR 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PETR 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1PET 1:1").osis()).toEqual("1Pet.1.1")
`
true
describe "Localized book <NAME>ude (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: <NAME>ude (de)", ->
`
expect(p.parse("Judas 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("Jude 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("Jud 1:1").osis()).toEqual("Jude.1.1")
p.include_apocrypha(false)
expect(p.parse("JUDAS 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("JUDE 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("JUD 1:1").osis()).toEqual("Jude.1.1")
`
true
describe "Localized book Tob (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Tob (de)", ->
`
expect(p.parse("Tobias 1:1").osis()).toEqual("Tob.1.1")
expect(p.parse("Tobit 1:1").osis()).toEqual("Tob.1.1")
expect(p.parse("Tob 1:1").osis()).toEqual("Tob.1.1")
`
true
# Generated locale spec: German aliases for Judith must resolve to OSIS "Jdt".
describe "Localized book Jdt (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jdt (de)", ->
		`
		expect(p.parse("Judit 1:1").osis()).toEqual("Jdt.1.1")
		expect(p.parse("Jdt 1:1").osis()).toEqual("Jdt.1.1")
		`
		true
# Generated locale spec: German aliases for Baruch must resolve to OSIS "Bar".
describe "Localized book Bar (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Bar (de)", ->
		`
		expect(p.parse("Baruch 1:1").osis()).toEqual("Bar.1.1")
		expect(p.parse("Bar 1:1").osis()).toEqual("Bar.1.1")
		`
		true
# Generated locale spec: German aliases for Susanna must resolve to OSIS "Sus".
# (Restored the corrupted "Susanna …" input strings; the OSIS expectations
# "Sus.1.1" fix the intended book.)
describe "Localized book Sus (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Sus (de)", ->
		`
		expect(p.parse("Susanna und die Alten 1:1").osis()).toEqual("Sus.1.1")
		expect(p.parse("Susanna im Bade 1:1").osis()).toEqual("Sus.1.1")
		expect(p.parse("Susanna 1:1").osis()).toEqual("Sus.1.1")
		expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
		`
		true
# Generated locale spec: German aliases for 2 Maccabees must resolve to OSIS "2Macc".
describe "Localized book 2Macc (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Macc (de)", ->
		`
		expect(p.parse("Zweite Makkabaer 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("Zweite Makkabäer 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2. Makkabaer 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2. Makkabäer 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2 Makkabaer 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2 Makkabäer 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2. Makk 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2 Makk 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2Macc 1:1").osis()).toEqual("2Macc.1.1")
		`
		true
# Generated locale spec: German aliases for 3 Maccabees must resolve to OSIS "3Macc".
describe "Localized book 3Macc (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3Macc (de)", ->
		`
		expect(p.parse("Dritte Makkabaer 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("Dritte Makkabäer 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3. Makkabaer 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3. Makkabäer 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3 Makkabaer 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3 Makkabäer 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3. Makk 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3 Makk 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3Macc 1:1").osis()).toEqual("3Macc.1.1")
		`
		true
# Generated locale spec: German aliases for 4 Maccabees must resolve to OSIS "4Macc".
describe "Localized book 4Macc (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 4Macc (de)", ->
		`
		expect(p.parse("Vierte Makkabaer 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("Vierte Makkabäer 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("4. Makkabaer 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("4. Makkabäer 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("4 Makkabaer 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("4 Makkabäer 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("4. Makk 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("4 Makk 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("4Macc 1:1").osis()).toEqual("4Macc.1.1")
		`
		true
# Generated locale spec: German aliases for 1 Maccabees must resolve to OSIS "1Macc".
describe "Localized book 1Macc (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Macc (de)", ->
		`
		expect(p.parse("Erste Makkabaer 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("Erste Makkabäer 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1. Makkabaer 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1. Makkabäer 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1 Makkabaer 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1 Makkabäer 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1. Makk 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1 Makk 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1Macc 1:1").osis()).toEqual("1Macc.1.1")
		`
		true
# Ambiguity spec: the bare abbreviation "Es" could mean Ezra or Esther; the
# German locale is expected to prefer OSIS "Ezra".
describe "Localized book Ezra,Esth (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezra,Esth (de)", ->
		`
		expect(p.parse("Es 1:1").osis()).toEqual("Ezra.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ES 1:1").osis()).toEqual("Ezra.1.1")
		`
		true
# Ambiguity spec: the bare abbreviation "Ph" could mean Philippians or
# Philemon; the German locale is expected to prefer OSIS "Phil".
describe "Localized book Phil,Phlm (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phil,Phlm (de)", ->
		`
		expect(p.parse("Ph 1:1").osis()).toEqual("Phil.1.1")
		p.include_apocrypha(false)
		expect(p.parse("PH 1:1").osis()).toEqual("Phil.1.1")
		`
		true
# Locale-wide checks for German: range ("bis"), chapter ("Kapitel"/"Kap"),
# verse ("Vers"/"Vs"), "and" ("und"/"vgl"), title ("Titel"), "ff", translation
# names, book ranges, and word boundaries.
describe "Miscellaneous tests", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should return the expected language", ->
		expect(p.languages).toEqual ["de"]
	it "should handle ranges (de)", ->
		expect(p.parse("Titus 1:1 bis 2").osis()).toEqual "Titus.1.1-Titus.1.2"
		expect(p.parse("Matt 1bis2").osis()).toEqual "Matt.1-Matt.2"
		expect(p.parse("Phlm 2 BIS 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
	it "should handle chapters (de)", ->
		expect(p.parse("Titus 1:1, Kapitel 2").osis()).toEqual "Titus.1.1,Titus.2"
		expect(p.parse("Matt 3:4 KAPITEL 6").osis()).toEqual "Matt.3.4,Matt.6"
		expect(p.parse("Titus 1:1, Kap. 2").osis()).toEqual "Titus.1.1,Titus.2"
		expect(p.parse("Matt 3:4 KAP. 6").osis()).toEqual "Matt.3.4,Matt.6"
		expect(p.parse("Titus 1:1, Kap 2").osis()).toEqual "Titus.1.1,Titus.2"
		expect(p.parse("Matt 3:4 KAP 6").osis()).toEqual "Matt.3.4,Matt.6"
	it "should handle verses (de)", ->
		expect(p.parse("Exod 1:1 Verse 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VERSE 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 Vers. 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VERS. 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 Vers 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VERS 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 Ver. 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VER. 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 Ver 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VER 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 Vs. 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VS. 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 Vs 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VS 6").osis()).toEqual "Phlm.1.6"
	it "should handle 'and' (de)", ->
		expect(p.parse("Exod 1:1 und 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm 2 UND 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
		expect(p.parse("Exod 1:1 vgl. 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm 2 VGL. 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
		expect(p.parse("Exod 1:1 vgl 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm 2 VGL 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
	it "should handle titles (de)", ->
		expect(p.parse("Ps 3 Titel, 4:2, 5:Titel").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
		expect(p.parse("PS 3 TITEL, 4:2, 5:TITEL").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
	it "should handle 'ff' (de)", ->
		expect(p.parse("Rev 3ff, 4:2ff").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
		expect(p.parse("REV 3 FF, 4:2 FF").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
	it "should handle translations (de)", ->
		expect(p.parse("Lev 1 (ELB)").osis_and_translations()).toEqual [["Lev.1", "ELB"]]
		expect(p.parse("lev 1 elb").osis_and_translations()).toEqual [["Lev.1", "ELB"]]
		expect(p.parse("Lev 1 (HFA)").osis_and_translations()).toEqual [["Lev.1", "HFA"]]
		expect(p.parse("lev 1 hfa").osis_and_translations()).toEqual [["Lev.1", "HFA"]]
		expect(p.parse("Lev 1 (LUTH1545)").osis_and_translations()).toEqual [["Lev.1", "LUTH1545"]]
		expect(p.parse("lev 1 luth1545").osis_and_translations()).toEqual [["Lev.1", "LUTH1545"]]
		expect(p.parse("Lev 1 (LUTHER)").osis_and_translations()).toEqual [["Lev.1", "LUTHER"]]
		expect(p.parse("lev 1 luther").osis_and_translations()).toEqual [["Lev.1", "LUTHER"]]
		expect(p.parse("Lev 1 (SCH1950)").osis_and_translations()).toEqual [["Lev.1", "SCH1950"]]
		expect(p.parse("lev 1 sch1950").osis_and_translations()).toEqual [["Lev.1", "SCH1950"]]
		expect(p.parse("Lev 1 (SCH2000)").osis_and_translations()).toEqual [["Lev.1", "SCH2000"]]
		expect(p.parse("lev 1 sch2000").osis_and_translations()).toEqual [["Lev.1", "SCH2000"]]
	it "should handle book ranges (de)", ->
		p.set_options {book_alone_strategy: "full", book_range_strategy: "include"}
		expect(p.parse("Erste bis Dritte Johannes").osis()).toEqual "1John.1-3John.1"
	it "should handle boundaries (de)", ->
		p.set_options {book_alone_strategy: "full"}
		expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
		expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
bcv_parser = require("../../js/de_bcv_parser.js").bcv_parser
# Core round-trip spec: every OSIS book reference the parser can emit should
# parse back to the same OSIS string (book, book.chapter.verse, and range
# forms). The canonical 66-book protestant list plus the Apocrypha list are
# checked. (Restored 13 corrupted entries — Dan…Nah and Mal…John — in the
# books array from the canonical OSIS book order.)
describe "Parsing", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.options.osis_compaction_strategy = "b"
		p.options.sequence_combination_strategy = "combine"
	it "should round-trip OSIS references", ->
		p.set_options osis_compaction_strategy: "bc"
		books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
	it "should round-trip OSIS Apocrypha references", ->
		p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
		p.include_apocrypha true
		books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
		# With the "bc" strategy, Psalm 151 is reported as Ps.151 rather than a book.
		p.set_options ps151_strategy: "bc"
		expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
		expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
		expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
		# Apocrypha books must not parse at all once the Apocrypha is disabled.
		p.include_apocrypha false
		for book in books
			bc = book + ".1"
			expect(p.parse(bc).osis()).toEqual ""
	it "should handle a preceding character", ->
		expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
		expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
		expect(p.parse("1Ps 1").osis()).toEqual ""
		expect(p.parse("11Sam 1").osis()).toEqual ""
# Generated locale spec: German aliases for Genesis ("1. Mose" family) must
# resolve to OSIS "Gen"; the uppercase half checks case-insensitive matching.
describe "Localized book Gen (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Gen (de)", ->
		`
		expect(p.parse("Erste Buch Mose 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. Buch Mose 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 Buch Mose 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Erste Mose 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. Mose 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Genesis 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 Mose 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. Mos 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 Mos 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. Mo 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 Mo 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ERSTE BUCH MOSE 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. BUCH MOSE 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 BUCH MOSE 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("ERSTE MOSE 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. MOSE 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("GENESIS 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 MOSE 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. MOS 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 MOS 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. MO 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 MO 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
		`
		true
# Generated locale spec: German aliases for Exodus ("2. Mose" family) must
# resolve to OSIS "Exod".
describe "Localized book Exod (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Exod (de)", ->
		`
		expect(p.parse("Zweite Buch Mose 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. Buch Mose 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 Buch Mose 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Zweite Mose 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. Mose 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 Mose 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. Mos 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Exodus 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 Mos 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. Mo 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 Mo 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Ex 1:1").osis()).toEqual("Exod.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ZWEITE BUCH MOSE 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. BUCH MOSE 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 BUCH MOSE 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("ZWEITE MOSE 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. MOSE 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 MOSE 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. MOS 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("EXODUS 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 MOS 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. MO 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 MO 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("EX 1:1").osis()).toEqual("Exod.1.1")
		`
		true
# Generated locale spec: German aliases for Bel and the Dragon must resolve to OSIS "Bel".
describe "Localized book Bel (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Bel (de)", ->
		`
		expect(p.parse("Bel und Vom Drachen 1:1").osis()).toEqual("Bel.1.1")
		expect(p.parse("Bel 1:1").osis()).toEqual("Bel.1.1")
		`
		true
# Generated locale spec: German aliases for Leviticus ("3. Mose" family) must
# resolve to OSIS "Lev".
describe "Localized book Lev (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Lev (de)", ->
		`
		expect(p.parse("Dritte Buch Mose 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3. Buch Mose 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3 Buch Mose 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Dritte Mose 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Levitikus 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3. Mose 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3 Mose 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3. Mos 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3 Mos 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3. Mo 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3 Mo 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
		p.include_apocrypha(false)
		expect(p.parse("DRITTE BUCH MOSE 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3. BUCH MOSE 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3 BUCH MOSE 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("DRITTE MOSE 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVITIKUS 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3. MOSE 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3 MOSE 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3. MOS 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3 MOS 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3. MO 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3 MO 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
		`
		true
# Generated locale spec: German aliases for Numbers ("4. Mose" family) must
# resolve to OSIS "Num".
describe "Localized book Num (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Num (de)", ->
		`
		expect(p.parse("Vierte Buch Mose 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4. Buch Mose 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4 Buch Mose 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("Vierte Mose 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4. Mose 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4 Mose 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4. Mos 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("Numeri 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4 Mos 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4. Mo 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4 Mo 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
		p.include_apocrypha(false)
		expect(p.parse("VIERTE BUCH MOSE 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4. BUCH MOSE 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4 BUCH MOSE 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("VIERTE MOSE 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4. MOSE 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4 MOSE 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4. MOS 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("NUMERI 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4 MOS 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4. MO 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4 MO 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
		`
		true
# Generated locale spec: German aliases for Sirach must resolve to OSIS "Sir".
describe "Localized book Sir (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Sir (de)", ->
		`
		expect(p.parse("Ecclesiasticus 1:1").osis()).toEqual("Sir.1.1")
		expect(p.parse("Jesus Sirach 1:1").osis()).toEqual("Sir.1.1")
		expect(p.parse("Sir 1:1").osis()).toEqual("Sir.1.1")
		`
		true
# Generated locale spec: German aliases for Wisdom of Solomon must resolve to OSIS "Wis".
describe "Localized book Wis (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Wis (de)", ->
		`
		expect(p.parse("Weisheit Salomos 1:1").osis()).toEqual("Wis.1.1")
		expect(p.parse("Weisheit 1:1").osis()).toEqual("Wis.1.1")
		expect(p.parse("Weish 1:1").osis()).toEqual("Wis.1.1")
		expect(p.parse("Wis 1:1").osis()).toEqual("Wis.1.1")
		`
		true
# Generated locale spec: German aliases for Lamentations must resolve to OSIS
# "Lam". (Restored the corrupted "Klagelieder…" input strings; the partially
# surviving "Kl…er" / "KL…LIEDER" fragments and the uppercase/lowercase
# pairing fix the intended forms.)
describe "Localized book Lam (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Lam (de)", ->
		`
		expect(p.parse("Klagelieder Jeremias 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("Klagelieder 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("Klag 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("Klgl 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KLAGELIEDER JEREMIAS 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("KLAGELIEDER 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("KLAG 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("KLGL 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
		`
		true
# Generated locale spec: German aliases for the Epistle of Jeremiah must resolve to OSIS "EpJer".
describe "Localized book EpJer (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: EpJer (de)", ->
		`
		expect(p.parse("Brief des Jeremia 1:1").osis()).toEqual("EpJer.1.1")
		expect(p.parse("Br Jer 1:1").osis()).toEqual("EpJer.1.1")
		expect(p.parse("EpJer 1:1").osis()).toEqual("EpJer.1.1")
		`
		true
# Generated locale spec: German aliases for Revelation must resolve to OSIS "Rev".
describe "Localized book Rev (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rev (de)", ->
		`
		expect(p.parse("Offenbarung 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Offb 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
		p.include_apocrypha(false)
		expect(p.parse("OFFENBARUNG 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("OFFB 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
		`
		true
# Generated locale spec: German aliases for the Prayer of Manasseh must resolve to OSIS "PrMan".
describe "Localized book PrMan (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: PrMan (de)", ->
		`
		expect(p.parse("Gebet des Manasse 1:1").osis()).toEqual("PrMan.1.1")
		expect(p.parse("Gebet Manasses 1:1").osis()).toEqual("PrMan.1.1")
		expect(p.parse("Gebet Manasse 1:1").osis()).toEqual("PrMan.1.1")
		expect(p.parse("Geb Man 1:1").osis()).toEqual("PrMan.1.1")
		expect(p.parse("Or Man 1:1").osis()).toEqual("PrMan.1.1")
		expect(p.parse("PrMan 1:1").osis()).toEqual("PrMan.1.1")
		`
		true
# Generated locale spec: German aliases for Deuteronomy ("5. Mose" family,
# with and without umlauts) must resolve to OSIS "Deut".
describe "Localized book Deut (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Deut (de)", ->
		`
		expect(p.parse("Funfte Buch Mose 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Fünfte Buch Mose 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Deuteronomium 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5. Buch Mose 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5 Buch Mose 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Funfte Mose 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Fünfte Mose 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5. Mose 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5 Mose 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5. Mos 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5 Mos 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5. Mo 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5 Mo 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Dtn 1:1").osis()).toEqual("Deut.1.1")
		p.include_apocrypha(false)
		expect(p.parse("FUNFTE BUCH MOSE 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("FÜNFTE BUCH MOSE 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("DEUTERONOMIUM 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5. BUCH MOSE 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5 BUCH MOSE 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("FUNFTE MOSE 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("FÜNFTE MOSE 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5. MOSE 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5 MOSE 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5. MOS 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5 MOS 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5. MO 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5 MO 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("DTN 1:1").osis()).toEqual("Deut.1.1")
		`
		true
# Generated locale spec: German aliases for Joshua must resolve to OSIS "Josh".
# (Restored "Josh" in the describe/it titles, which had been corrupted; the
# body's "Josh.1.1" expectations fix the intended book.)
describe "Localized book Josh (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Josh (de)", ->
		`
		expect(p.parse("Josua 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("Josh 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("Jos 1:1").osis()).toEqual("Josh.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JOSUA 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("JOS 1:1").osis()).toEqual("Josh.1.1")
		`
		true
# Generated locale spec: German aliases for Judges must resolve to OSIS "Judg".
# (Restored "Judg" in the describe/it titles, which had been corrupted; the
# body's "Judg.1.1" expectations fix the intended book.)
describe "Localized book Judg (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Judg (de)", ->
		`
		expect(p.parse("Richter 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("Ri 1:1").osis()).toEqual("Judg.1.1")
		p.include_apocrypha(false)
		expect(p.parse("RICHTER 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("RI 1:1").osis()).toEqual("Judg.1.1")
		`
		true
# Generated locale spec: German aliases for Ruth must resolve to OSIS "Ruth".
describe "Localized book Ruth (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ruth (de)", ->
		`
		expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("Rut 1:1").osis()).toEqual("Ruth.1.1")
		p.include_apocrypha(false)
		expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("RUT 1:1").osis()).toEqual("Ruth.1.1")
		`
		true
# Generated locale spec: German aliases for 1 Esdras must resolve to OSIS "1Esd".
describe "Localized book 1Esd (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Esd (de)", ->
		`
		expect(p.parse("Erste Esra 1:1").osis()).toEqual("1Esd.1.1")
		expect(p.parse("1. Esra 1:1").osis()).toEqual("1Esd.1.1")
		expect(p.parse("1 Esra 1:1").osis()).toEqual("1Esd.1.1")
		expect(p.parse("1. Esr 1:1").osis()).toEqual("1Esd.1.1")
		expect(p.parse("1 Esr 1:1").osis()).toEqual("1Esd.1.1")
		expect(p.parse("1. Es 1:1").osis()).toEqual("1Esd.1.1")
		expect(p.parse("1 Es 1:1").osis()).toEqual("1Esd.1.1")
		expect(p.parse("1Esd 1:1").osis()).toEqual("1Esd.1.1")
		`
		true
# Generated locale spec: German aliases for 2 Esdras must resolve to OSIS "2Esd".
describe "Localized book 2Esd (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Esd (de)", ->
		`
		expect(p.parse("Zweite Esra 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("2. Esra 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("2 Esra 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("2. Esr 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("2 Esr 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("2. Es 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("2 Es 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("2Esd 1:1").osis()).toEqual("2Esd.1.1")
		`
		true
# Generated locale spec: German aliases for Isaiah must resolve to OSIS "Isa".
describe "Localized book Isa (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Isa (de)", ->
		`
		expect(p.parse("Isaias 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Jesaja 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Jes 1:1").osis()).toEqual("Isa.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ISAIAS 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("JESAJA 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("JES 1:1").osis()).toEqual("Isa.1.1")
		`
		true
# Generated locale spec: German aliases for 2 Samuel must resolve to OSIS
# "2Sam". (Restored the two corrupted input strings from the matching
# uppercase assertions: "Zweite Samuel" and "2. Samuel".)
describe "Localized book 2Sam (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Sam (de)", ->
		`
		expect(p.parse("Zweite Samuel 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. Samuel 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 Samuel 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. Sam 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 Sam 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2Sam 1:1").osis()).toEqual("2Sam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ZWEITE SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. SAM 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAM 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2SAM 1:1").osis()).toEqual("2Sam.1.1")
		`
		true
# Generated locale spec: German aliases for 1 Samuel must resolve to OSIS
# "1Sam". (Restored the three corrupted input strings from the matching
# uppercase assertions: "Erste Samuel", "1. Samuel", and "1. Sam".)
describe "Localized book 1Sam (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Sam (de)", ->
		`
		expect(p.parse("Erste Samuel 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. Samuel 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 Samuel 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. Sam 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 Sam 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1Sam 1:1").osis()).toEqual("1Sam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ERSTE SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. SAM 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 SAM 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1SAM 1:1").osis()).toEqual("1Sam.1.1")
		`
		true
# Generated locale spec: German aliases for 2 Kings ("Könige", with "oe"/"o"
# umlaut fallbacks) must resolve to OSIS "2Kgs".
describe "Localized book 2Kgs (de)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Kgs (de)", ->
		`
		expect(p.parse("Zweite Koenige 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("Zweite Konige 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("Zweite Könige 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. Koenige 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 Koenige 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. Konige 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. Könige 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 Konige 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 Könige 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. Kon 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. Kön 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 Kon 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 Kön 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2Kgs 1:1").osis()).toEqual("2Kgs.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ZWEITE KOENIGE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("ZWEITE KONIGE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("ZWEITE KÖNIGE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. KOENIGE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 KOENIGE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. KONIGE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. KÖNIGE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 KONIGE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 KÖNIGE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. KON 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. KÖN 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 KON 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 KÖN 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2KGS 1:1").osis()).toEqual("2Kgs.1.1")
		`
		true
describe "Localized book 1Kgs (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Kgs (de)", ->
`
expect(p.parse("Erste Koenige 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("Erste Konige 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("Erste Könige 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. Koenige 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Koenige 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. Konige 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. Könige 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Konige 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Könige 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. Kon 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. Kön 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Kon 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Kön 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1Kgs 1:1").osis()).toEqual("1Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("ERSTE KOENIGE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("ERSTE KONIGE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("ERSTE KÖNIGE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. KOENIGE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 KOENIGE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. KONIGE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. KÖNIGE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 KONIGE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 KÖNIGE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. KON 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. KÖN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 KON 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 KÖN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1KGS 1:1").osis()).toEqual("1Kgs.1.1")
`
true
describe "Localized book 2Chr (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Chr (de)", ->
`
expect(p.parse("Zweite Chronik 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. Chronik 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 Chronik 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. Chr 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 Chr 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2Chr 1:1").osis()).toEqual("2Chr.1.1")
p.include_apocrypha(false)
expect(p.parse("ZWEITE CHRONIK 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. CHRONIK 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 CHRONIK 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. CHR 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 CHR 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2CHR 1:1").osis()).toEqual("2Chr.1.1")
`
true
describe "Localized book 1Chr (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Chr (de)", ->
`
expect(p.parse("Erste ChPI:NAME:<NAME>END_PIik 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. Chronik 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 Chronik 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. Chr 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 Chr 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1Chr 1:1").osis()).toEqual("1Chr.1.1")
p.include_apocrypha(false)
expect(p.parse("ERSTE CHRONIK 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. CHRONIK 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 CHRONIK 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. CHR 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 CHR 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1CHR 1:1").osis()).toEqual("1Chr.1.1")
`
true
describe "Localized book Ezra (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ezra (de)", ->
`
expect(p.parse("Esra 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("Ezra 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("Esr 1:1").osis()).toEqual("Ezra.1.1")
p.include_apocrypha(false)
expect(p.parse("ESRA 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("EZRA 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("ESR 1:1").osis()).toEqual("Ezra.1.1")
`
true
describe "Localized book Neh (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Neh (de)", ->
`
expect(p.parse("Nehemia 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("Neh 1:1").osis()).toEqual("Neh.1.1")
p.include_apocrypha(false)
expect(p.parse("NEHEMIA 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("NEH 1:1").osis()).toEqual("Neh.1.1")
`
true
describe "Localized book GkEsth (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: GkEsth (de)", ->
`
expect(p.parse("Ester \(Griechisch\) 1:1").osis()).toEqual("GkEsth.1.1")
expect(p.parse("Ester (Griechisch) 1:1").osis()).toEqual("GkEsth.1.1")
expect(p.parse("GkEsth 1:1").osis()).toEqual("GkEsth.1.1")
expect(p.parse("Gr Est 1:1").osis()).toEqual("GkEsth.1.1")
`
true
describe "Localized book Esth (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Esth (de)", ->
`
expect(p.parse("Esther 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("Ester 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("Esth 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("Est 1:1").osis()).toEqual("Esth.1.1")
p.include_apocrypha(false)
expect(p.parse("ESTHER 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("ESTER 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("ESTH 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("EST 1:1").osis()).toEqual("Esth.1.1")
`
true
describe "Localized book Job (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Job (de)", ->
`
expect(p.parse("Hiob 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("Ijob 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("Job 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("Hi 1:1").osis()).toEqual("Job.1.1")
p.include_apocrypha(false)
expect(p.parse("HIOB 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("IJOB 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("JOB 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("HI 1:1").osis()).toEqual("Job.1.1")
`
true
describe "Localized book Ps (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ps (de)", ->
`
expect(p.parse("Psalmen 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("Psalm 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("Ps 1:1").osis()).toEqual("Ps.1.1")
p.include_apocrypha(false)
expect(p.parse("PSALMEN 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("PSALM 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("PS 1:1").osis()).toEqual("Ps.1.1")
`
true
describe "Localized book PrAzar (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PrAzar (de)", ->
`
expect(p.parse("Gebet des Asarja 1:1").osis()).toEqual("PrAzar.1.1")
expect(p.parse("Geb As 1:1").osis()).toEqual("PrAzar.1.1")
expect(p.parse("PrAzar 1:1").osis()).toEqual("PrAzar.1.1")
`
true
describe "Localized book Prov (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Prov (de)", ->
`
expect(p.parse("Sprichworter 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("Sprichwörter 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("Sprueche 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("Spruche 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("Sprüche 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("Prov 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("Spr 1:1").osis()).toEqual("Prov.1.1")
p.include_apocrypha(false)
expect(p.parse("SPRICHWORTER 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("SPRICHWÖRTER 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("SPRUECHE 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("SPRUCHE 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("SPRÜCHE 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("PROV 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("SPR 1:1").osis()).toEqual("Prov.1.1")
`
true
describe "Localized book Eccl (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Eccl (de)", ->
`
expect(p.parse("Ecclesiastes 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Ekklesiastes 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Prediger 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Kohelet 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Eccl 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Pred 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Koh 1:1").osis()).toEqual("Eccl.1.1")
p.include_apocrypha(false)
expect(p.parse("ECCLESIASTES 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("EKKLESIASTES 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("PREDIGER 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("KOHELET 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("ECCL 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("PRED 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("KOH 1:1").osis()).toEqual("Eccl.1.1")
`
true
describe "Localized book SgThree (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: SgThree (de)", ->
`
expect(p.parse("Lobgesang der drei jungen Manner im Feuerofen 1:1").osis()).toEqual("SgThree.1.1")
expect(p.parse("Lobgesang der drei jungen Männer im Feuerofen 1:1").osis()).toEqual("SgThree.1.1")
expect(p.parse("Der Gesang der Drei Manner im feurigen Ofen 1:1").osis()).toEqual("SgThree.1.1")
expect(p.parse("Der Gesang der Drei Männer im feurigen Ofen 1:1").osis()).toEqual("SgThree.1.1")
expect(p.parse("Lobgesang der drei jungen Manner 1:1").osis()).toEqual("SgThree.1.1")
expect(p.parse("Lobgesang der drei jungen Männer 1:1").osis()).toEqual("SgThree.1.1")
expect(p.parse("Lobgesang der 3 jungen Manner 1:1").osis()).toEqual("SgThree.1.1")
expect(p.parse("Lobgesang der 3 jungen Männer 1:1").osis()).toEqual("SgThree.1.1")
expect(p.parse("Der Gesang der Drei 1:1").osis()).toEqual("SgThree.1.1")
expect(p.parse("Gesang der Drei 1:1").osis()).toEqual("SgThree.1.1")
expect(p.parse("SgThree 1:1").osis()).toEqual("SgThree.1.1")
expect(p.parse("L3J 1:1").osis()).toEqual("SgThree.1.1")
`
true
describe "Localized book Song (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Song (de)", ->
`
expect(p.parse("Hohelied Salomonis 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Hoheslied Salomos 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Hohes Lied 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Hoheslied 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Hohelied 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Song 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Hld 1:1").osis()).toEqual("Song.1.1")
p.include_apocrypha(false)
expect(p.parse("HOHELIED SALOMONIS 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("HOHESLIED SALOMOS 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("HOHES LIED 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("HOHESLIED 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("HOHELIED 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SONG 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("HLD 1:1").osis()).toEqual("Song.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PI (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (de)", ->
`
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Jer.1.1")
p.include_apocrypha(false)
expect(p.parse("JEREMIAS 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("JEREMIA 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("JER 1:1").osis()).toEqual("Jer.1.1")
`
true
describe "Localized book Ezek (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ezek (de)", ->
`
expect(p.parse("Ezechiel 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Hesekiel 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Ezek 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Hes 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Ez 1:1").osis()).toEqual("Ezek.1.1")
p.include_apocrypha(false)
expect(p.parse("EZECHIEL 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("HESEKIEL 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("EZEK 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("HES 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("EZ 1:1").osis()).toEqual("Ezek.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PI (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (de)", ->
`
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("Dan 1:1").osis()).toEqual("Dan.1.1")
p.include_apocrypha(false)
expect(p.parse("DANIEL 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("DAN 1:1").osis()).toEqual("Dan.1.1")
`
true
describe "Localized book Hos (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PIos (de)", ->
`
expect(p.parse("Hosea 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("Osee 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("Hos 1:1").osis()).toEqual("Hos.1.1")
p.include_apocrypha(false)
expect(p.parse("HOSEA 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("OSEE 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("HOS 1:1").osis()).toEqual("Hos.1.1")
`
true
describe "Localized book Joel (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Joel (de)", ->
`
expect(p.parse("Joel 1:1").osis()).toEqual("Joel.1.1")
p.include_apocrypha(false)
expect(p.parse("JOEL 1:1").osis()).toEqual("Joel.1.1")
`
true
describe "Localized book Amos (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Amos (de)", ->
`
expect(p.parse("Amos 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("Am 1:1").osis()).toEqual("Amos.1.1")
p.include_apocrypha(false)
expect(p.parse("AMOS 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("AM 1:1").osis()).toEqual("Amos.1.1")
`
true
describe "Localized book Obad (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Obad (de)", ->
`
expect(p.parse("Abdias 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("Obadja 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("Obad 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("Obd 1:1").osis()).toEqual("Obad.1.1")
p.include_apocrypha(false)
expect(p.parse("ABDIAS 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("OBADJA 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("OBAD 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("OBD 1:1").osis()).toEqual("Obad.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PIah (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PIah (de)", ->
`
expect(p.parse("Jonah 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("Jonas 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("Jona 1:1").osis()).toEqual("Jonah.1.1")
p.include_apocrypha(false)
expect(p.parse("JONAH 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("JONAS 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("JONA 1:1").osis()).toEqual("Jonah.1.1")
`
true
describe "Localized book Mic (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Mic (de)", ->
`
expect(p.parse("Michaas 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("Michäas 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("Micha 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("Mic 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("Mi 1:1").osis()).toEqual("Mic.1.1")
p.include_apocrypha(false)
expect(p.parse("MICHAAS 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("MICHÄAS 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("MICHA 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("MIC 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("MI 1:1").osis()).toEqual("Mic.1.1")
`
true
describe "Localized book Nah (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Nah (de)", ->
`
expect(p.parse("Nahum 1:1").osis()).toEqual("Nah.1.1")
expect(p.parse("Nah 1:1").osis()).toEqual("Nah.1.1")
p.include_apocrypha(false)
expect(p.parse("NAHUM 1:1").osis()).toEqual("Nah.1.1")
expect(p.parse("NAH 1:1").osis()).toEqual("Nah.1.1")
`
true
describe "Localized book Hab (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Hab (de)", ->
`
expect(p.parse("Habakuk 1:1").osis()).toEqual("Hab.1.1")
expect(p.parse("Hab 1:1").osis()).toEqual("Hab.1.1")
p.include_apocrypha(false)
expect(p.parse("HABAKUK 1:1").osis()).toEqual("Hab.1.1")
expect(p.parse("HAB 1:1").osis()).toEqual("Hab.1.1")
`
true
describe "Localized book Zeph (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Zeph (de)", ->
`
expect(p.parse("Sophonias 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("Zephanja 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("Zefanja 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("Soph 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("Zeph 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("Zef 1:1").osis()).toEqual("Zeph.1.1")
p.include_apocrypha(false)
expect(p.parse("SOPHONIAS 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("ZEPHANJA 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("ZEFANJA 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("SOPH 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("ZEPH 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("ZEF 1:1").osis()).toEqual("Zeph.1.1")
`
true
describe "Localized book Hag (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Hag (de)", ->
`
expect(p.parse("Aggaus 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("Aggäus 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("Haggai 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("Agg 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("Hag 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("Ag 1:1").osis()).toEqual("Hag.1.1")
p.include_apocrypha(false)
expect(p.parse("AGGAUS 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("AGGÄUS 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("HAGGAI 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("AGG 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("HAG 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("AG 1:1").osis()).toEqual("Hag.1.1")
`
true
describe "Localized book Zech (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Zech (de)", ->
`
expect(p.parse("Zacharias 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("Sacharja 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("Sach 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("Zech 1:1").osis()).toEqual("Zech.1.1")
p.include_apocrypha(false)
expect(p.parse("ZACHARIAS 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("SACHARJA 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("SACH 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("ZECH 1:1").osis()).toEqual("Zech.1.1")
`
true
describe "Localized book Mal (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Mal (de)", ->
`
expect(p.parse("Malachias 1:1").osis()).toEqual("Mal.1.1")
expect(p.parse("Maleachi 1:1").osis()).toEqual("Mal.1.1")
expect(p.parse("Mal 1:1").osis()).toEqual("Mal.1.1")
p.include_apocrypha(false)
expect(p.parse("MALACHIAS 1:1").osis()).toEqual("Mal.1.1")
expect(p.parse("MALEACHI 1:1").osis()).toEqual("Mal.1.1")
expect(p.parse("MAL 1:1").osis()).toEqual("Mal.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PIatt (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PIatt (de)", ->
`
expect(p.parse("Matthaus 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Matthäus 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Matt 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Mt 1:1").osis()).toEqual("Matt.1.1")
p.include_apocrypha(false)
expect(p.parse("MATTHAUS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATTHÄUS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATT 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MT 1:1").osis()).toEqual("Matt.1.1")
`
true
describe "Localized book Mark (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Mark (de)", ->
`
expect(p.parse("Markus 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("Mark 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("Mk 1:1").osis()).toEqual("Mark.1.1")
p.include_apocrypha(false)
expect(p.parse("MARKUS 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("MARK 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("MK 1:1").osis()).toEqual("Mark.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PIuke (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PIuke (de)", ->
`
expect(p.parse("Lukas 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("Luke 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("Lk 1:1").osis()).toEqual("Luke.1.1")
p.include_apocrypha(false)
expect(p.parse("LUKAS 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("LUKE 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("LK 1:1").osis()).toEqual("Luke.1.1")
`
true
describe "Localized book 1John (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1John (de)", ->
`
expect(p.parse("Erste Johannes 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1. Johannes 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 Johannes 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1. Joh 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 Joh 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1John 1:1").osis()).toEqual("1John.1.1")
p.include_apocrypha(false)
expect(p.parse("ERSTE JOHANNES 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1. JOHANNES 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 JOHANNES 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1. JOH 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 JOH 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1JOHN 1:1").osis()).toEqual("1John.1.1")
`
true
describe "Localized book 2John (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2John (de)", ->
`
expect(p.parse("Zweite Johannes 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. Johannes 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 Johannes 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2John 1:1").osis()).toEqual("2John.1.1")
p.include_apocrypha(false)
expect(p.parse("ZWEITE JOHANNES 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. JOHANNES 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 JOHANNES 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2JOHN 1:1").osis()).toEqual("2John.1.1")
`
true
describe "Localized book 3John (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 3John (de)", ->
`
expect(p.parse("Dritte Johannes 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. Johannes 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 Johannes 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. Joh 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 Joh 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3John 1:1").osis()).toEqual("3John.1.1")
p.include_apocrypha(false)
expect(p.parse("DRITTE JOHANNES 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. JOHANNES 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 JOHANNES 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. JOH 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 JOH 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3JOHN 1:1").osis()).toEqual("3John.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PI (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (de)", ->
`
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("John.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("John.1.1")
expect(p.parse("Joh 1:1").osis()).toEqual("John.1.1")
p.include_apocrypha(false)
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("John.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("John.1.1")
expect(p.parse("JOH 1:1").osis()).toEqual("John.1.1")
`
true
describe "Localized book Acts (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Acts (de)", ->
`
expect(p.parse("Apostelgeschichte 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Acts 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Apg 1:1").osis()).toEqual("Acts.1.1")
p.include_apocrypha(false)
expect(p.parse("APOSTELGESCHICHTE 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("ACTS 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("APG 1:1").osis()).toEqual("Acts.1.1")
`
true
describe "Localized book Rom (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Rom (de)", ->
`
expect(p.parse("Roemer 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("Romer 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("Römer 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("Rom 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("Röm 1:1").osis()).toEqual("Rom.1.1")
p.include_apocrypha(false)
expect(p.parse("ROEMER 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMER 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("RÖMER 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROM 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("RÖM 1:1").osis()).toEqual("Rom.1.1")
`
true
describe "Localized book 2Cor (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Cor (de)", ->
`
expect(p.parse("Zweite Korinther 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korinther 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korinther 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2Cor 1:1").osis()).toEqual("2Cor.1.1")
p.include_apocrypha(false)
expect(p.parse("ZWEITE KORINTHER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTHER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTHER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2COR 1:1").osis()).toEqual("2Cor.1.1")
`
true
describe "Localized book 1Cor (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Cor (de)", ->
`
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korinther 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1Cor 1:1").osis()).toEqual("1Cor.1.1")
p.include_apocrypha(false)
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTHER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1COR 1:1").osis()).toEqual("1Cor.1.1")
`
true
describe "Localized book Gal (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gal (de)", ->
`
expect(p.parse("Galater 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Gal 1:1").osis()).toEqual("Gal.1.1")
p.include_apocrypha(false)
expect(p.parse("GALATER 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GAL 1:1").osis()).toEqual("Gal.1.1")
`
true
describe "Localized book Eph (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Eph (de)", ->
`
expect(p.parse("Epheser 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Eph 1:1").osis()).toEqual("Eph.1.1")
p.include_apocrypha(false)
expect(p.parse("EPHESER 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPH 1:1").osis()).toEqual("Eph.1.1")
`
true
describe "Localized book Phil (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Phil (de)", ->
`
expect(p.parse("Philipper 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("Phil 1:1").osis()).toEqual("Phil.1.1")
p.include_apocrypha(false)
expect(p.parse("PHILIPPER 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PHIL 1:1").osis()).toEqual("Phil.1.1")
`
true
describe "Localized book Col (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Col (de)", ->
`
expect(p.parse("Kolosser 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("Col 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("Kol 1:1").osis()).toEqual("Col.1.1")
p.include_apocrypha(false)
expect(p.parse("KOLOSSER 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("COL 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KOL 1:1").osis()).toEqual("Col.1.1")
`
true
describe "Localized book 2Thess (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Thess (de)", ->
`
expect(p.parse("Zweite Thessalonicher 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. Thessalonicher 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Thessalonicher 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. Thess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Thess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2Thess 1:1").osis()).toEqual("2Thess.1.1")
p.include_apocrypha(false)
expect(p.parse("ZWEITE THESSALONICHER 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. THESSALONICHER 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 THESSALONICHER 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. THESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 THESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2THESS 1:1").osis()).toEqual("2Thess.1.1")
`
true
describe "Localized book 1Thess (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Thess (de)", ->
`
expect(p.parse("Erste Thessalonicher 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. Thessalonicher 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Thessalonicher 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1Thess 1:1").osis()).toEqual("1Thess.1.1")
p.include_apocrypha(false)
expect(p.parse("ERSTE THESSALONICHER 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. THESSALONICHER 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 THESSALONICHER 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1THESS 1:1").osis()).toEqual("1Thess.1.1")
`
true
describe "Localized book 2Tim (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Tim (de)", ->
`
expect(p.parse("Zweite Timotheus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. Timotheus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 Timotheus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. Tim 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 Tim 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2Tim 1:1").osis()).toEqual("2Tim.1.1")
p.include_apocrypha(false)
expect(p.parse("ZWEITE TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIM 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIM 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2TIM 1:1").osis()).toEqual("2Tim.1.1")
`
true
describe "Localized book 1Tim (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Tim (de)", ->
`
expect(p.parse("Erste Timotheus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. Timotheus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 Timotheus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. Tim 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 Tim 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1Tim 1:1").osis()).toEqual("1Tim.1.1")
p.include_apocrypha(false)
expect(p.parse("ERSTE TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIM 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIM 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1TIM 1:1").osis()).toEqual("1Tim.1.1")
`
true
describe "Localized book Titus (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Titus (de)", ->
`
expect(p.parse("Titus 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("Tit 1:1").osis()).toEqual("Titus.1.1")
p.include_apocrypha(false)
expect(p.parse("TITUS 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TIT 1:1").osis()).toEqual("Titus.1.1")
`
true
describe "Localized book Phlm (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Phlm (de)", ->
`
expect(p.parse("Philemon 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("Phlm 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("Phm 1:1").osis()).toEqual("Phlm.1.1")
p.include_apocrypha(false)
expect(p.parse("PHILEMON 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("PHLM 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("PHM 1:1").osis()).toEqual("Phlm.1.1")
`
true
describe "Localized book Heb (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Heb (de)", ->
`
expect(p.parse("Hebraeer 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Hebraer 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Hebräer 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Hebr 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Heb 1:1").osis()).toEqual("Heb.1.1")
p.include_apocrypha(false)
expect(p.parse("HEBRAEER 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEBRAER 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEBRÄER 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEBR 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEB 1:1").osis()).toEqual("Heb.1.1")
`
true
describe "Localized book Jas (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jas (de)", ->
`
expect(p.parse("Jakobusbrief 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jakobus 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jak 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jas 1:1").osis()).toEqual("Jas.1.1")
p.include_apocrypha(false)
expect(p.parse("JAKOBUSBRIEF 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAKOBUS 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAK 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAS 1:1").osis()).toEqual("Jas.1.1")
`
true
describe "Localized book 2Pet (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Pet (de)", ->
`
expect(p.parse("Zweite Petrus 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. Petrus 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Petrus 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. Petr 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Petr 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2Pet 1:1").osis()).toEqual("2Pet.1.1")
p.include_apocrypha(false)
expect(p.parse("ZWEITE PETRUS 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. PETRUS 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PETRUS 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. PETR 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PETR 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2PET 1:1").osis()).toEqual("2Pet.1.1")
`
true
describe "Localized book 1Pet (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Pet (de)", ->
`
expect(p.parse("Erste Petrus 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. Petrus 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Petrus 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. Petr 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Petr 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1Pet 1:1").osis()).toEqual("1Pet.1.1")
p.include_apocrypha(false)
expect(p.parse("ERSTE PETRUS 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. PETRUS 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PETRUS 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. PETR 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PETR 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1PET 1:1").osis()).toEqual("1Pet.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PIude (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PIude (de)", ->
`
expect(p.parse("Judas 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("Jude 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("Jud 1:1").osis()).toEqual("Jude.1.1")
p.include_apocrypha(false)
expect(p.parse("JUDAS 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("JUDE 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("JUD 1:1").osis()).toEqual("Jude.1.1")
`
true
describe "Localized book Tob (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Tob (de)", ->
`
expect(p.parse("Tobias 1:1").osis()).toEqual("Tob.1.1")
expect(p.parse("Tobit 1:1").osis()).toEqual("Tob.1.1")
expect(p.parse("Tob 1:1").osis()).toEqual("Tob.1.1")
`
true
describe "Localized book Jdt (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jdt (de)", ->
`
expect(p.parse("Judit 1:1").osis()).toEqual("Jdt.1.1")
expect(p.parse("Jdt 1:1").osis()).toEqual("Jdt.1.1")
`
true
describe "Localized book Bar (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bar (de)", ->
`
expect(p.parse("Baruch 1:1").osis()).toEqual("Bar.1.1")
expect(p.parse("Bar 1:1").osis()).toEqual("Bar.1.1")
`
true
describe "Localized book Sus (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sus (de)", ->
`
expect(p.parse("PI:NAME:<NAME>END_PI und die Alten 1:1").osis()).toEqual("Sus.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI im Bade 1:1").osis()).toEqual("Sus.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Sus.1.1")
expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
`
true
describe "Localized book 2Macc (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Macc (de)", ->
`
expect(p.parse("Zweite Makkabaer 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("Zweite Makkabäer 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2. Makkabaer 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2. Makkabäer 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2 Makkabaer 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2 Makkabäer 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2. Makk 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2 Makk 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2Macc 1:1").osis()).toEqual("2Macc.1.1")
`
true
describe "Localized book 3Macc (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 3Macc (de)", ->
`
expect(p.parse("Dritte Makkabaer 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("Dritte Makkabäer 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3. Makkabaer 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3. Makkabäer 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3 Makkabaer 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3 Makkabäer 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3. Makk 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3 Makk 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3Macc 1:1").osis()).toEqual("3Macc.1.1")
`
true
describe "Localized book 4Macc (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 4Macc (de)", ->
`
expect(p.parse("Vierte Makkabaer 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("Vierte Makkabäer 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4. Makkabaer 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4. Makkabäer 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4 Makkabaer 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4 Makkabäer 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4. Makk 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4 Makk 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4Macc 1:1").osis()).toEqual("4Macc.1.1")
`
true
describe "Localized book 1Macc (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Macc (de)", ->
`
expect(p.parse("Erste Makkabaer 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("Erste Makkabäer 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1. Makkabaer 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1. Makkabäer 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1 Makkabaer 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1 Makkabäer 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1. Makk 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1 Makk 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1Macc 1:1").osis()).toEqual("1Macc.1.1")
`
true
describe "Localized book Ezra,Esth (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ezra,Esth (de)", ->
`
expect(p.parse("Es 1:1").osis()).toEqual("Ezra.1.1")
p.include_apocrypha(false)
expect(p.parse("ES 1:1").osis()).toEqual("Ezra.1.1")
`
true
describe "Localized book Phil,Phlm (de)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Phil,Phlm (de)", ->
`
expect(p.parse("Ph 1:1").osis()).toEqual("Phil.1.1")
p.include_apocrypha(false)
expect(p.parse("PH 1:1").osis()).toEqual("Phil.1.1")
`
true
describe "Miscellaneous tests", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should return the expected language", ->
expect(p.languages).toEqual ["de"]
it "should handle ranges (de)", ->
expect(p.parse("Titus 1:1 bis 2").osis()).toEqual "Titus.1.1-Titus.1.2"
expect(p.parse("Matt 1bis2").osis()).toEqual "Matt.1-Matt.2"
expect(p.parse("Phlm 2 BIS 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
it "should handle chapters (de)", ->
expect(p.parse("Titus 1:1, Kapitel 2").osis()).toEqual "Titus.1.1,Titus.2"
expect(p.parse("Matt 3:4 KAPITEL 6").osis()).toEqual "Matt.3.4,Matt.6"
expect(p.parse("Titus 1:1, Kap. 2").osis()).toEqual "Titus.1.1,Titus.2"
expect(p.parse("Matt 3:4 KAP. 6").osis()).toEqual "Matt.3.4,Matt.6"
expect(p.parse("Titus 1:1, Kap 2").osis()).toEqual "Titus.1.1,Titus.2"
expect(p.parse("Matt 3:4 KAP 6").osis()).toEqual "Matt.3.4,Matt.6"
it "should handle verses (de)", ->
expect(p.parse("Exod 1:1 Verse 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm VERSE 6").osis()).toEqual "Phlm.1.6"
expect(p.parse("Exod 1:1 Vers. 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm VERS. 6").osis()).toEqual "Phlm.1.6"
expect(p.parse("Exod 1:1 Vers 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm VERS 6").osis()).toEqual "Phlm.1.6"
expect(p.parse("Exod 1:1 Ver. 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm VER. 6").osis()).toEqual "Phlm.1.6"
expect(p.parse("Exod 1:1 Ver 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm VER 6").osis()).toEqual "Phlm.1.6"
expect(p.parse("Exod 1:1 Vs. 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm VS. 6").osis()).toEqual "Phlm.1.6"
expect(p.parse("Exod 1:1 Vs 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm VS 6").osis()).toEqual "Phlm.1.6"
it "should handle 'and' (de)", ->
expect(p.parse("Exod 1:1 und 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm 2 UND 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
expect(p.parse("Exod 1:1 vgl. 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm 2 VGL. 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
expect(p.parse("Exod 1:1 vgl 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm 2 VGL 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
it "should handle titles (de)", ->
expect(p.parse("Ps 3 Titel, 4:2, 5:Titel").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
expect(p.parse("PS 3 TITEL, 4:2, 5:TITEL").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
it "should handle 'ff' (de)", ->
expect(p.parse("Rev 3ff, 4:2ff").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
expect(p.parse("REV 3 FF, 4:2 FF").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
it "should handle translations (de)", ->
expect(p.parse("Lev 1 (ELB)").osis_and_translations()).toEqual [["Lev.1", "ELB"]]
expect(p.parse("lev 1 elb").osis_and_translations()).toEqual [["Lev.1", "ELB"]]
expect(p.parse("Lev 1 (HFA)").osis_and_translations()).toEqual [["Lev.1", "HFA"]]
expect(p.parse("lev 1 hfa").osis_and_translations()).toEqual [["Lev.1", "HFA"]]
expect(p.parse("Lev 1 (LUTH1545)").osis_and_translations()).toEqual [["Lev.1", "LUTH1545"]]
expect(p.parse("lev 1 luth1545").osis_and_translations()).toEqual [["Lev.1", "LUTH1545"]]
expect(p.parse("Lev 1 (LUTHER)").osis_and_translations()).toEqual [["Lev.1", "LUTHER"]]
expect(p.parse("lev 1 luther").osis_and_translations()).toEqual [["Lev.1", "LUTHER"]]
expect(p.parse("Lev 1 (SCH1950)").osis_and_translations()).toEqual [["Lev.1", "SCH1950"]]
expect(p.parse("lev 1 sch1950").osis_and_translations()).toEqual [["Lev.1", "SCH1950"]]
expect(p.parse("Lev 1 (SCH2000)").osis_and_translations()).toEqual [["Lev.1", "SCH2000"]]
expect(p.parse("lev 1 sch2000").osis_and_translations()).toEqual [["Lev.1", "SCH2000"]]
it "should handle book ranges (de)", ->
p.set_options {book_alone_strategy: "full", book_range_strategy: "include"}
expect(p.parse("Erste bis Dritte Johannes").osis()).toEqual "1John.1-3John.1"
it "should handle boundaries (de)", ->
p.set_options {book_alone_strategy: "full"}
expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
|
[
{
"context": "tomatically would be best\n\t\t\t\t\t\t\temail_address = \"isaiahodhner@gmail.com\"\n\t\t\t\t\t\t\tnew_issue_url = \"https://github.com/1j01/",
"end": 1196,
"score": 0.9998877048492432,
"start": 1174,
"tag": "EMAIL",
"value": "isaiahodhner@gmail.com"
},
{
"context": "l.... | src/audio-clips.coffee | 1j01/wavey | 69 |
localforage = require "localforage"
class AudioClipStorage
constructor: ->
# TODO: make these into Maps
@audio_buffers = {}
@recordings = {}
@loading = {}
@_errors = {}
@_suggestions = {}
# dependencies must be injected before use
# XXX: I'm sure there's a cleaner way of doing this
@InfoBar = null
@remove_broken_clips = null
has_error: (clip)=>
@_errors[clip.audio_id]
show_error: (clip)=>
error_message = @_errors[clip.audio_id]
suggestion = @_suggestions[clip.audio_id]
# TODO: instead of taking "suggestions" and desugaring them into buttons,
# just take buttons in fail_warn, created via sugar functions
# this will also make the actual error object available for issue reporting
@InfoBar.warn error_message,
switch suggestion
when "remove_broken_clips"
[{
label: "Remove broken clips"
action: @remove_broken_clips
}]
when "reload_app"
[{
label: "Reload app"
action: -> location.reload()
}]
###
when "report_issue"
[{
label: "Report issue"
action: ->
# TODO: actually logging errors automatically would be best
email_address = "isaiahodhner@gmail.com"
new_issue_url = "https://github.com/1j01/wavey/issues/new"
labels = ["Bug", "Error"]
title = "Error: #{error_message}"
issue_description = """
<!--
hint/message to issue-reporter here
ask for details (ideally steps to reproduce)
make it clear this won't be included in the message
-->
```
#{error}
```
"""
email_body = """
[Knowing that the error occurred is most]
[If you can, please give steps you took before this error occurred]
#{error}
"""
enc = encodeURIComponent
labels_qs = ""
for label, index in labels
labels_qs += "&" if index > 0
labels_qs += "labels[]=#{enc label}"
query_string = "#{labels_qs}&title=#{enc title}&body=#{enc issue_description}"
new_issue_url = "#{new_issue_url}?#{query_string}"
email_url = "mailto:${email_address}?subject=#{enc title}&body=#{enc email_body}"
window.open(new_issue_url)
}]
###
throttle = 0
load_clip: (clip)=>
return if @audio_buffers[clip.audio_id]?
return if @loading[clip.audio_id]?
return if @_errors[clip.audio_id]?
@loading[clip.audio_id] = yes
fail_warn = (error_message, options={})=>
{suggestion} = options
suggestion ?= "remove_broken_clips"
@_errors[clip.audio_id] = error_message
@_suggestions[clip.audio_id] = suggestion
@loading[clip.audio_id] = no
@show_error(clip)
handle_localforage_error = (error_message, error)=>
if error instanceof RangeError
error_message = "We appear to have run out of memory loading the document."
else
error_message = "#{error_message} #{error.message}"
# TODO: issue report button too
fail_warn error_message, {suggestion: "reload_app"}
if clip.recording_id?
recording_storage_key = "recording:#{clip.recording_id}"
localforage.getItem recording_storage_key, (err, recording)=>
if err
handle_localforage_error "Failed to load a recording.", err
console.error "Error loading #{recording_storage_key}"
else if recording
@recordings[clip.recording_id] = recording
chunks = [[], []]
total_loaded_chunks = 0
for channel_chunk_ids, channel_index in recording.chunk_ids
for chunk_id, chunk_index in channel_chunk_ids
do (channel_chunk_ids, channel_index, chunk_id, chunk_index)=>
# throttling to avoid DOMException: The transaction was aborted, so the request cannot be fulfilled.
# Internal error: Too many transactions queued.
# https://code.google.com/p/chromium/issues/detail?id=338800
setTimeout ->
chunk_storage_key = "recording:#{clip.recording_id}:chunk:#{chunk_id}"
localforage.getItem chunk_storage_key, (err, typed_array)=>
if err
handle_localforage_error "Failed to load part of a recording.", err
console.error "Error loading a chunk of a recording (key #{chunk_storage_key})", clip, recording
else if typed_array
chunks[channel_index][chunk_index] = typed_array
total_loaded_chunks += 1
throttle -= 1 # this will not unthrottle anything during the document load
if total_loaded_chunks is recording.chunk_ids.length * channel_chunk_ids.length
recording.chunks = chunks
render()
else
fail_warn "Part of a recording is missing from storage."
console.warn "A chunk of a recording is missing from storage (key #{chunk_storage_key})", clip, recording
, throttle += 1
if channel_chunk_ids.length is 0 and channel_index is recording.chunk_ids.length - 1
recording.chunks = chunks
render()
else
fail_warn "A recording is missing from storage."
console.warn "A recording is missing from storage (key #{recording_storage_key}) for clip:", clip
else
audio_storage_key = "audio:#{clip.audio_id}"
localforage.getItem audio_storage_key, (err, array_buffer)=>
if err
handle_localforage_error "Failed to load audio data.", err
console.error "Error loading #{audio_storage_key}"
else if array_buffer
actx.decodeAudioData array_buffer, (buffer)=>
@audio_buffers[clip.audio_id] = buffer
# TODO: um, only hide this if it's all finished?
@InfoBar.hide "Not all tracks have finished loading."
render()
else
fail_warn "An audio clip is missing from storage."
console.warn "Audio data is missing from storage (key #{audio_storage_key}) for clip:", clip
load_clips: (tracks)=>
for track in tracks when track.type is "audio"
for clip in track.clips
@load_clip clip
module.exports = new AudioClipStorage
| 175580 |
localforage = require "localforage"
# Lazily loads and caches audio data for clips, backed by localForage:
# decoded AudioBuffers (keyed by audio_id) and raw recording chunk arrays
# (keyed by recording_id). Load failures are recorded per-clip and surfaced
# to the user through the injected InfoBar with actionable suggestions.
class AudioClipStorage
	constructor: ->
		# TODO: make these into Maps
		@audio_buffers = {} # audio_id -> decoded AudioBuffer
		@recordings = {} # recording_id -> recording record (gains .chunks when loaded)
		@loading = {} # audio_id -> yes while a load is in flight
		@_errors = {} # audio_id -> user-facing error message from a failed load
		@_suggestions = {} # audio_id -> suggestion id consumed by show_error
		# dependencies must be injected before use
		# XXX: I'm sure there's a cleaner way of doing this
		@InfoBar = null
		@remove_broken_clips = null
	# Returns the stored error message for the clip's audio — truthy if a
	# previous load failed, undefined otherwise.
	has_error: (clip)=>
		@_errors[clip.audio_id]
	# Shows the clip's stored error in the InfoBar, mapping the stored
	# suggestion id to action buttons.
	show_error: (clip)=>
		error_message = @_errors[clip.audio_id]
		suggestion = @_suggestions[clip.audio_id]
		# TODO: instead of taking "suggestions" and desugaring them into buttons,
		# just take buttons in fail_warn, created via sugar functions
		# this will also make the actual error object available for issue reporting
		@InfoBar.warn error_message,
			switch suggestion
				when "remove_broken_clips"
					[{
						label: "Remove broken clips"
						action: @remove_broken_clips
					}]
				when "reload_app"
					[{
						label: "Reload app"
						action: -> location.reload()
					}]
				###
				when "report_issue"
					[{
						label: "Report issue"
						action: ->
							# TODO: actually logging errors automatically would be best
							email_address = "<EMAIL>"
							new_issue_url = "https://github.com/1j01/wavey/issues/new"
							labels = ["Bug", "Error"]
							title = "Error: #{error_message}"
							issue_description = """
								<!--
								hint/message to issue-reporter here
								ask for details (ideally steps to reproduce)
								make it clear this won't be included in the message
								-->
								```
								#{error}
								```
							"""
							email_body = """
								[Knowing that the error occurred is most]
								[If you can, please give steps you took before this error occurred]
								#{error}
							"""
							enc = encodeURIComponent
							labels_qs = ""
							for label, index in labels
								labels_qs += "&" if index > 0
								labels_qs += "labels[]=#{enc label}"
							query_string = "#{labels_qs}&title=#{enc title}&body=#{enc issue_description}"
							new_issue_url = "#{new_issue_url}?#{query_string}"
							email_url = "mailto:${email_address}?subject=#{enc title}&body=#{enc email_body}"
							window.open(new_issue_url)
					}]
				###
	# Shared closure variable across all load_clip calls: staggers localForage
	# reads (1 ms apart) so the chunk reads below don't flood IndexedDB.
	throttle = 0
	# Loads the data backing one clip. A clip with a recording_id loads its
	# recording record plus all chunk arrays; otherwise its audio file is
	# loaded and decoded into an AudioBuffer. No-op if already loaded,
	# loading, or previously failed.
	load_clip: (clip)=>
		return if @audio_buffers[clip.audio_id]?
		return if @loading[clip.audio_id]?
		return if @_errors[clip.audio_id]?
		@loading[clip.audio_id] = yes
		# Record a per-clip failure and show it to the user immediately.
		fail_warn = (error_message, options={})=>
			{suggestion} = options
			suggestion ?= "remove_broken_clips"
			@_errors[clip.audio_id] = error_message
			@_suggestions[clip.audio_id] = suggestion
			@loading[clip.audio_id] = no
			@show_error(clip)
		# Wraps fail_warn for localForage errors; a RangeError here is treated
		# as an out-of-memory condition, so suggest reloading the app.
		handle_localforage_error = (error_message, error)=>
			if error instanceof RangeError
				error_message = "We appear to have run out of memory loading the document."
			else
				error_message = "#{error_message} #{error.message}"
			# TODO: issue report button too
			fail_warn error_message, {suggestion: "reload_app"}
		if clip.recording_id?
			recording_storage_key = "recording:#{clip.recording_id}"
			localforage.getItem recording_storage_key, (err, recording)=>
				if err
					handle_localforage_error "Failed to load a recording.", err
					console.error "Error loading #{recording_storage_key}"
				else if recording
					@recordings[clip.recording_id] = recording
					chunks = [[], []]
					total_loaded_chunks = 0
					for channel_chunk_ids, channel_index in recording.chunk_ids
						for chunk_id, chunk_index in channel_chunk_ids
							do (channel_chunk_ids, channel_index, chunk_id, chunk_index)=>
								# throttling to avoid DOMException: The transaction was aborted, so the request cannot be fulfilled.
								# Internal error: Too many transactions queued.
								# https://code.google.com/p/chromium/issues/detail?id=338800
								setTimeout ->
									# NOTE(review): this key format was reconstructed after the
									# literal was corrupted by redaction — confirm it matches the
									# key format used when chunks are saved.
									chunk_storage_key = "recording:#{clip.recording_id}:chunk:#{chunk_id}"
									localforage.getItem chunk_storage_key, (err, typed_array)=>
										if err
											handle_localforage_error "Failed to load part of a recording.", err
											console.error "Error loading a chunk of a recording (key #{chunk_storage_key})", clip, recording
										else if typed_array
											chunks[channel_index][chunk_index] = typed_array
											total_loaded_chunks += 1
											throttle -= 1 # this will not unthrottle anything during the document load
											# NOTE(review): this completion check assumes every channel
											# has channel_chunk_ids.length chunks; if channels can have
											# differing chunk counts the product is wrong — TODO confirm.
											if total_loaded_chunks is recording.chunk_ids.length * channel_chunk_ids.length
												recording.chunks = chunks
												render()
										else
											fail_warn "Part of a recording is missing from storage."
											console.warn "A chunk of a recording is missing from storage (key #{chunk_storage_key})", clip, recording
								, throttle += 1
					# An empty recording has no chunks to wait for; render right away.
					# (channel_chunk_ids/channel_index are the final loop values here.)
					if channel_chunk_ids.length is 0 and channel_index is recording.chunk_ids.length - 1
						recording.chunks = chunks
						render()
				else
					fail_warn "A recording is missing from storage."
					console.warn "A recording is missing from storage (key #{recording_storage_key}) for clip:", clip
		else
			audio_storage_key = "audio:#{clip.audio_id}"
			localforage.getItem audio_storage_key, (err, array_buffer)=>
				if err
					handle_localforage_error "Failed to load audio data.", err
					console.error "Error loading #{audio_storage_key}"
				else if array_buffer
					actx.decodeAudioData array_buffer, (buffer)=>
						@audio_buffers[clip.audio_id] = buffer
						# TODO: um, only hide this if it's all finished?
						@InfoBar.hide "Not all tracks have finished loading."
						render()
				else
					fail_warn "An audio clip is missing from storage."
					console.warn "Audio data is missing from storage (key #{audio_storage_key}) for clip:", clip
	# Kicks off load_clip for every clip of every audio track.
	load_clips: (tracks)=>
		for track in tracks when track.type is "audio"
			for clip in track.clips
				@load_clip clip
module.exports = new AudioClipStorage
| true |
localforage = require "localforage"
class AudioClipStorage
constructor: ->
# TODO: make these into Maps
@audio_buffers = {}
@recordings = {}
@loading = {}
@_errors = {}
@_suggestions = {}
# dependencies must be injected before use
# XXX: I'm sure there's a cleaner way of doing this
@InfoBar = null
@remove_broken_clips = null
has_error: (clip)=>
@_errors[clip.audio_id]
show_error: (clip)=>
error_message = @_errors[clip.audio_id]
suggestion = @_suggestions[clip.audio_id]
# TODO: instead of taking "suggestions" and desugaring them into buttons,
# just take buttons in fail_warn, created via sugar functions
# this will also make the actual error object available for issue reporting
@InfoBar.warn error_message,
switch suggestion
when "remove_broken_clips"
[{
label: "Remove broken clips"
action: @remove_broken_clips
}]
when "reload_app"
[{
label: "Reload app"
action: -> location.reload()
}]
###
when "report_issue"
[{
label: "Report issue"
action: ->
# TODO: actually logging errors automatically would be best
email_address = "PI:EMAIL:<EMAIL>END_PI"
new_issue_url = "https://github.com/1j01/wavey/issues/new"
labels = ["Bug", "Error"]
title = "Error: #{error_message}"
issue_description = """
<!--
hint/message to issue-reporter here
ask for details (ideally steps to reproduce)
make it clear this won't be included in the message
-->
```
#{error}
```
"""
email_body = """
[Knowing that the error occurred is most]
[If you can, please give steps you took before this error occurred]
#{error}
"""
enc = encodeURIComponent
labels_qs = ""
for label, index in labels
labels_qs += "&" if index > 0
labels_qs += "labels[]=#{enc label}"
query_string = "#{labels_qs}&title=#{enc title}&body=#{enc issue_description}"
new_issue_url = "#{new_issue_url}?#{query_string}"
email_url = "mailto:${email_address}?subject=#{enc title}&body=#{enc email_body}"
window.open(new_issue_url)
}]
###
throttle = 0
load_clip: (clip)=>
return if @audio_buffers[clip.audio_id]?
return if @loading[clip.audio_id]?
return if @_errors[clip.audio_id]?
@loading[clip.audio_id] = yes
fail_warn = (error_message, options={})=>
{suggestion} = options
suggestion ?= "remove_broken_clips"
@_errors[clip.audio_id] = error_message
@_suggestions[clip.audio_id] = suggestion
@loading[clip.audio_id] = no
@show_error(clip)
handle_localforage_error = (error_message, error)=>
if error instanceof RangeError
error_message = "We appear to have run out of memory loading the document."
else
error_message = "#{error_message} #{error.message}"
# TODO: issue report button too
fail_warn error_message, {suggestion: "reload_app"}
if clip.recording_id?
recording_storage_key = "recording:PI:KEY:<KEY>END_PI{clip.recording_id}"
localforage.getItem recording_storage_key, (err, recording)=>
if err
handle_localforage_error "Failed to load a recording.", err
console.error "Error loading #{recording_storage_key}"
else if recording
@recordings[clip.recording_id] = recording
chunks = [[], []]
total_loaded_chunks = 0
for channel_chunk_ids, channel_index in recording.chunk_ids
for chunk_id, chunk_index in channel_chunk_ids
do (channel_chunk_ids, channel_index, chunk_id, chunk_index)=>
# throttling to avoid DOMException: The transaction was aborted, so the request cannot be fulfilled.
# Internal error: Too many transactions queued.
# https://code.google.com/p/chromium/issues/detail?id=338800
setTimeout ->
chunk_storage_key = "PI:KEY:<KEY>END_PIclip.recording_PI:KEY:<KEY>END_PI
localforage.getItem chunk_storage_key, (err, typed_array)=>
if err
handle_localforage_error "Failed to load part of a recording.", err
console.error "Error loading a chunk of a recording (key #{chunk_storage_key})", clip, recording
else if typed_array
chunks[channel_index][chunk_index] = typed_array
total_loaded_chunks += 1
throttle -= 1 # this will not unthrottle anything during the document load
if total_loaded_chunks is recording.chunk_ids.length * channel_chunk_ids.length
recording.chunks = chunks
render()
else
fail_warn "Part of a recording is missing from storage."
console.warn "A chunk of a recording is missing from storage (key #{chunk_storage_key})", clip, recording
, throttle += 1
if channel_chunk_ids.length is 0 and channel_index is recording.chunk_ids.length - 1
recording.chunks = chunks
render()
else
fail_warn "A recording is missing from storage."
console.warn "A recording is missing from storage (key #{recording_storage_key}) for clip:", clip
else
audio_storage_key = "audio:PI:KEY:<KEY>END_PIclip.audio_PI:KEY:<KEY>END_PI
localforage.getItem audio_storage_key, (err, array_buffer)=>
if err
handle_localforage_error "Failed to load audio data.", err
console.error "Error loading #{audio_storage_key}"
else if array_buffer
actx.decodeAudioData array_buffer, (buffer)=>
@audio_buffers[clip.audio_id] = buffer
# TODO: um, only hide this if it's all finished?
@InfoBar.hide "Not all tracks have finished loading."
render()
else
fail_warn "An audio clip is missing from storage."
console.warn "Audio data is missing from storage (key #{audio_storage_key}) for clip:", clip
load_clips: (tracks)=>
for track in tracks when track.type is "audio"
for clip in track.clips
@load_clip clip
module.exports = new AudioClipStorage
|
[
{
"context": " should probably handle nested bracket pairs!?! -- Allan'\n 'end': '\\\\}'\n 'endCaptures':\n ",
"end": 10015,
"score": 0.9905258417129517,
"start": 10010,
"tag": "NAME",
"value": "Allan"
}
] | grammars/sql.cson | halcyon-dayz/language-sql | 40 | 'scopeName': 'source.sql'
'name': 'SQL'
'fileTypes': [
'ddl'
'dml'
'dsql'
'hql'
'pgsql'
'psql'
'q'
'sql'
]
'patterns': [
{
'include': '#comments'
}
{
'captures':
'1':
'name': 'keyword.other.create.sql'
'2':
'name': 'keyword.other.sql'
'4':
'name': 'keyword.other.DML.sql'
'6':
'name': 'entity.name.function.sql'
'match': '(?i:^\\s*(create(?:\\s+or\\s+replace)?)\\s+(aggregate|conversion|database|domain|function|group|(unique\\s+)?index|language|operator class|operator|rule|schema|sequence|table|tablespace|trigger|type|user|view)\\b(?:\\s+(if\\s+not\\s+exists)\\b)?)(?:\\s+([\'"`]?)(\\w+)\\5)?'
'name': 'meta.create.sql'
}
{
'captures':
'1':
'name': 'keyword.other.drop.sql'
'2':
'name': 'keyword.other.sql'
'3':
'name': 'keyword.other.DML.sql'
'5':
'name': 'entity.name.function.sql'
'match': '(?i:^\\s*(drop)\\s+(aggregate|check|constraint|conversion|database|domain|function|group|index|language|operator class|operator|rule|schema|sequence|table|tablespace|trigger|type|user|view)\\b(?:\\s+(if\\s+exists)\\b)?)(?:\\s+([\'"`]?)(\\w+)\\4)?'
'name': 'meta.drop.sql'
}
{
'captures':
'1':
'name': 'keyword.other.create.sql'
'2':
'name': 'keyword.other.sql'
'match': '(?i:^\\s*(add)\\s+(check|constraint|(?:foreign|primary)\\s+key))'
'name': 'meta.add.sql'
}
{
'captures':
'1':
'name': 'keyword.other.drop.sql'
'2':
'name': 'keyword.other.table.sql'
'3':
'name': 'entity.name.function.sql'
'4':
'name': 'keyword.other.cascade.sql'
'match': '(?i:\\s*(drop)\\s+(table)\\s+(\\w+)(\\s+cascade)?\\b)'
'name': 'meta.drop.sql'
}
{
'captures':
'1':
'name': 'keyword.other.create.sql'
'2':
'name': 'keyword.other.table.sql'
'3':
'name': 'entity.name.function.sql'
'4':
'name': 'keyword.other.cascade.sql'
'match': '(?i:\\s*(truncate)\\s+(table)\\s+(\\w+)(\\s+cascade)?\\b)'
'name': 'meta.truncate.sql'
}
{
'captures':
'1':
'name': 'keyword.other.create.sql'
'2':
'name': 'keyword.other.table.sql'
'4':
'name': 'entity.name.function.sql'
'match': '(?i:^\\s*(alter)\\s+(aggregate|conversion|database|domain|function|group|index|language|operator class|operator|rule|schema|sequence|table|tablespace|trigger|type|user|view)\\s+([\'"`]?)(\\w+)\\3)'
'name': 'meta.alter.sql'
}
{
'match': '(?i)\\b(bigserial|boolean|box|bytea|cidr|circle|date|datetime|datetime2|double\\s+precision|enum|inet|integer|interval|line|lseg|macaddr|money|oid|path|point|polygon|real|serial|sysdate|text|uniqueidentifier)\\b'
'name': 'storage.type.sql'
}
{
'match': '(?i)\\b(bigint|bit(?:\\s+varying)?|n?char|character(?:\\s+varying)?|float|int|number|smallint|time(?:stamp)?tz|tinyint|n?varchar\\d?)\\b(?:\\s*(\\()\\s*(\\d*)\\s*(\\)))?'
'captures':
'1':
'name': 'storage.type.sql'
'2':
'name': 'punctuation.definition.parameters.bracket.round.begin.sql'
'3':
'name': 'constant.numeric.sql'
'4':
'name': 'punctuation.definition.parameters.bracket.round.end.sql'
}
{
'match': '(?i)\\b(numeric|decimal)\\b(?:\\s*(\\()\\s*(\\d*)(?:\\s*(,)\\s*(\\d*))?\\s*(\\)))?'
'captures':
'1':
'name': 'storage.type.sql'
'2':
'name': 'punctuation.definition.parameters.bracket.round.begin.sql'
'3':
'name': 'constant.numeric.sql'
'4':
'name': 'punctuation.separator.parameters.comma.sql'
'5':
'name': 'constant.numeric.sql'
'6':
'name': 'punctuation.definition.parameters.bracket.round.end.sql'
}
{
'match': '(?i)\\b(time(?:stamp)?)\\b(?:\\s*(\\()\\s*(\\d*)\\s*(\\)))?(?:\\s*(with(?:out)?\\s+time\\s+zone)\\b)?'
'captures':
'1':
'name': 'storage.type.sql'
'2':
'name': 'punctuation.definition.parameters.bracket.round.begin.sql'
'3':
'name': 'constant.numeric.sql'
'4':
'name': 'punctuation.definition.parameters.bracket.round.end.sql'
'5':
'name': 'storage.type.sql'
}
{
'match': '(?i:\\b((?:primary|foreign)\\s+key|references|on\\s+delete(\\s+cascade)?|check|constraint|unique|default)\\b)'
'name': 'storage.modifier.sql'
}
{
'match': '\\b(\\d+\\.\\d+)|(\\d+\\.)|(\\.\\d+)|\\b(\\d+)\\b'
'name': 'constant.numeric.sql'
}
{
'match': '(?i:\\b(select(\\s+distinct)?|insert\\s+(ignore\\s+)?into|update|delete|from|use|declare|set|where|group\\s+by|or|like|between|and|(union|except|intersect)(\\s+all)?|having|order\\s+by|partition\\s+by|limit|offset|(inner|cross)\\s+join|join|straight_join|(left|right)(\\s+outer)?\\s+join|natural(\\s+(left|right)(\\s+outer)?)?\\s+join|using|regexp|rlike|with|exists)\\b)'
'name': 'keyword.other.DML.sql'
}
{
'match': '(?i:\\b(on|(is\\s+)?(not\\s+)?null|auto_increment)\\b)'
'name': 'keyword.other.DDL.create.II.sql'
}
{
'match': '(?i:\\b(values|go|into|exec|openquery)\\b)'
'name': 'keyword.other.DML.II.sql'
}
{
'match': '(?i:\\b(begin(\\s+work)?|start\\s+transaction|commit(\\s+work)?|rollback(\\s+work)?)\\b)'
'name': 'keyword.other.LUW.sql'
}
{
'match': '(?i:\\b(grant(\\swith\\sgrant\\soption)?|revoke)\\b)'
'name': 'keyword.other.authorization.sql'
}
{
'match': '(?i:(\\bnot\\s+)?\\bin\\b)'
'name': 'keyword.other.data-integrity.sql'
}
{
'match': '(?i:\\bnot\\b)'
'name': 'keyword.other.not.sql'
}
{
'match': '(?i:^\\s*(comment\\s+on\\s+(table|column|aggregate|constraint|database|domain|function|index|operator|rule|schema|sequence|trigger|type|view))\\s+.*?\\s+(is)\\s+)'
'name': 'keyword.other.object-comments.sql'
}
{
'match': '(?i)\\bAS\\b'
'name': 'keyword.other.alias.sql'
}
{
'match': '(?i)\\b(DESC|ASC)\\b'
'name': 'keyword.other.order.sql'
}
{
'match': '(?i)\\b(case|when|then|else|end)\\b'
'name': 'keyword.other.case.sql'
}
{
'match': '(?i)\\b(coalesce|nvl|nullif)\\b'
'name': 'keyword.other.conditional.sql'
}
{
'match': '\\*'
'name': 'keyword.operator.star.sql'
}
{
'match': '[!<>]?=|<>|<|>'
'name': 'keyword.operator.comparison.sql'
}
{
'match': '-|\\+|/'
'name': 'keyword.operator.math.sql'
}
{
'match': '\\|\\|'
'name': 'keyword.operator.concatenator.sql'
}
{
'comment': 'List of SQL99 built-in functions from http://www.oreilly.com/catalog/sqlnut/chapter/ch04.html'
'match': '(?i)\\b(CURRENT_(DATE|TIME(STAMP)?|USER)|(SESSION|SYSTEM)_USER|EXTRACT)\\b'
'name': 'support.function.scalar.sql'
}
{
'match': '(?i)\\b(ABS|ACOS|ASIN|ATAN|ATAN2|CEIL|CEILING|CONV|COS|COT|CRC32|DEGREES|EXP|FLOOR|LN|LOG|LOG10|LOG2|MOD|PI|POW|POWER|RADIANS|RAND|ROUND|SIGN|SIN|SQRT|TAN|TRUNCATE)\\b'
'name': 'support.function.math.sql'
}
{
'comment': 'List of SQL99 built-in functions from http://www.oreilly.com/catalog/sqlnut/chapter/ch04.html'
'match': '(?i)\\b(AVG|COUNT|MIN|MAX|SUM)(?=\\s*\\()'
'name': 'support.function.aggregate.sql'
}
{
'match': '(?i)\\b(CONCATENATE|CONVERT|LOWER|SUBSTRING|TRANSLATE|TRIM|UPPER)\\b'
'name': 'support.function.string.sql'
}
{
'match': '(?i)\\b(ROW_NUMBER|RANK|DENSE_RANK|PERCENT_RANK|CUME_DIST|NTILE|LAG|LEAD|FIRST_VALUE|LAST_VALUE|NTH_VALUE|OVER)\\b'
'name': 'support.function.window.sql'
}
{
'captures':
'1':
'name': 'constant.other.database-name.sql'
'2':
'name': 'punctuation.separator.period.sql'
'3':
'name': 'constant.other.table-name.sql'
'match': '(\\w+?)(\\.)(\\w+)'
}
{
'include': '#strings'
}
{
'include': '#regexps'
}
{
'include': '#punctuation'
}
]
'repository':
'comments':
'patterns': [
{
'begin': '--'
'beginCaptures':
'0':
'name': 'punctuation.definition.comment.sql'
'end': '$'
'name': 'comment.line.double-dash.sql'
}
{
'begin': '/\\*'
'beginCaptures':
'0':
'name': 'punctuation.definition.comment.sql'
'end': '\\*/'
'endCaptures':
'0':
'name': 'punctuation.definition.comment.sql'
'name': 'comment.block.sql'
}
]
'punctuation':
'patterns': [
{
'begin': '\\('
'end': '\\)'
'beginCaptures':
'0':
'name': 'punctuation.definition.section.bracket.round.begin.sql'
'endCaptures':
'0':
'name': 'punctuation.definition.section.bracket.round.end.sql'
'patterns': [
{
'include': '$self'
}
]
}
{
'match': '\\)'
'name': 'punctuation.unmatched.bracket.round.end.sql'
}
{
'match': ','
'name': 'punctuation.separator.comma.sql'
}
{
'match': '\\.'
'name': 'punctuation.separator.period.sql'
}
{
'match': ';'
'name': 'punctuation.terminator.statement.semicolon.sql'
}
]
'regexps':
'patterns': [
{
'begin': '/(?=\\S.*/)'
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.sql'
'end': '/'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.sql'
'name': 'string.regexp.sql'
'patterns': [
{
'include': '#string_interpolation'
}
{
'match': '\\\\/'
'name': 'constant.character.escape.slash.sql'
}
]
}
{
'begin': '%r\\{'
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.sql'
'comment': 'We should probably handle nested bracket pairs!?! -- Allan'
'end': '\\}'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.sql'
'name': 'string.regexp.modr.sql'
'patterns': [
{
'include': '#string_interpolation'
}
]
}
]
'string_escape':
'match': '\\\\.'
'name': 'constant.character.escape.sql'
'string_interpolation':
'captures':
'1':
'name': 'punctuation.definition.string.end.sql'
'match': '(#\\{)([^\\}]*)(\\})'
'name': 'string.interpolated.sql'
'strings':
'patterns': [
{
'captures':
'1':
'name': 'punctuation.definition.string.begin.sql'
'2':
'name': 'punctuation.definition.string.end.sql'
'comment': 'this is faster than the next begin/end rule since sub-pattern will match till end-of-line and SQL files tend to have very long lines.'
'match': '(\')[^\'\\\\]*(\')'
'name': 'string.quoted.single.sql'
}
{
'begin': '\''
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.sql'
'end': '\''
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.sql'
'name': 'string.quoted.single.sql'
'patterns': [
{
'include': '#string_escape'
}
]
}
{
'captures':
'1':
'name': 'punctuation.definition.string.begin.sql'
'2':
'name': 'punctuation.definition.string.end.sql'
'comment': 'this is faster than the next begin/end rule since sub-pattern will match till end-of-line and SQL files tend to have very long lines.'
'match': '(`)[^`\\\\]*(`)'
'name': 'string.quoted.other.backtick.sql'
}
{
'begin': '`'
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.sql'
'end': '`'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.sql'
'name': 'string.quoted.other.backtick.sql'
'patterns': [
{
'include': '#string_escape'
}
]
}
{
'captures':
'1':
'name': 'punctuation.definition.string.begin.sql'
'2':
'name': 'punctuation.definition.string.end.sql'
'comment': 'this is faster than the next begin/end rule since sub-pattern will match till end-of-line and SQL files tend to have very long lines.'
'match': '(")[^"#]*(")'
'name': 'string.quoted.double.sql'
}
{
'begin': '"'
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.sql'
'end': '"'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.sql'
'name': 'string.quoted.double.sql'
'patterns': [
{
'include': '#string_interpolation'
}
]
}
{
'begin': '%\\{'
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.sql'
'end': '\\}'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.sql'
'name': 'string.other.quoted.brackets.sql'
'patterns': [
{
'include': '#string_interpolation'
}
]
}
]
| 219766 | 'scopeName': 'source.sql'
'name': 'SQL'
'fileTypes': [
'ddl'
'dml'
'dsql'
'hql'
'pgsql'
'psql'
'q'
'sql'
]
'patterns': [
{
'include': '#comments'
}
{
'captures':
'1':
'name': 'keyword.other.create.sql'
'2':
'name': 'keyword.other.sql'
'4':
'name': 'keyword.other.DML.sql'
'6':
'name': 'entity.name.function.sql'
'match': '(?i:^\\s*(create(?:\\s+or\\s+replace)?)\\s+(aggregate|conversion|database|domain|function|group|(unique\\s+)?index|language|operator class|operator|rule|schema|sequence|table|tablespace|trigger|type|user|view)\\b(?:\\s+(if\\s+not\\s+exists)\\b)?)(?:\\s+([\'"`]?)(\\w+)\\5)?'
'name': 'meta.create.sql'
}
{
'captures':
'1':
'name': 'keyword.other.drop.sql'
'2':
'name': 'keyword.other.sql'
'3':
'name': 'keyword.other.DML.sql'
'5':
'name': 'entity.name.function.sql'
'match': '(?i:^\\s*(drop)\\s+(aggregate|check|constraint|conversion|database|domain|function|group|index|language|operator class|operator|rule|schema|sequence|table|tablespace|trigger|type|user|view)\\b(?:\\s+(if\\s+exists)\\b)?)(?:\\s+([\'"`]?)(\\w+)\\4)?'
'name': 'meta.drop.sql'
}
{
'captures':
'1':
'name': 'keyword.other.create.sql'
'2':
'name': 'keyword.other.sql'
'match': '(?i:^\\s*(add)\\s+(check|constraint|(?:foreign|primary)\\s+key))'
'name': 'meta.add.sql'
}
{
'captures':
'1':
'name': 'keyword.other.drop.sql'
'2':
'name': 'keyword.other.table.sql'
'3':
'name': 'entity.name.function.sql'
'4':
'name': 'keyword.other.cascade.sql'
'match': '(?i:\\s*(drop)\\s+(table)\\s+(\\w+)(\\s+cascade)?\\b)'
'name': 'meta.drop.sql'
}
{
'captures':
'1':
'name': 'keyword.other.create.sql'
'2':
'name': 'keyword.other.table.sql'
'3':
'name': 'entity.name.function.sql'
'4':
'name': 'keyword.other.cascade.sql'
'match': '(?i:\\s*(truncate)\\s+(table)\\s+(\\w+)(\\s+cascade)?\\b)'
'name': 'meta.truncate.sql'
}
{
'captures':
'1':
'name': 'keyword.other.create.sql'
'2':
'name': 'keyword.other.table.sql'
'4':
'name': 'entity.name.function.sql'
'match': '(?i:^\\s*(alter)\\s+(aggregate|conversion|database|domain|function|group|index|language|operator class|operator|rule|schema|sequence|table|tablespace|trigger|type|user|view)\\s+([\'"`]?)(\\w+)\\3)'
'name': 'meta.alter.sql'
}
{
'match': '(?i)\\b(bigserial|boolean|box|bytea|cidr|circle|date|datetime|datetime2|double\\s+precision|enum|inet|integer|interval|line|lseg|macaddr|money|oid|path|point|polygon|real|serial|sysdate|text|uniqueidentifier)\\b'
'name': 'storage.type.sql'
}
{
'match': '(?i)\\b(bigint|bit(?:\\s+varying)?|n?char|character(?:\\s+varying)?|float|int|number|smallint|time(?:stamp)?tz|tinyint|n?varchar\\d?)\\b(?:\\s*(\\()\\s*(\\d*)\\s*(\\)))?'
'captures':
'1':
'name': 'storage.type.sql'
'2':
'name': 'punctuation.definition.parameters.bracket.round.begin.sql'
'3':
'name': 'constant.numeric.sql'
'4':
'name': 'punctuation.definition.parameters.bracket.round.end.sql'
}
{
'match': '(?i)\\b(numeric|decimal)\\b(?:\\s*(\\()\\s*(\\d*)(?:\\s*(,)\\s*(\\d*))?\\s*(\\)))?'
'captures':
'1':
'name': 'storage.type.sql'
'2':
'name': 'punctuation.definition.parameters.bracket.round.begin.sql'
'3':
'name': 'constant.numeric.sql'
'4':
'name': 'punctuation.separator.parameters.comma.sql'
'5':
'name': 'constant.numeric.sql'
'6':
'name': 'punctuation.definition.parameters.bracket.round.end.sql'
}
{
'match': '(?i)\\b(time(?:stamp)?)\\b(?:\\s*(\\()\\s*(\\d*)\\s*(\\)))?(?:\\s*(with(?:out)?\\s+time\\s+zone)\\b)?'
'captures':
'1':
'name': 'storage.type.sql'
'2':
'name': 'punctuation.definition.parameters.bracket.round.begin.sql'
'3':
'name': 'constant.numeric.sql'
'4':
'name': 'punctuation.definition.parameters.bracket.round.end.sql'
'5':
'name': 'storage.type.sql'
}
{
'match': '(?i:\\b((?:primary|foreign)\\s+key|references|on\\s+delete(\\s+cascade)?|check|constraint|unique|default)\\b)'
'name': 'storage.modifier.sql'
}
{
'match': '\\b(\\d+\\.\\d+)|(\\d+\\.)|(\\.\\d+)|\\b(\\d+)\\b'
'name': 'constant.numeric.sql'
}
{
'match': '(?i:\\b(select(\\s+distinct)?|insert\\s+(ignore\\s+)?into|update|delete|from|use|declare|set|where|group\\s+by|or|like|between|and|(union|except|intersect)(\\s+all)?|having|order\\s+by|partition\\s+by|limit|offset|(inner|cross)\\s+join|join|straight_join|(left|right)(\\s+outer)?\\s+join|natural(\\s+(left|right)(\\s+outer)?)?\\s+join|using|regexp|rlike|with|exists)\\b)'
'name': 'keyword.other.DML.sql'
}
{
'match': '(?i:\\b(on|(is\\s+)?(not\\s+)?null|auto_increment)\\b)'
'name': 'keyword.other.DDL.create.II.sql'
}
{
'match': '(?i:\\b(values|go|into|exec|openquery)\\b)'
'name': 'keyword.other.DML.II.sql'
}
{
'match': '(?i:\\b(begin(\\s+work)?|start\\s+transaction|commit(\\s+work)?|rollback(\\s+work)?)\\b)'
'name': 'keyword.other.LUW.sql'
}
{
'match': '(?i:\\b(grant(\\swith\\sgrant\\soption)?|revoke)\\b)'
'name': 'keyword.other.authorization.sql'
}
{
'match': '(?i:(\\bnot\\s+)?\\bin\\b)'
'name': 'keyword.other.data-integrity.sql'
}
{
'match': '(?i:\\bnot\\b)'
'name': 'keyword.other.not.sql'
}
{
'match': '(?i:^\\s*(comment\\s+on\\s+(table|column|aggregate|constraint|database|domain|function|index|operator|rule|schema|sequence|trigger|type|view))\\s+.*?\\s+(is)\\s+)'
'name': 'keyword.other.object-comments.sql'
}
{
'match': '(?i)\\bAS\\b'
'name': 'keyword.other.alias.sql'
}
{
'match': '(?i)\\b(DESC|ASC)\\b'
'name': 'keyword.other.order.sql'
}
{
'match': '(?i)\\b(case|when|then|else|end)\\b'
'name': 'keyword.other.case.sql'
}
{
'match': '(?i)\\b(coalesce|nvl|nullif)\\b'
'name': 'keyword.other.conditional.sql'
}
{
'match': '\\*'
'name': 'keyword.operator.star.sql'
}
{
'match': '[!<>]?=|<>|<|>'
'name': 'keyword.operator.comparison.sql'
}
{
'match': '-|\\+|/'
'name': 'keyword.operator.math.sql'
}
{
'match': '\\|\\|'
'name': 'keyword.operator.concatenator.sql'
}
{
'comment': 'List of SQL99 built-in functions from http://www.oreilly.com/catalog/sqlnut/chapter/ch04.html'
'match': '(?i)\\b(CURRENT_(DATE|TIME(STAMP)?|USER)|(SESSION|SYSTEM)_USER|EXTRACT)\\b'
'name': 'support.function.scalar.sql'
}
{
'match': '(?i)\\b(ABS|ACOS|ASIN|ATAN|ATAN2|CEIL|CEILING|CONV|COS|COT|CRC32|DEGREES|EXP|FLOOR|LN|LOG|LOG10|LOG2|MOD|PI|POW|POWER|RADIANS|RAND|ROUND|SIGN|SIN|SQRT|TAN|TRUNCATE)\\b'
'name': 'support.function.math.sql'
}
{
'comment': 'List of SQL99 built-in functions from http://www.oreilly.com/catalog/sqlnut/chapter/ch04.html'
'match': '(?i)\\b(AVG|COUNT|MIN|MAX|SUM)(?=\\s*\\()'
'name': 'support.function.aggregate.sql'
}
{
'match': '(?i)\\b(CONCATENATE|CONVERT|LOWER|SUBSTRING|TRANSLATE|TRIM|UPPER)\\b'
'name': 'support.function.string.sql'
}
{
'match': '(?i)\\b(ROW_NUMBER|RANK|DENSE_RANK|PERCENT_RANK|CUME_DIST|NTILE|LAG|LEAD|FIRST_VALUE|LAST_VALUE|NTH_VALUE|OVER)\\b'
'name': 'support.function.window.sql'
}
{
'captures':
'1':
'name': 'constant.other.database-name.sql'
'2':
'name': 'punctuation.separator.period.sql'
'3':
'name': 'constant.other.table-name.sql'
'match': '(\\w+?)(\\.)(\\w+)'
}
{
'include': '#strings'
}
{
'include': '#regexps'
}
{
'include': '#punctuation'
}
]
'repository':
'comments':
'patterns': [
{
'begin': '--'
'beginCaptures':
'0':
'name': 'punctuation.definition.comment.sql'
'end': '$'
'name': 'comment.line.double-dash.sql'
}
{
'begin': '/\\*'
'beginCaptures':
'0':
'name': 'punctuation.definition.comment.sql'
'end': '\\*/'
'endCaptures':
'0':
'name': 'punctuation.definition.comment.sql'
'name': 'comment.block.sql'
}
]
'punctuation':
'patterns': [
{
'begin': '\\('
'end': '\\)'
'beginCaptures':
'0':
'name': 'punctuation.definition.section.bracket.round.begin.sql'
'endCaptures':
'0':
'name': 'punctuation.definition.section.bracket.round.end.sql'
'patterns': [
{
'include': '$self'
}
]
}
{
'match': '\\)'
'name': 'punctuation.unmatched.bracket.round.end.sql'
}
{
'match': ','
'name': 'punctuation.separator.comma.sql'
}
{
'match': '\\.'
'name': 'punctuation.separator.period.sql'
}
{
'match': ';'
'name': 'punctuation.terminator.statement.semicolon.sql'
}
]
'regexps':
'patterns': [
{
'begin': '/(?=\\S.*/)'
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.sql'
'end': '/'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.sql'
'name': 'string.regexp.sql'
'patterns': [
{
'include': '#string_interpolation'
}
{
'match': '\\\\/'
'name': 'constant.character.escape.slash.sql'
}
]
}
{
'begin': '%r\\{'
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.sql'
'comment': 'We should probably handle nested bracket pairs!?! -- <NAME>'
'end': '\\}'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.sql'
'name': 'string.regexp.modr.sql'
'patterns': [
{
'include': '#string_interpolation'
}
]
}
]
'string_escape':
'match': '\\\\.'
'name': 'constant.character.escape.sql'
'string_interpolation':
'captures':
'1':
'name': 'punctuation.definition.string.end.sql'
'match': '(#\\{)([^\\}]*)(\\})'
'name': 'string.interpolated.sql'
'strings':
'patterns': [
{
'captures':
'1':
'name': 'punctuation.definition.string.begin.sql'
'2':
'name': 'punctuation.definition.string.end.sql'
'comment': 'this is faster than the next begin/end rule since sub-pattern will match till end-of-line and SQL files tend to have very long lines.'
'match': '(\')[^\'\\\\]*(\')'
'name': 'string.quoted.single.sql'
}
{
'begin': '\''
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.sql'
'end': '\''
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.sql'
'name': 'string.quoted.single.sql'
'patterns': [
{
'include': '#string_escape'
}
]
}
{
'captures':
'1':
'name': 'punctuation.definition.string.begin.sql'
'2':
'name': 'punctuation.definition.string.end.sql'
'comment': 'this is faster than the next begin/end rule since sub-pattern will match till end-of-line and SQL files tend to have very long lines.'
'match': '(`)[^`\\\\]*(`)'
'name': 'string.quoted.other.backtick.sql'
}
{
'begin': '`'
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.sql'
'end': '`'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.sql'
'name': 'string.quoted.other.backtick.sql'
'patterns': [
{
'include': '#string_escape'
}
]
}
{
'captures':
'1':
'name': 'punctuation.definition.string.begin.sql'
'2':
'name': 'punctuation.definition.string.end.sql'
'comment': 'this is faster than the next begin/end rule since sub-pattern will match till end-of-line and SQL files tend to have very long lines.'
'match': '(")[^"#]*(")'
'name': 'string.quoted.double.sql'
}
{
'begin': '"'
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.sql'
'end': '"'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.sql'
'name': 'string.quoted.double.sql'
'patterns': [
{
'include': '#string_interpolation'
}
]
}
{
'begin': '%\\{'
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.sql'
'end': '\\}'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.sql'
'name': 'string.other.quoted.brackets.sql'
'patterns': [
{
'include': '#string_interpolation'
}
]
}
]
| true | 'scopeName': 'source.sql'
'name': 'SQL'
'fileTypes': [
'ddl'
'dml'
'dsql'
'hql'
'pgsql'
'psql'
'q'
'sql'
]
'patterns': [
{
'include': '#comments'
}
{
'captures':
'1':
'name': 'keyword.other.create.sql'
'2':
'name': 'keyword.other.sql'
'4':
'name': 'keyword.other.DML.sql'
'6':
'name': 'entity.name.function.sql'
'match': '(?i:^\\s*(create(?:\\s+or\\s+replace)?)\\s+(aggregate|conversion|database|domain|function|group|(unique\\s+)?index|language|operator class|operator|rule|schema|sequence|table|tablespace|trigger|type|user|view)\\b(?:\\s+(if\\s+not\\s+exists)\\b)?)(?:\\s+([\'"`]?)(\\w+)\\5)?'
'name': 'meta.create.sql'
}
{
'captures':
'1':
'name': 'keyword.other.drop.sql'
'2':
'name': 'keyword.other.sql'
'3':
'name': 'keyword.other.DML.sql'
'5':
'name': 'entity.name.function.sql'
'match': '(?i:^\\s*(drop)\\s+(aggregate|check|constraint|conversion|database|domain|function|group|index|language|operator class|operator|rule|schema|sequence|table|tablespace|trigger|type|user|view)\\b(?:\\s+(if\\s+exists)\\b)?)(?:\\s+([\'"`]?)(\\w+)\\4)?'
'name': 'meta.drop.sql'
}
{
'captures':
'1':
'name': 'keyword.other.create.sql'
'2':
'name': 'keyword.other.sql'
'match': '(?i:^\\s*(add)\\s+(check|constraint|(?:foreign|primary)\\s+key))'
'name': 'meta.add.sql'
}
{
'captures':
'1':
'name': 'keyword.other.drop.sql'
'2':
'name': 'keyword.other.table.sql'
'3':
'name': 'entity.name.function.sql'
'4':
'name': 'keyword.other.cascade.sql'
'match': '(?i:\\s*(drop)\\s+(table)\\s+(\\w+)(\\s+cascade)?\\b)'
'name': 'meta.drop.sql'
}
{
'captures':
'1':
'name': 'keyword.other.create.sql'
'2':
'name': 'keyword.other.table.sql'
'3':
'name': 'entity.name.function.sql'
'4':
'name': 'keyword.other.cascade.sql'
'match': '(?i:\\s*(truncate)\\s+(table)\\s+(\\w+)(\\s+cascade)?\\b)'
'name': 'meta.truncate.sql'
}
{
'captures':
'1':
'name': 'keyword.other.create.sql'
'2':
'name': 'keyword.other.table.sql'
'4':
'name': 'entity.name.function.sql'
'match': '(?i:^\\s*(alter)\\s+(aggregate|conversion|database|domain|function|group|index|language|operator class|operator|rule|schema|sequence|table|tablespace|trigger|type|user|view)\\s+([\'"`]?)(\\w+)\\3)'
'name': 'meta.alter.sql'
}
{
'match': '(?i)\\b(bigserial|boolean|box|bytea|cidr|circle|date|datetime|datetime2|double\\s+precision|enum|inet|integer|interval|line|lseg|macaddr|money|oid|path|point|polygon|real|serial|sysdate|text|uniqueidentifier)\\b'
'name': 'storage.type.sql'
}
{
'match': '(?i)\\b(bigint|bit(?:\\s+varying)?|n?char|character(?:\\s+varying)?|float|int|number|smallint|time(?:stamp)?tz|tinyint|n?varchar\\d?)\\b(?:\\s*(\\()\\s*(\\d*)\\s*(\\)))?'
'captures':
'1':
'name': 'storage.type.sql'
'2':
'name': 'punctuation.definition.parameters.bracket.round.begin.sql'
'3':
'name': 'constant.numeric.sql'
'4':
'name': 'punctuation.definition.parameters.bracket.round.end.sql'
}
{
'match': '(?i)\\b(numeric|decimal)\\b(?:\\s*(\\()\\s*(\\d*)(?:\\s*(,)\\s*(\\d*))?\\s*(\\)))?'
'captures':
'1':
'name': 'storage.type.sql'
'2':
'name': 'punctuation.definition.parameters.bracket.round.begin.sql'
'3':
'name': 'constant.numeric.sql'
'4':
'name': 'punctuation.separator.parameters.comma.sql'
'5':
'name': 'constant.numeric.sql'
'6':
'name': 'punctuation.definition.parameters.bracket.round.end.sql'
}
{
'match': '(?i)\\b(time(?:stamp)?)\\b(?:\\s*(\\()\\s*(\\d*)\\s*(\\)))?(?:\\s*(with(?:out)?\\s+time\\s+zone)\\b)?'
'captures':
'1':
'name': 'storage.type.sql'
'2':
'name': 'punctuation.definition.parameters.bracket.round.begin.sql'
'3':
'name': 'constant.numeric.sql'
'4':
'name': 'punctuation.definition.parameters.bracket.round.end.sql'
'5':
'name': 'storage.type.sql'
}
{
'match': '(?i:\\b((?:primary|foreign)\\s+key|references|on\\s+delete(\\s+cascade)?|check|constraint|unique|default)\\b)'
'name': 'storage.modifier.sql'
}
{
'match': '\\b(\\d+\\.\\d+)|(\\d+\\.)|(\\.\\d+)|\\b(\\d+)\\b'
'name': 'constant.numeric.sql'
}
{
'match': '(?i:\\b(select(\\s+distinct)?|insert\\s+(ignore\\s+)?into|update|delete|from|use|declare|set|where|group\\s+by|or|like|between|and|(union|except|intersect)(\\s+all)?|having|order\\s+by|partition\\s+by|limit|offset|(inner|cross)\\s+join|join|straight_join|(left|right)(\\s+outer)?\\s+join|natural(\\s+(left|right)(\\s+outer)?)?\\s+join|using|regexp|rlike|with|exists)\\b)'
'name': 'keyword.other.DML.sql'
}
{
'match': '(?i:\\b(on|(is\\s+)?(not\\s+)?null|auto_increment)\\b)'
'name': 'keyword.other.DDL.create.II.sql'
}
{
'match': '(?i:\\b(values|go|into|exec|openquery)\\b)'
'name': 'keyword.other.DML.II.sql'
}
{
'match': '(?i:\\b(begin(\\s+work)?|start\\s+transaction|commit(\\s+work)?|rollback(\\s+work)?)\\b)'
'name': 'keyword.other.LUW.sql'
}
{
'match': '(?i:\\b(grant(\\swith\\sgrant\\soption)?|revoke)\\b)'
'name': 'keyword.other.authorization.sql'
}
{
'match': '(?i:(\\bnot\\s+)?\\bin\\b)'
'name': 'keyword.other.data-integrity.sql'
}
{
'match': '(?i:\\bnot\\b)'
'name': 'keyword.other.not.sql'
}
{
'match': '(?i:^\\s*(comment\\s+on\\s+(table|column|aggregate|constraint|database|domain|function|index|operator|rule|schema|sequence|trigger|type|view))\\s+.*?\\s+(is)\\s+)'
'name': 'keyword.other.object-comments.sql'
}
{
'match': '(?i)\\bAS\\b'
'name': 'keyword.other.alias.sql'
}
{
'match': '(?i)\\b(DESC|ASC)\\b'
'name': 'keyword.other.order.sql'
}
{
'match': '(?i)\\b(case|when|then|else|end)\\b'
'name': 'keyword.other.case.sql'
}
{
'match': '(?i)\\b(coalesce|nvl|nullif)\\b'
'name': 'keyword.other.conditional.sql'
}
{
'match': '\\*'
'name': 'keyword.operator.star.sql'
}
{
'match': '[!<>]?=|<>|<|>'
'name': 'keyword.operator.comparison.sql'
}
{
'match': '-|\\+|/'
'name': 'keyword.operator.math.sql'
}
{
'match': '\\|\\|'
'name': 'keyword.operator.concatenator.sql'
}
{
'comment': 'List of SQL99 built-in functions from http://www.oreilly.com/catalog/sqlnut/chapter/ch04.html'
'match': '(?i)\\b(CURRENT_(DATE|TIME(STAMP)?|USER)|(SESSION|SYSTEM)_USER|EXTRACT)\\b'
'name': 'support.function.scalar.sql'
}
{
'match': '(?i)\\b(ABS|ACOS|ASIN|ATAN|ATAN2|CEIL|CEILING|CONV|COS|COT|CRC32|DEGREES|EXP|FLOOR|LN|LOG|LOG10|LOG2|MOD|PI|POW|POWER|RADIANS|RAND|ROUND|SIGN|SIN|SQRT|TAN|TRUNCATE)\\b'
'name': 'support.function.math.sql'
}
{
'comment': 'List of SQL99 built-in functions from http://www.oreilly.com/catalog/sqlnut/chapter/ch04.html'
'match': '(?i)\\b(AVG|COUNT|MIN|MAX|SUM)(?=\\s*\\()'
'name': 'support.function.aggregate.sql'
}
{
'match': '(?i)\\b(CONCATENATE|CONVERT|LOWER|SUBSTRING|TRANSLATE|TRIM|UPPER)\\b'
'name': 'support.function.string.sql'
}
{
'match': '(?i)\\b(ROW_NUMBER|RANK|DENSE_RANK|PERCENT_RANK|CUME_DIST|NTILE|LAG|LEAD|FIRST_VALUE|LAST_VALUE|NTH_VALUE|OVER)\\b'
'name': 'support.function.window.sql'
}
{
'captures':
'1':
'name': 'constant.other.database-name.sql'
'2':
'name': 'punctuation.separator.period.sql'
'3':
'name': 'constant.other.table-name.sql'
'match': '(\\w+?)(\\.)(\\w+)'
}
{
'include': '#strings'
}
{
'include': '#regexps'
}
{
'include': '#punctuation'
}
]
'repository':
'comments':
'patterns': [
{
'begin': '--'
'beginCaptures':
'0':
'name': 'punctuation.definition.comment.sql'
'end': '$'
'name': 'comment.line.double-dash.sql'
}
{
'begin': '/\\*'
'beginCaptures':
'0':
'name': 'punctuation.definition.comment.sql'
'end': '\\*/'
'endCaptures':
'0':
'name': 'punctuation.definition.comment.sql'
'name': 'comment.block.sql'
}
]
'punctuation':
'patterns': [
{
'begin': '\\('
'end': '\\)'
'beginCaptures':
'0':
'name': 'punctuation.definition.section.bracket.round.begin.sql'
'endCaptures':
'0':
'name': 'punctuation.definition.section.bracket.round.end.sql'
'patterns': [
{
'include': '$self'
}
]
}
{
'match': '\\)'
'name': 'punctuation.unmatched.bracket.round.end.sql'
}
{
'match': ','
'name': 'punctuation.separator.comma.sql'
}
{
'match': '\\.'
'name': 'punctuation.separator.period.sql'
}
{
'match': ';'
'name': 'punctuation.terminator.statement.semicolon.sql'
}
]
'regexps':
'patterns': [
{
'begin': '/(?=\\S.*/)'
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.sql'
'end': '/'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.sql'
'name': 'string.regexp.sql'
'patterns': [
{
'include': '#string_interpolation'
}
{
'match': '\\\\/'
'name': 'constant.character.escape.slash.sql'
}
]
}
{
'begin': '%r\\{'
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.sql'
'comment': 'We should probably handle nested bracket pairs!?! -- PI:NAME:<NAME>END_PI'
'end': '\\}'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.sql'
'name': 'string.regexp.modr.sql'
'patterns': [
{
'include': '#string_interpolation'
}
]
}
]
'string_escape':
'match': '\\\\.'
'name': 'constant.character.escape.sql'
'string_interpolation':
'captures':
'1':
'name': 'punctuation.definition.string.end.sql'
'match': '(#\\{)([^\\}]*)(\\})'
'name': 'string.interpolated.sql'
'strings':
'patterns': [
{
'captures':
'1':
'name': 'punctuation.definition.string.begin.sql'
'2':
'name': 'punctuation.definition.string.end.sql'
'comment': 'this is faster than the next begin/end rule since sub-pattern will match till end-of-line and SQL files tend to have very long lines.'
'match': '(\')[^\'\\\\]*(\')'
'name': 'string.quoted.single.sql'
}
{
'begin': '\''
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.sql'
'end': '\''
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.sql'
'name': 'string.quoted.single.sql'
'patterns': [
{
'include': '#string_escape'
}
]
}
{
'captures':
'1':
'name': 'punctuation.definition.string.begin.sql'
'2':
'name': 'punctuation.definition.string.end.sql'
'comment': 'this is faster than the next begin/end rule since sub-pattern will match till end-of-line and SQL files tend to have very long lines.'
'match': '(`)[^`\\\\]*(`)'
'name': 'string.quoted.other.backtick.sql'
}
{
'begin': '`'
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.sql'
'end': '`'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.sql'
'name': 'string.quoted.other.backtick.sql'
'patterns': [
{
'include': '#string_escape'
}
]
}
{
'captures':
'1':
'name': 'punctuation.definition.string.begin.sql'
'2':
'name': 'punctuation.definition.string.end.sql'
'comment': 'this is faster than the next begin/end rule since sub-pattern will match till end-of-line and SQL files tend to have very long lines.'
'match': '(")[^"#]*(")'
'name': 'string.quoted.double.sql'
}
{
'begin': '"'
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.sql'
'end': '"'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.sql'
'name': 'string.quoted.double.sql'
'patterns': [
{
'include': '#string_interpolation'
}
]
}
{
'begin': '%\\{'
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.sql'
'end': '\\}'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.sql'
'name': 'string.other.quoted.brackets.sql'
'patterns': [
{
'include': '#string_interpolation'
}
]
}
]
|
[
{
"context": "kspace')\n expect(credentials.username).toBe('defaultuser')\n expect(credentials.apiKey).toBe('yomama')",
"end": 607,
"score": 0.9995549917221069,
"start": 596,
"tag": "USERNAME",
"value": "defaultuser"
},
{
"context": "aultuser')\n expect(credentials.api... | spec/cloud-credentials-spec.coffee | mtdev2/atom-cloud-sync | 0 | {CloudCredentials, FILENAME} = require '../lib/cloud-credentials'
{File, Directory} = require 'pathwatcher'
path = require 'path'
describe 'CloudCredentials', ->
it "loads from a #{FILENAME} file", ->
root = atom.project.getRootDirectory().getRealPathSync()
f = new File(path.join root, FILENAME)
credentials = null
f.read(false).then (contents) ->
settings = JSON.parse(contents)
credentials = new CloudCredentials(settings)
waitsFor -> credentials?
runs ->
expect(credentials.provider).toBe('rackspace')
expect(credentials.username).toBe('defaultuser')
expect(credentials.apiKey).toBe('yomama')
expect(credentials.region).toBe('DFW')
it 'creates itself from a File', ->
root = atom.project.getRootDirectory().getRealPathSync()
f = new File(path.join, FILENAME)
CloudCredentials.createFrom f, (creds) ->
expect(creds.username).toBe('defaultuser')
it "finds the nearest #{FILENAME} up the directory hierarchy", ->
root = atom.project.getRootDirectory().getRealPathSync()
d = new Directory(path.join root, 'sync-description', 'bar')
credentials = null
CloudCredentials.withNearest d, (err, creds) ->
console.log err if err
credentials = creds
waitsFor -> credentials?
runs ->
expect(credentials.username).toBe('defaultuser')
| 98630 | {CloudCredentials, FILENAME} = require '../lib/cloud-credentials'
{File, Directory} = require 'pathwatcher'
path = require 'path'
describe 'CloudCredentials', ->
it "loads from a #{FILENAME} file", ->
root = atom.project.getRootDirectory().getRealPathSync()
f = new File(path.join root, FILENAME)
credentials = null
f.read(false).then (contents) ->
settings = JSON.parse(contents)
credentials = new CloudCredentials(settings)
waitsFor -> credentials?
runs ->
expect(credentials.provider).toBe('rackspace')
expect(credentials.username).toBe('defaultuser')
expect(credentials.apiKey).toBe('<KEY>')
expect(credentials.region).toBe('DFW')
it 'creates itself from a File', ->
root = atom.project.getRootDirectory().getRealPathSync()
f = new File(path.join, FILENAME)
CloudCredentials.createFrom f, (creds) ->
expect(creds.username).toBe('defaultuser')
it "finds the nearest #{FILENAME} up the directory hierarchy", ->
root = atom.project.getRootDirectory().getRealPathSync()
d = new Directory(path.join root, 'sync-description', 'bar')
credentials = null
CloudCredentials.withNearest d, (err, creds) ->
console.log err if err
credentials = creds
waitsFor -> credentials?
runs ->
expect(credentials.username).toBe('defaultuser')
| true | {CloudCredentials, FILENAME} = require '../lib/cloud-credentials'
{File, Directory} = require 'pathwatcher'
path = require 'path'
describe 'CloudCredentials', ->
it "loads from a #{FILENAME} file", ->
root = atom.project.getRootDirectory().getRealPathSync()
f = new File(path.join root, FILENAME)
credentials = null
f.read(false).then (contents) ->
settings = JSON.parse(contents)
credentials = new CloudCredentials(settings)
waitsFor -> credentials?
runs ->
expect(credentials.provider).toBe('rackspace')
expect(credentials.username).toBe('defaultuser')
expect(credentials.apiKey).toBe('PI:KEY:<KEY>END_PI')
expect(credentials.region).toBe('DFW')
it 'creates itself from a File', ->
root = atom.project.getRootDirectory().getRealPathSync()
f = new File(path.join, FILENAME)
CloudCredentials.createFrom f, (creds) ->
expect(creds.username).toBe('defaultuser')
it "finds the nearest #{FILENAME} up the directory hierarchy", ->
root = atom.project.getRootDirectory().getRealPathSync()
d = new Directory(path.join root, 'sync-description', 'bar')
credentials = null
CloudCredentials.withNearest d, (err, creds) ->
console.log err if err
credentials = creds
waitsFor -> credentials?
runs ->
expect(credentials.username).toBe('defaultuser')
|
[
{
"context": "als & Developers (assets)\nProject: Waaave\nAuthors: Julien Le Coupanec, Valerian Saliou\nCopyright: 2014, Waaave\n###\n\n__ ",
"end": 109,
"score": 0.999859631061554,
"start": 91,
"tag": "NAME",
"value": "Julien Le Coupanec"
},
{
"context": "sets)\nProject: Waaave\nAut... | static/src/assets/tutorial/javascripts/tutorial_view_related_tutorials_developers.coffee | valeriansaliou/waaave-web | 1 | ###
Bundle: Tutorial View Related Tutorials & Developers (assets)
Project: Waaave
Authors: Julien Le Coupanec, Valerian Saliou
Copyright: 2014, Waaave
###
__ = window
class TutorialViewRelatedTutorialsDevelopers
init: ->
try
# Selectors
@_window_sel = $ window
@_document_sel = $ document
@_body_sel = @_document_sel.find '.body'
@_timeline_sel = @_body_sel.find '.timeline'
@_items_sel = @_timeline_sel.find '.items'
@_load_more_btn_sel = @_timeline_sel.find '.show-more'
# States
@_is_fetching = false
catch error
Console.error 'TutorialViewRelatedTutorialsDevelopers.init', error
event_fetch_page: ->
try
@_load_more_btn_sel.find('a.more').click ->
@_fetch_page()
return false
@_window_sel.scroll ->
if (@_load_more_btn_sel isnt null and @_is_fetching isnt true) and
(@_window_sel.scrollTop() >= @_body_sel.height() - @_window_sel.height())
@_fetch_page()
catch error
Console.error 'TutorialViewRelatedTutorialsDevelopers.event_fetch_page', error
_fetch_page: ->
try
if @_load_more_btn_sel is null
Console.warn 'TutorialViewRelatedTutorialsDevelopers._fetch_page', 'Nothing more to load...'
return false
if @_is_fetching is true
Console.info 'TutorialViewRelatedTutorialsDevelopers._fetch_page', 'Already fetching data!'
return false
@_is_fetching = true
load_more_url = @_load_more_btn_sel.attr 'data-url'
if not load_more_url
Console.warn 'TutorialViewRelatedTutorialsDevelopers._fetch_page', 'Looks like there is nothing to load!'
return false
@_load_more_btn_sel.addClass 'loading'
page_id = LayoutPage.get_id()
$.get(
load_more_url,
(data) ->
if page_id isnt LayoutPage.get_id()
return
data_sel = $ data
if data_sel.is '.related_tutorials'
data_items_sel = data_sel.find '.items'
data_more_btn_sel = data_sel.find '.show-more'
data_page_end_sel = data_sel.find '.page-end'
if data_items_sel.size()
@_items_sel.append data_items_sel
if data_more_btn_sel.size()
@_load_more_btn_sel.replaceWith data_more_btn_sel
@_load_more_btn_sel = data_more_btn_sel
@event_fetch_page()
@_load_more_btn_sel.removeClass 'loading'
@_is_fetching = false
else
@_load_more_btn_sel.replaceWith data_page_end_sel
@_load_more_btn_sel = null
else
@_load_more_btn_sel.replaceWith data_page_end_sel
else
Console.error 'TutorialViewRelatedTutorialsDevelopers._fetch_page[async]', "#{data.status}:#{data.message}"
# Notify async system that DOM has been updated
LayoutPage.fire_dom_updated()
)
catch error
Console.error 'TutorialViewRelatedTutorialsDevelopers _fetch_page', error
finally
return true
@TutorialViewRelatedTutorialsDevelopers = new TutorialViewRelatedTutorialsDevelopers
$(document).ready ->
__.TutorialViewRelatedTutorialsDevelopers.init()
__.TutorialViewRelatedTutorialsDevelopers.event_fetch_page()
LayoutRegistry.register_bundle 'TutorialViewRelatedTutorialsDevelopers'
| 163967 | ###
Bundle: Tutorial View Related Tutorials & Developers (assets)
Project: Waaave
Authors: <NAME>, <NAME>
Copyright: 2014, Waaave
###
__ = window
class TutorialViewRelatedTutorialsDevelopers
init: ->
try
# Selectors
@_window_sel = $ window
@_document_sel = $ document
@_body_sel = @_document_sel.find '.body'
@_timeline_sel = @_body_sel.find '.timeline'
@_items_sel = @_timeline_sel.find '.items'
@_load_more_btn_sel = @_timeline_sel.find '.show-more'
# States
@_is_fetching = false
catch error
Console.error 'TutorialViewRelatedTutorialsDevelopers.init', error
event_fetch_page: ->
try
@_load_more_btn_sel.find('a.more').click ->
@_fetch_page()
return false
@_window_sel.scroll ->
if (@_load_more_btn_sel isnt null and @_is_fetching isnt true) and
(@_window_sel.scrollTop() >= @_body_sel.height() - @_window_sel.height())
@_fetch_page()
catch error
Console.error 'TutorialViewRelatedTutorialsDevelopers.event_fetch_page', error
_fetch_page: ->
try
if @_load_more_btn_sel is null
Console.warn 'TutorialViewRelatedTutorialsDevelopers._fetch_page', 'Nothing more to load...'
return false
if @_is_fetching is true
Console.info 'TutorialViewRelatedTutorialsDevelopers._fetch_page', 'Already fetching data!'
return false
@_is_fetching = true
load_more_url = @_load_more_btn_sel.attr 'data-url'
if not load_more_url
Console.warn 'TutorialViewRelatedTutorialsDevelopers._fetch_page', 'Looks like there is nothing to load!'
return false
@_load_more_btn_sel.addClass 'loading'
page_id = LayoutPage.get_id()
$.get(
load_more_url,
(data) ->
if page_id isnt LayoutPage.get_id()
return
data_sel = $ data
if data_sel.is '.related_tutorials'
data_items_sel = data_sel.find '.items'
data_more_btn_sel = data_sel.find '.show-more'
data_page_end_sel = data_sel.find '.page-end'
if data_items_sel.size()
@_items_sel.append data_items_sel
if data_more_btn_sel.size()
@_load_more_btn_sel.replaceWith data_more_btn_sel
@_load_more_btn_sel = data_more_btn_sel
@event_fetch_page()
@_load_more_btn_sel.removeClass 'loading'
@_is_fetching = false
else
@_load_more_btn_sel.replaceWith data_page_end_sel
@_load_more_btn_sel = null
else
@_load_more_btn_sel.replaceWith data_page_end_sel
else
Console.error 'TutorialViewRelatedTutorialsDevelopers._fetch_page[async]', "#{data.status}:#{data.message}"
# Notify async system that DOM has been updated
LayoutPage.fire_dom_updated()
)
catch error
Console.error 'TutorialViewRelatedTutorialsDevelopers _fetch_page', error
finally
return true
@TutorialViewRelatedTutorialsDevelopers = new TutorialViewRelatedTutorialsDevelopers
$(document).ready ->
__.TutorialViewRelatedTutorialsDevelopers.init()
__.TutorialViewRelatedTutorialsDevelopers.event_fetch_page()
LayoutRegistry.register_bundle 'TutorialViewRelatedTutorialsDevelopers'
| true | ###
Bundle: Tutorial View Related Tutorials & Developers (assets)
Project: Waaave
Authors: PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI
Copyright: 2014, Waaave
###
__ = window
class TutorialViewRelatedTutorialsDevelopers
init: ->
try
# Selectors
@_window_sel = $ window
@_document_sel = $ document
@_body_sel = @_document_sel.find '.body'
@_timeline_sel = @_body_sel.find '.timeline'
@_items_sel = @_timeline_sel.find '.items'
@_load_more_btn_sel = @_timeline_sel.find '.show-more'
# States
@_is_fetching = false
catch error
Console.error 'TutorialViewRelatedTutorialsDevelopers.init', error
event_fetch_page: ->
try
@_load_more_btn_sel.find('a.more').click ->
@_fetch_page()
return false
@_window_sel.scroll ->
if (@_load_more_btn_sel isnt null and @_is_fetching isnt true) and
(@_window_sel.scrollTop() >= @_body_sel.height() - @_window_sel.height())
@_fetch_page()
catch error
Console.error 'TutorialViewRelatedTutorialsDevelopers.event_fetch_page', error
_fetch_page: ->
try
if @_load_more_btn_sel is null
Console.warn 'TutorialViewRelatedTutorialsDevelopers._fetch_page', 'Nothing more to load...'
return false
if @_is_fetching is true
Console.info 'TutorialViewRelatedTutorialsDevelopers._fetch_page', 'Already fetching data!'
return false
@_is_fetching = true
load_more_url = @_load_more_btn_sel.attr 'data-url'
if not load_more_url
Console.warn 'TutorialViewRelatedTutorialsDevelopers._fetch_page', 'Looks like there is nothing to load!'
return false
@_load_more_btn_sel.addClass 'loading'
page_id = LayoutPage.get_id()
$.get(
load_more_url,
(data) ->
if page_id isnt LayoutPage.get_id()
return
data_sel = $ data
if data_sel.is '.related_tutorials'
data_items_sel = data_sel.find '.items'
data_more_btn_sel = data_sel.find '.show-more'
data_page_end_sel = data_sel.find '.page-end'
if data_items_sel.size()
@_items_sel.append data_items_sel
if data_more_btn_sel.size()
@_load_more_btn_sel.replaceWith data_more_btn_sel
@_load_more_btn_sel = data_more_btn_sel
@event_fetch_page()
@_load_more_btn_sel.removeClass 'loading'
@_is_fetching = false
else
@_load_more_btn_sel.replaceWith data_page_end_sel
@_load_more_btn_sel = null
else
@_load_more_btn_sel.replaceWith data_page_end_sel
else
Console.error 'TutorialViewRelatedTutorialsDevelopers._fetch_page[async]', "#{data.status}:#{data.message}"
# Notify async system that DOM has been updated
LayoutPage.fire_dom_updated()
)
catch error
Console.error 'TutorialViewRelatedTutorialsDevelopers _fetch_page', error
finally
return true
@TutorialViewRelatedTutorialsDevelopers = new TutorialViewRelatedTutorialsDevelopers
$(document).ready ->
__.TutorialViewRelatedTutorialsDevelopers.init()
__.TutorialViewRelatedTutorialsDevelopers.event_fetch_page()
LayoutRegistry.register_bundle 'TutorialViewRelatedTutorialsDevelopers'
|
[
{
"context": "=================================\n# Copyright 2014 Hatio, Lab.\n# Licensed under The MIT License\n# http://o",
"end": 67,
"score": 0.909584641456604,
"start": 62,
"tag": "NAME",
"value": "Hatio"
}
] | src/spec/SpecRuler.coffee | heartyoh/infopik | 0 | # ==========================================
# Copyright 2014 Hatio, Lab.
# Licensed under The MIT License
# http://opensource.org/licenses/MIT
# ==========================================
define [
'KineticJS'
], (kin) ->
"use strict"
PIXEL_PER_MM = 3.779527559
drawHorizontal = (context) ->
startX = parseInt(this.getAttr('zeropos'))
marginLeft = this.getAttr('margin')[0]
marginRight = this.width() - this.getAttr('margin')[1]
baseY = this.height() - 15
bottomY = this.height()
context.beginPath()
context.moveTo(0, 0)
context.lineTo(0, this.height())
context.lineTo(this.width(), this.height())
context.lineTo(this.width(), 0)
context.lineTo(0, 0)
plusWidth = this.width() - startX
plusCount = Math.ceil(plusWidth / PIXEL_PER_MM)
for i in [0..(plusCount - 1)]
x = startX + i * PIXEL_PER_MM
break if x > marginRight
continue if x < marginLeft
if (i % 10 == 0)
context.moveTo(x, baseY)
context.lineTo(x, bottomY)
else if (i % 5 == 0)
context.moveTo(x, baseY + 8)
context.lineTo(x, bottomY)
else
context.moveTo(x, baseY + 11)
context.lineTo(x, bottomY)
minusWidth = startX
minusCount = Math.floor(minusWidth / PIXEL_PER_MM)
for i in [1..(minusCount - 1)]
x = startX - i * PIXEL_PER_MM
break if x < marginLeft
continue if x > marginRight
if (i % 10 == 0)
context.moveTo(x, baseY)
context.lineTo(x, bottomY)
else if (i % 5 == 0)
context.moveTo(x, baseY + 8)
context.lineTo(x, bottomY)
else
context.moveTo(x, baseY + 11)
context.lineTo(x, bottomY)
context.closePath()
context.fillStrokeShape(this)
for i in [0..(plusCount - 1)] by 10
x = startX + i * PIXEL_PER_MM
break if x > marginRight
continue if x < marginLeft
context.strokeText("#{i / 10}", x + 2, baseY + 10)
for i in [10..(minusCount - 1)] by 10
x = startX - i * PIXEL_PER_MM
break if x < marginLeft
continue if x > marginRight
context.strokeText("-#{i / 10}", x + 2, baseY + 10)
drawVertical = (context) ->
startY = parseInt(this.getAttr('zeropos'))
marginTop = this.getAttr('margin')[0]
marginBottom = this.height() - this.getAttr('margin')[1]
baseX = this.width() - 15
endX = this.width()
context.beginPath()
context.moveTo(0, 0)
context.lineTo(0, this.height())
context.lineTo(this.width(), this.height())
context.lineTo(this.width(), 0)
context.lineTo(0, 0)
plusArea = this.height() - startY
plusCount = Math.ceil(plusArea / PIXEL_PER_MM)
for i in [0..(plusCount - 1)]
y = startY + i * PIXEL_PER_MM
break if y > marginBottom
continue if y < marginTop
if (i % 10 == 0)
context.moveTo(baseX, y)
context.lineTo(endX, y)
else if (i % 5 == 0)
context.moveTo(baseX + 8, y)
context.lineTo(endX, y)
else
context.moveTo(baseX + 11, y)
context.lineTo(endX, y)
minusArea = startY
minusCount = Math.floor(minusArea / PIXEL_PER_MM)
for i in [1..(minusCount - 1)]
y = startY - i * PIXEL_PER_MM
continue if y > marginBottom
break if y < marginTop
if (i % 10 == 0)
context.moveTo(baseX, y)
context.lineTo(endX, y)
else if (i % 5 == 0)
context.moveTo(baseX + 8, y)
context.lineTo(endX, y)
else
context.moveTo(baseX + 11, y)
context.lineTo(endX, y)
context.closePath()
context.fillStrokeShape(this)
for i in [0..(plusCount - 1)] by 10
y = startY + i * PIXEL_PER_MM
break if y > marginBottom
continue if y < marginTop
context.strokeText("#{i / 10}", 1, y + 10)
for i in [10..(minusCount - 1)] by 10
y = startY - i * PIXEL_PER_MM
break if y < marginTop
continue if y > marginBottom
context.strokeText("-#{i / 10}", 1, y + 10)
drawFunc = (context) ->
if this.getAttr('direction') isnt 'vertical'
drawHorizontal.apply this, arguments
else
drawVertical.apply this, arguments
createView = (attributes) ->
new Kinetic.Shape attributes
createHandle = (attributes) ->
new Kin.Rect(attributes)
{
type: 'ruler'
name: 'ruler'
description: 'Ruler Specification'
defaults: {
drawFunc: drawFunc
fill: '#848586'
stroke: '#C2C3C5'
strokeWidth: 0.5
width: 100
height: 50
margin: [15, 15]
zeropos: 15
direction: 'horizontal'
font: '8px Verdana'
}
view_factory_fn: createView
handle_factory_fn: createHandle
toolbox_image: 'images/toolbox_ruler.png'
}
| 69549 | # ==========================================
# Copyright 2014 <NAME>, Lab.
# Licensed under The MIT License
# http://opensource.org/licenses/MIT
# ==========================================
define [
'KineticJS'
], (kin) ->
"use strict"
PIXEL_PER_MM = 3.779527559
drawHorizontal = (context) ->
startX = parseInt(this.getAttr('zeropos'))
marginLeft = this.getAttr('margin')[0]
marginRight = this.width() - this.getAttr('margin')[1]
baseY = this.height() - 15
bottomY = this.height()
context.beginPath()
context.moveTo(0, 0)
context.lineTo(0, this.height())
context.lineTo(this.width(), this.height())
context.lineTo(this.width(), 0)
context.lineTo(0, 0)
plusWidth = this.width() - startX
plusCount = Math.ceil(plusWidth / PIXEL_PER_MM)
for i in [0..(plusCount - 1)]
x = startX + i * PIXEL_PER_MM
break if x > marginRight
continue if x < marginLeft
if (i % 10 == 0)
context.moveTo(x, baseY)
context.lineTo(x, bottomY)
else if (i % 5 == 0)
context.moveTo(x, baseY + 8)
context.lineTo(x, bottomY)
else
context.moveTo(x, baseY + 11)
context.lineTo(x, bottomY)
minusWidth = startX
minusCount = Math.floor(minusWidth / PIXEL_PER_MM)
for i in [1..(minusCount - 1)]
x = startX - i * PIXEL_PER_MM
break if x < marginLeft
continue if x > marginRight
if (i % 10 == 0)
context.moveTo(x, baseY)
context.lineTo(x, bottomY)
else if (i % 5 == 0)
context.moveTo(x, baseY + 8)
context.lineTo(x, bottomY)
else
context.moveTo(x, baseY + 11)
context.lineTo(x, bottomY)
context.closePath()
context.fillStrokeShape(this)
for i in [0..(plusCount - 1)] by 10
x = startX + i * PIXEL_PER_MM
break if x > marginRight
continue if x < marginLeft
context.strokeText("#{i / 10}", x + 2, baseY + 10)
for i in [10..(minusCount - 1)] by 10
x = startX - i * PIXEL_PER_MM
break if x < marginLeft
continue if x > marginRight
context.strokeText("-#{i / 10}", x + 2, baseY + 10)
drawVertical = (context) ->
startY = parseInt(this.getAttr('zeropos'))
marginTop = this.getAttr('margin')[0]
marginBottom = this.height() - this.getAttr('margin')[1]
baseX = this.width() - 15
endX = this.width()
context.beginPath()
context.moveTo(0, 0)
context.lineTo(0, this.height())
context.lineTo(this.width(), this.height())
context.lineTo(this.width(), 0)
context.lineTo(0, 0)
plusArea = this.height() - startY
plusCount = Math.ceil(plusArea / PIXEL_PER_MM)
for i in [0..(plusCount - 1)]
y = startY + i * PIXEL_PER_MM
break if y > marginBottom
continue if y < marginTop
if (i % 10 == 0)
context.moveTo(baseX, y)
context.lineTo(endX, y)
else if (i % 5 == 0)
context.moveTo(baseX + 8, y)
context.lineTo(endX, y)
else
context.moveTo(baseX + 11, y)
context.lineTo(endX, y)
minusArea = startY
minusCount = Math.floor(minusArea / PIXEL_PER_MM)
for i in [1..(minusCount - 1)]
y = startY - i * PIXEL_PER_MM
continue if y > marginBottom
break if y < marginTop
if (i % 10 == 0)
context.moveTo(baseX, y)
context.lineTo(endX, y)
else if (i % 5 == 0)
context.moveTo(baseX + 8, y)
context.lineTo(endX, y)
else
context.moveTo(baseX + 11, y)
context.lineTo(endX, y)
context.closePath()
context.fillStrokeShape(this)
for i in [0..(plusCount - 1)] by 10
y = startY + i * PIXEL_PER_MM
break if y > marginBottom
continue if y < marginTop
context.strokeText("#{i / 10}", 1, y + 10)
for i in [10..(minusCount - 1)] by 10
y = startY - i * PIXEL_PER_MM
break if y < marginTop
continue if y > marginBottom
context.strokeText("-#{i / 10}", 1, y + 10)
drawFunc = (context) ->
if this.getAttr('direction') isnt 'vertical'
drawHorizontal.apply this, arguments
else
drawVertical.apply this, arguments
createView = (attributes) ->
new Kinetic.Shape attributes
createHandle = (attributes) ->
new Kin.Rect(attributes)
{
type: 'ruler'
name: 'ruler'
description: 'Ruler Specification'
defaults: {
drawFunc: drawFunc
fill: '#848586'
stroke: '#C2C3C5'
strokeWidth: 0.5
width: 100
height: 50
margin: [15, 15]
zeropos: 15
direction: 'horizontal'
font: '8px Verdana'
}
view_factory_fn: createView
handle_factory_fn: createHandle
toolbox_image: 'images/toolbox_ruler.png'
}
| true | # ==========================================
# Copyright 2014 PI:NAME:<NAME>END_PI, Lab.
# Licensed under The MIT License
# http://opensource.org/licenses/MIT
# ==========================================
define [
'KineticJS'
], (kin) ->
"use strict"
PIXEL_PER_MM = 3.779527559
drawHorizontal = (context) ->
startX = parseInt(this.getAttr('zeropos'))
marginLeft = this.getAttr('margin')[0]
marginRight = this.width() - this.getAttr('margin')[1]
baseY = this.height() - 15
bottomY = this.height()
context.beginPath()
context.moveTo(0, 0)
context.lineTo(0, this.height())
context.lineTo(this.width(), this.height())
context.lineTo(this.width(), 0)
context.lineTo(0, 0)
plusWidth = this.width() - startX
plusCount = Math.ceil(plusWidth / PIXEL_PER_MM)
for i in [0..(plusCount - 1)]
x = startX + i * PIXEL_PER_MM
break if x > marginRight
continue if x < marginLeft
if (i % 10 == 0)
context.moveTo(x, baseY)
context.lineTo(x, bottomY)
else if (i % 5 == 0)
context.moveTo(x, baseY + 8)
context.lineTo(x, bottomY)
else
context.moveTo(x, baseY + 11)
context.lineTo(x, bottomY)
minusWidth = startX
minusCount = Math.floor(minusWidth / PIXEL_PER_MM)
for i in [1..(minusCount - 1)]
x = startX - i * PIXEL_PER_MM
break if x < marginLeft
continue if x > marginRight
if (i % 10 == 0)
context.moveTo(x, baseY)
context.lineTo(x, bottomY)
else if (i % 5 == 0)
context.moveTo(x, baseY + 8)
context.lineTo(x, bottomY)
else
context.moveTo(x, baseY + 11)
context.lineTo(x, bottomY)
context.closePath()
context.fillStrokeShape(this)
for i in [0..(plusCount - 1)] by 10
x = startX + i * PIXEL_PER_MM
break if x > marginRight
continue if x < marginLeft
context.strokeText("#{i / 10}", x + 2, baseY + 10)
for i in [10..(minusCount - 1)] by 10
x = startX - i * PIXEL_PER_MM
break if x < marginLeft
continue if x > marginRight
context.strokeText("-#{i / 10}", x + 2, baseY + 10)
drawVertical = (context) ->
startY = parseInt(this.getAttr('zeropos'))
marginTop = this.getAttr('margin')[0]
marginBottom = this.height() - this.getAttr('margin')[1]
baseX = this.width() - 15
endX = this.width()
context.beginPath()
context.moveTo(0, 0)
context.lineTo(0, this.height())
context.lineTo(this.width(), this.height())
context.lineTo(this.width(), 0)
context.lineTo(0, 0)
plusArea = this.height() - startY
plusCount = Math.ceil(plusArea / PIXEL_PER_MM)
for i in [0..(plusCount - 1)]
y = startY + i * PIXEL_PER_MM
break if y > marginBottom
continue if y < marginTop
if (i % 10 == 0)
context.moveTo(baseX, y)
context.lineTo(endX, y)
else if (i % 5 == 0)
context.moveTo(baseX + 8, y)
context.lineTo(endX, y)
else
context.moveTo(baseX + 11, y)
context.lineTo(endX, y)
minusArea = startY
minusCount = Math.floor(minusArea / PIXEL_PER_MM)
for i in [1..(minusCount - 1)]
y = startY - i * PIXEL_PER_MM
continue if y > marginBottom
break if y < marginTop
if (i % 10 == 0)
context.moveTo(baseX, y)
context.lineTo(endX, y)
else if (i % 5 == 0)
context.moveTo(baseX + 8, y)
context.lineTo(endX, y)
else
context.moveTo(baseX + 11, y)
context.lineTo(endX, y)
context.closePath()
context.fillStrokeShape(this)
for i in [0..(plusCount - 1)] by 10
y = startY + i * PIXEL_PER_MM
break if y > marginBottom
continue if y < marginTop
context.strokeText("#{i / 10}", 1, y + 10)
for i in [10..(minusCount - 1)] by 10
y = startY - i * PIXEL_PER_MM
break if y < marginTop
continue if y > marginBottom
context.strokeText("-#{i / 10}", 1, y + 10)
drawFunc = (context) ->
if this.getAttr('direction') isnt 'vertical'
drawHorizontal.apply this, arguments
else
drawVertical.apply this, arguments
createView = (attributes) ->
new Kinetic.Shape attributes
createHandle = (attributes) ->
new Kin.Rect(attributes)
{
type: 'ruler'
name: 'ruler'
description: 'Ruler Specification'
defaults: {
drawFunc: drawFunc
fill: '#848586'
stroke: '#C2C3C5'
strokeWidth: 0.5
width: 100
height: 50
margin: [15, 15]
zeropos: 15
direction: 'horizontal'
font: '8px Verdana'
}
view_factory_fn: createView
handle_factory_fn: createHandle
toolbox_image: 'images/toolbox_ruler.png'
}
|
[
{
"context": "ageCode] =\n githubURL: \"https://github.com/codecombat/codecombat/blob/master/app/locale/#{languageCode}",
"end": 1787,
"score": 0.9974166750907898,
"start": 1777,
"tag": "USERNAME",
"value": "codecombat"
},
{
"context": " # русский язык, Russian\n ... | app/views/contribute/DiplomatView.coffee | JurianLock/codecombat | 0 | ContributeClassView = require './ContributeClassView'
template = require 'templates/contribute/diplomat'
{me} = require 'core/auth'
require("locale/en")
require("locale/en-US")
require("locale/en-GB")
require("locale/ru")
require("locale/de-DE")
require("locale/de-AT")
require("locale/de-CH")
require("locale/es-419")
require("locale/es-ES")
require("locale/zh-HANS")
require("locale/zh-HANT")
require("locale/zh-WUU-HANS")
require("locale/zh-WUU-HANT")
require("locale/fr")
require("locale/ja")
require("locale/ar")
require("locale/pt-BR")
require("locale/pt-PT")
require("locale/pl")
require("locale/it")
require("locale/tr")
require("locale/nl-BE")
require("locale/nl-NL")
require("locale/fa")
require("locale/cs")
require("locale/sv")
require("locale/id")
require("locale/el")
require("locale/ro")
require("locale/vi")
require("locale/hu")
require("locale/th")
require("locale/da")
require("locale/ko")
require("locale/sk")
require("locale/sl")
require("locale/fi")
require("locale/bg")
require("locale/nb")
require("locale/nn")
require("locale/he")
require("locale/lt")
require("locale/sr")
require("locale/uk")
require("locale/hi")
require("locale/ur")
require("locale/ms")
require("locale/ca")
require("locale/gl")
require("locale/mk-MK")
require("locale/eo")
require("locale/uz")
require("locale/my")
require("locale/et")
module.exports = class DiplomatView extends ContributeClassView
id: 'diplomat-view'
template: template
contributorClassName: 'diplomat'
calculateSpokenLanguageStats: ->
@locale ?= require 'locale/locale'
totalStrings = @countStrings @locale.en
languageStats = {}
for languageCode, language of @locale
continue if languageCode is 'update'
languageStats[languageCode] =
githubURL: "https://github.com/codecombat/codecombat/blob/master/app/locale/#{languageCode}.coffee"
completion: @countStrings(language) / totalStrings
nativeDescription: language.nativeDescription
englishDescription: language.englishDescription
diplomats: @diplomats[languageCode]
languageCode: languageCode
languageStats
countStrings: (language) ->
translated = 0
for section, strings of language.translation
translated += _.size strings
translated
diplomats:
en: [] # English - English
'en-US': [] # English (US), English (US)
'en-GB': [] # English (UK), English (UK)
ru: ['EagleTA', 'ImmortalJoker', 'Mr A', 'Shpionus', 'a1ip', 'fess89', 'iulianR', 'kerradus', 'kisik21', 'nixel', 'ser-storchak'] # русский язык, Russian
'de-DE': ['Anon', 'Dirk', 'HiroP0', 'bahuma20', 'bkimminich', 'djsmith85', 'dkundel', 'domenukk', 'faabsen'] # Deutsch (Deutschland), German (Germany)
'de-AT': ['djsmith85'] # Deutsch (Österreich), German (Austria)
'de-CH': ['greyhusky'] # Deutsch (Schweiz), German (Switzerland)
'es-419': ['2xG', 'Federico Tomas', 'Jesús Ruppel', 'Mariano Luzza', 'Matthew Burt'] # español (América Latina), Spanish (Latin America)
'es-ES': ['3rr3s3v3n', 'Anon', 'DanielRodriguezRivero', 'Matthew Burt', 'OviiiOne', 'Pouyio', 'Vindurrin'] # español (ES), Spanish (Spain)
'zh-HANS': ['1c7', 'Adam23', 'BonnieBBS', 'Cheng Zheng', 'Vic020', 'ZephyrSails', 'julycoolwind', 'onion7878', 'spacepope', 'yangxuan8282', 'yfdyh000'] # 简体中文, Chinese (Simplified)
'zh-HANT': ['Adam23', 'gintau'] # 繁体中文, Chinese (Traditional)
'zh-WUU-HANS': [] # 吴语, Wuu (Simplified)
'zh-WUU-HANT': ['benojan'] # 吳語, Wuu (Traditional)
fr: ['Anon', 'Armaldio', 'ChrisLightman', 'Elfisen', 'Feugy', 'MartinDelille', 'Oaugereau', 'Xeonarno', 'dc55028', 'jaybi', 'pstweb', 'veritable', 'xavismeh'] # français, French
ja: ['Coderaulic', 'g1itch', 'kengos', 'treby'] # 日本語, Japanese
ar: ['5y', 'ahmed80dz'] # العربية, Arabic
'pt-BR': ['Bia41', 'Gutenberg Barros', 'Kieizroe', 'Matthew Burt', 'brunoporto', 'cassiocardoso', 'jklemm', 'Arkhad'] # português do Brasil, Portuguese (Brazil)
'pt-PT': ['Imperadeiro98', 'Matthew Burt', 'ProgramadorLucas', 'ReiDuKuduro', 'batista', 'gutierri'] # Português (Portugal), Portuguese (Portugal)
pl: ['Anon', 'Kacper Ciepielewski', 'TigroTigro', 'kvasnyk'] # język polski, Polish
it: ['AlessioPaternoster', 'flauta', 'Atomk'] # italiano, Italian
tr: ['Nazım Gediz Aydındoğmuş', 'cobaimelan', 'gediz', 'ilisyus', 'wakeup'] # Türkçe, Turkish
'nl-BE': ['Glen De Cauwsemaecker', 'Ruben Vereecken'] # Nederlands (België), Dutch (Belgium)
'nl-NL': ['Guido Zuidhof', "Jasper D\'haene"] # Nederlands (Nederland), Dutch (Netherlands)
fa: ['Reza Habibi (Rehb)'] # فارسی, Persian
cs: ['Martin005', 'Gygram', 'vanous'] # čeština, Czech
sv: ['iamhj', 'Galaky'] # Svenska, Swedish
id: ['mlewisno-oberlin'] # Bahasa Indonesia, Indonesian
el: ['Stergios', 'micman', 'zsdregas'] # ελληνικά, Greek
ro: [] # limba română, Romanian
vi: ['An Nguyen Hoang Thien'] # Tiếng Việt, Vietnamese
hu: ['Anon', 'atlantisguru', 'bbeasmile', 'csuvsaregal', 'divaDseidnA', 'ferpeter', 'kinez'] # magyar, Hungarian
th: ['Kamolchanok Jittrepit'] # ไทย, Thai
da: ['Anon', 'Einar Rasmussen', 'Rahazan', 'Randi Hillerøe', 'Silwing', 'marc-portier', 'sorsjen', 'Zleep-Dogg'] # dansk, Danish
ko: ['Melondonut'] # 한국어, Korean
sk: ['Anon', 'Juraj Pecháč'] # slovenčina, Slovak
sl: [] # slovenščina, Slovene
fi: [] # suomi, Finnish
bg: [] # български език, Bulgarian
nb: ['bardeh', 'ebirkenes', 'matifol', 'mcclane654', 'mogsie', 'torehaug'] # Norsk Bokmål, Norwegian (Bokmål)
nn: [] # Norsk Nynorsk, Norwegian (Nynorsk)
he: ['OverProgram', 'monetita'] # עברית, Hebrew
lt: [] # lietuvių kalba, Lithuanian
sr: [] # српски, Serbian
uk: ['ImmortalJoker', 'OlenaGapak', 'Rarst', 'endrilian', 'fess89', 'gorodsb', 'probil'] # українська мова, Ukrainian
hi: [] # मानक हिन्दी, Hindi
ur: [] # اُردُو, Urdu
ms: [] # Bahasa Melayu, Bahasa Malaysia
ca: ['ArniMcFrag', 'Nainufar'] # Català, Catalan
gl: ['mcaeiror'] # Galego, Galician
'mk-MK': ['SuperPranx'] # Македонски, Macedonian
eo: [] # Esperanto, Esperanto
uz: [] # O'zbekcha, Uzbek
my: [] # မြန်မာစကား, Myanmar language
et: [] # Eesti, Estonian
| 58781 | ContributeClassView = require './ContributeClassView'
template = require 'templates/contribute/diplomat'
{me} = require 'core/auth'
require("locale/en")
require("locale/en-US")
require("locale/en-GB")
require("locale/ru")
require("locale/de-DE")
require("locale/de-AT")
require("locale/de-CH")
require("locale/es-419")
require("locale/es-ES")
require("locale/zh-HANS")
require("locale/zh-HANT")
require("locale/zh-WUU-HANS")
require("locale/zh-WUU-HANT")
require("locale/fr")
require("locale/ja")
require("locale/ar")
require("locale/pt-BR")
require("locale/pt-PT")
require("locale/pl")
require("locale/it")
require("locale/tr")
require("locale/nl-BE")
require("locale/nl-NL")
require("locale/fa")
require("locale/cs")
require("locale/sv")
require("locale/id")
require("locale/el")
require("locale/ro")
require("locale/vi")
require("locale/hu")
require("locale/th")
require("locale/da")
require("locale/ko")
require("locale/sk")
require("locale/sl")
require("locale/fi")
require("locale/bg")
require("locale/nb")
require("locale/nn")
require("locale/he")
require("locale/lt")
require("locale/sr")
require("locale/uk")
require("locale/hi")
require("locale/ur")
require("locale/ms")
require("locale/ca")
require("locale/gl")
require("locale/mk-MK")
require("locale/eo")
require("locale/uz")
require("locale/my")
require("locale/et")
module.exports = class DiplomatView extends ContributeClassView
id: 'diplomat-view'
template: template
contributorClassName: 'diplomat'
calculateSpokenLanguageStats: ->
@locale ?= require 'locale/locale'
totalStrings = @countStrings @locale.en
languageStats = {}
for languageCode, language of @locale
continue if languageCode is 'update'
languageStats[languageCode] =
githubURL: "https://github.com/codecombat/codecombat/blob/master/app/locale/#{languageCode}.coffee"
completion: @countStrings(language) / totalStrings
nativeDescription: language.nativeDescription
englishDescription: language.englishDescription
diplomats: @diplomats[languageCode]
languageCode: languageCode
languageStats
countStrings: (language) ->
translated = 0
for section, strings of language.translation
translated += _.size strings
translated
diplomats:
en: [] # English - English
'en-US': [] # English (US), English (US)
'en-GB': [] # English (UK), English (UK)
ru: ['EagleTA', 'ImmortalJoker', 'Mr A', 'Shpionus', 'a1ip', 'fess89', 'iulianR', 'kerradus', 'kisik21', 'nixel', 'ser-storchak'] # русский язык, Russian
'de-DE': ['<NAME>', '<NAME>', '<NAME>', 'bahuma20', 'bkimminich', 'djsmith85', 'dkundel', 'domenukk', 'faabsen'] # Deutsch (Deutschland), German (Germany)
'de-AT': ['djsmith85'] # Deutsch (Österreich), German (Austria)
'de-CH': ['greyhusky'] # Deutsch (Schweiz), German (Switzerland)
'es-419': ['2xG', '<NAME>', '<NAME>', '<NAME>', '<NAME>'] # español (América Latina), Spanish (Latin America)
'es-ES': ['3rr3s3v3n', '<NAME>', '<NAME>', '<NAME>', 'OviiiOne', '<NAME>', '<NAME>'] # español (ES), Spanish (Spain)
'zh-HANS': ['1c7', 'Adam23', 'BonnieBBS', '<NAME>', 'Vic020', 'ZephyrSails', 'julycoolwind', 'onion7878', 'spacepope', 'yangxuan8282', 'yfdyh000'] # 简体中文, Chinese (Simplified)
'zh-HANT': ['Adam23', 'gintau'] # 繁体中文, Chinese (Traditional)
'zh-WUU-HANS': [] # 吴语, Wuu (Simplified)
'zh-WUU-HANT': ['benojan'] # 吳語, Wuu (Traditional)
fr: ['<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', 'dc55028', 'jaybi', 'pstweb', 'veritable', 'xavismeh'] # français, French
ja: ['Coderaulic', 'g1itch', 'kengos', 'treby'] # 日本語, Japanese
ar: ['5y', 'ahmed80dz'] # العربية, Arabic
'pt-BR': ['Bia41', '<NAME>', '<NAME>', '<NAME>', 'brunoporto', 'cassiocardoso', 'jklemm', 'Arkhad'] # português do Brasil, Portuguese (Brazil)
'pt-PT': ['Imperadeiro98', '<NAME>', 'ProgramadorLucas', 'ReiDuKuduro', 'batista', 'gutierri'] # Português (Portugal), Portuguese (Portugal)
pl: ['<NAME>', '<NAME>', '<NAME>ro<NAME>ro', 'kvas<NAME>'] # język polski, Polish
it: ['<NAME>', '<NAME>', 'Atom<NAME>'] # italiano, Italian
tr: ['<NAME>', 'co<NAME>', 'ged<NAME>', 'ilisyus', 'wake<NAME>'] # Türkçe, Turkish
'nl-BE': ['<NAME>', '<NAME>'] # Nederlands (België), Dutch (Belgium)
'nl-NL': ['<NAME>', "<NAME>"] # Nederlands (Nederland), Dutch (Netherlands)
fa: ['<NAME> (Rehb)'] # فارسی, Persian
cs: ['<NAME>', '<NAME>', '<NAME>'] # čeština, Czech
sv: ['<NAME>', '<NAME>'] # Svenska, Swedish
id: ['mlewisno-oberlin'] # Bahasa Indonesia, Indonesian
el: ['<NAME>', '<NAME>', 'zsdreg<NAME>'] # ελληνικά, Greek
ro: [] # limba română, Romanian
vi: ['<NAME>'] # Tiếng Việt, Vietnamese
hu: ['<NAME>', 'atlantisguru', '<NAME>', 'csuvsaregal', 'divaDseidnA', '<NAME>', '<NAME>'] # magyar, Hungarian
th: ['<NAME>amol<NAME>'] # ไทย, Thai
da: ['<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>'] # dansk, Danish
ko: ['Melondonut'] # 한국어, Korean
sk: ['<NAME>', '<NAME>'] # slovenčina, Slovak
sl: [] # slovenščina, Slovene
fi: [] # suomi, Finnish
bg: [] # български език, Bulgarian
nb: ['bar<NAME>', 'eb<NAME>', 'matifol', 'mcclane654', 'mogsie', 'tore<NAME>'] # Norsk Bokmål, Norwegian (Bokmål)
nn: [] # Norsk Nynorsk, Norwegian (Nynorsk)
he: ['OverProgram', 'monetita'] # עברית, Hebrew
lt: [] # lietuvių kalba, Lithuanian
sr: [] # српски, Serbian
uk: ['ImmortalJoker', '<NAME>', '<NAME>', 'endrilian', 'fess89', 'gorodsb', 'probil'] # українська мова, Ukrainian
hi: [] # मानक हिन्दी, Hindi
ur: [] # اُردُو, Urdu
ms: [] # Bahasa Melayu, Bahasa Malaysia
ca: ['ArniMcFrag', 'Nainufar'] # Català, Catalan
gl: ['mcaeiror'] # Galego, Galician
'mk-MK': ['SuperPranx'] # Македонски, Macedonian
eo: [] # Esperanto, Esperanto
uz: [] # O'zbekcha, Uzbek
my: [] # မြန်မာစကား, Myanmar language
et: [] # Eesti, Estonian
| true | ContributeClassView = require './ContributeClassView'
template = require 'templates/contribute/diplomat'
{me} = require 'core/auth'
require("locale/en")
require("locale/en-US")
require("locale/en-GB")
require("locale/ru")
require("locale/de-DE")
require("locale/de-AT")
require("locale/de-CH")
require("locale/es-419")
require("locale/es-ES")
require("locale/zh-HANS")
require("locale/zh-HANT")
require("locale/zh-WUU-HANS")
require("locale/zh-WUU-HANT")
require("locale/fr")
require("locale/ja")
require("locale/ar")
require("locale/pt-BR")
require("locale/pt-PT")
require("locale/pl")
require("locale/it")
require("locale/tr")
require("locale/nl-BE")
require("locale/nl-NL")
require("locale/fa")
require("locale/cs")
require("locale/sv")
require("locale/id")
require("locale/el")
require("locale/ro")
require("locale/vi")
require("locale/hu")
require("locale/th")
require("locale/da")
require("locale/ko")
require("locale/sk")
require("locale/sl")
require("locale/fi")
require("locale/bg")
require("locale/nb")
require("locale/nn")
require("locale/he")
require("locale/lt")
require("locale/sr")
require("locale/uk")
require("locale/hi")
require("locale/ur")
require("locale/ms")
require("locale/ca")
require("locale/gl")
require("locale/mk-MK")
require("locale/eo")
require("locale/uz")
require("locale/my")
require("locale/et")
module.exports = class DiplomatView extends ContributeClassView
id: 'diplomat-view'
template: template
contributorClassName: 'diplomat'
calculateSpokenLanguageStats: ->
@locale ?= require 'locale/locale'
totalStrings = @countStrings @locale.en
languageStats = {}
for languageCode, language of @locale
continue if languageCode is 'update'
languageStats[languageCode] =
githubURL: "https://github.com/codecombat/codecombat/blob/master/app/locale/#{languageCode}.coffee"
completion: @countStrings(language) / totalStrings
nativeDescription: language.nativeDescription
englishDescription: language.englishDescription
diplomats: @diplomats[languageCode]
languageCode: languageCode
languageStats
countStrings: (language) ->
translated = 0
for section, strings of language.translation
translated += _.size strings
translated
diplomats:
en: [] # English - English
'en-US': [] # English (US), English (US)
'en-GB': [] # English (UK), English (UK)
ru: ['EagleTA', 'ImmortalJoker', 'Mr A', 'Shpionus', 'a1ip', 'fess89', 'iulianR', 'kerradus', 'kisik21', 'nixel', 'ser-storchak'] # русский язык, Russian
'de-DE': ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'bahuma20', 'bkimminich', 'djsmith85', 'dkundel', 'domenukk', 'faabsen'] # Deutsch (Deutschland), German (Germany)
'de-AT': ['djsmith85'] # Deutsch (Österreich), German (Austria)
'de-CH': ['greyhusky'] # Deutsch (Schweiz), German (Switzerland)
'es-419': ['2xG', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI'] # español (América Latina), Spanish (Latin America)
'es-ES': ['3rr3s3v3n', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'OviiiOne', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI'] # español (ES), Spanish (Spain)
'zh-HANS': ['1c7', 'Adam23', 'BonnieBBS', 'PI:NAME:<NAME>END_PI', 'Vic020', 'ZephyrSails', 'julycoolwind', 'onion7878', 'spacepope', 'yangxuan8282', 'yfdyh000'] # 简体中文, Chinese (Simplified)
'zh-HANT': ['Adam23', 'gintau'] # 繁体中文, Chinese (Traditional)
'zh-WUU-HANS': [] # 吴语, Wuu (Simplified)
'zh-WUU-HANT': ['benojan'] # 吳語, Wuu (Traditional)
fr: ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'dc55028', 'jaybi', 'pstweb', 'veritable', 'xavismeh'] # français, French
ja: ['Coderaulic', 'g1itch', 'kengos', 'treby'] # 日本語, Japanese
ar: ['5y', 'ahmed80dz'] # العربية, Arabic
'pt-BR': ['Bia41', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'brunoporto', 'cassiocardoso', 'jklemm', 'Arkhad'] # português do Brasil, Portuguese (Brazil)
'pt-PT': ['Imperadeiro98', 'PI:NAME:<NAME>END_PI', 'ProgramadorLucas', 'ReiDuKuduro', 'batista', 'gutierri'] # Português (Portugal), Portuguese (Portugal)
pl: ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PIroPI:NAME:<NAME>END_PIro', 'kvasPI:NAME:<NAME>END_PI'] # język polski, Polish
it: ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'AtomPI:NAME:<NAME>END_PI'] # italiano, Italian
tr: ['PI:NAME:<NAME>END_PI', 'coPI:NAME:<NAME>END_PI', 'gedPI:NAME:<NAME>END_PI', 'ilisyus', 'wakePI:NAME:<NAME>END_PI'] # Türkçe, Turkish
'nl-BE': ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI'] # Nederlands (België), Dutch (Belgium)
'nl-NL': ['PI:NAME:<NAME>END_PI', "PI:NAME:<NAME>END_PI"] # Nederlands (Nederland), Dutch (Netherlands)
fa: ['PI:NAME:<NAME>END_PI (Rehb)'] # فارسی, Persian
cs: ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI'] # čeština, Czech
sv: ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI'] # Svenska, Swedish
id: ['mlewisno-oberlin'] # Bahasa Indonesia, Indonesian
el: ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'zsdregPI:NAME:<NAME>END_PI'] # ελληνικά, Greek
ro: [] # limba română, Romanian
vi: ['PI:NAME:<NAME>END_PI'] # Tiếng Việt, Vietnamese
hu: ['PI:NAME:<NAME>END_PI', 'atlantisguru', 'PI:NAME:<NAME>END_PI', 'csuvsaregal', 'divaDseidnA', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI'] # magyar, Hungarian
th: ['PI:NAME:<NAME>END_PIamolPI:NAME:<NAME>END_PI'] # ไทย, Thai
da: ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI'] # dansk, Danish
ko: ['Melondonut'] # 한국어, Korean
sk: ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI'] # slovenčina, Slovak
sl: [] # slovenščina, Slovene
fi: [] # suomi, Finnish
bg: [] # български език, Bulgarian
nb: ['barPI:NAME:<NAME>END_PI', 'ebPI:NAME:<NAME>END_PI', 'matifol', 'mcclane654', 'mogsie', 'torePI:NAME:<NAME>END_PI'] # Norsk Bokmål, Norwegian (Bokmål)
nn: [] # Norsk Nynorsk, Norwegian (Nynorsk)
he: ['OverProgram', 'monetita'] # עברית, Hebrew
lt: [] # lietuvių kalba, Lithuanian
sr: [] # српски, Serbian
uk: ['ImmortalJoker', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'endrilian', 'fess89', 'gorodsb', 'probil'] # українська мова, Ukrainian
hi: [] # मानक हिन्दी, Hindi
ur: [] # اُردُو, Urdu
ms: [] # Bahasa Melayu, Bahasa Malaysia
ca: ['ArniMcFrag', 'Nainufar'] # Català, Catalan
gl: ['mcaeiror'] # Galego, Galician
'mk-MK': ['SuperPranx'] # Македонски, Macedonian
eo: [] # Esperanto, Esperanto
uz: [] # O'zbekcha, Uzbek
my: [] # မြန်မာစကား, Myanmar language
et: [] # Eesti, Estonian
|
[
{
"context": "###\n * https://github.com/jkuetemeier/gulp-tasks-common\n *\n * Copyright (c) 2014 Jörg K",
"end": 37,
"score": 0.9985677599906921,
"start": 26,
"tag": "USERNAME",
"value": "jkuetemeier"
},
{
"context": "temeier/gulp-tasks-common\n *\n * Copyright (c) 2014 Jörg Kütemei... | src/register_tasks.coffee | kuetemeier/gulp-tasks-common | 0 | ###
* https://github.com/jkuetemeier/gulp-tasks-common
*
* Copyright (c) 2014 Jörg Kütemeier
* Licensed under the MIT license.
###
_ = require 'lodash'
module.exports = (common) ->
(gulp, config) ->
if gulp == undefined
throw new Error 'gulp variable has to be defined'
config = config || {}
_.defaults config, common.config
# try to register all tasks
_.forIn common.tasks, (value, key) ->
if value.task
# if a 'task' property is defined, register this task
value.task gulp, config[key]
else
# or create a default task (just call 'fn' property)
taskConfig = config[key]
if taskConfig.enabled
gulp.task key, ->
value.fn gulp, taskConfig
| 143623 | ###
* https://github.com/jkuetemeier/gulp-tasks-common
*
* Copyright (c) 2014 <NAME>
* Licensed under the MIT license.
###
_ = require 'lodash'
module.exports = (common) ->
(gulp, config) ->
if gulp == undefined
throw new Error 'gulp variable has to be defined'
config = config || {}
_.defaults config, common.config
# try to register all tasks
_.forIn common.tasks, (value, key) ->
if value.task
# if a 'task' property is defined, register this task
value.task gulp, config[key]
else
# or create a default task (just call 'fn' property)
taskConfig = config[key]
if taskConfig.enabled
gulp.task key, ->
value.fn gulp, taskConfig
| true | ###
* https://github.com/jkuetemeier/gulp-tasks-common
*
* Copyright (c) 2014 PI:NAME:<NAME>END_PI
* Licensed under the MIT license.
###
_ = require 'lodash'
module.exports = (common) ->
(gulp, config) ->
if gulp == undefined
throw new Error 'gulp variable has to be defined'
config = config || {}
_.defaults config, common.config
# try to register all tasks
_.forIn common.tasks, (value, key) ->
if value.task
# if a 'task' property is defined, register this task
value.task gulp, config[key]
else
# or create a default task (just call 'fn' property)
taskConfig = config[key]
if taskConfig.enabled
gulp.task key, ->
value.fn gulp, taskConfig
|
[
{
"context": "ound patch\"\n }\n {\n id: 3\n title: \"Sharks\"\n info: \"Cat sample patch\"\n }\n ]\n a",
"end": 479,
"score": 0.6136012077331543,
"start": 477,
"tag": "NAME",
"value": "Sh"
}
] | app/js/features/patches/patch_service.coffee | 17thDimension/synthesizer | 0 | ###
A simple example service that returns some data.
###
angular.module("synthesizer")
.factory "PatchService", ->
# Might use a resource here that returns a JSON array
# Some fake testing data
patches = [
{
id: 0
title: "Cats"
info: "Cat sample patch"
}
{
id: 1
title: "Dogs"
info: "Real Doggy barks"
}
{
id: 2
title: "Turtles"
info: "Turtle sound patch"
}
{
id: 3
title: "Sharks"
info: "Cat sample patch"
}
]
all: ->
patches
get: (patchId) ->
# Simple index lookup
patches[patchId]
| 158546 | ###
A simple example service that returns some data.
###
angular.module("synthesizer")
.factory "PatchService", ->
# Might use a resource here that returns a JSON array
# Some fake testing data
patches = [
{
id: 0
title: "Cats"
info: "Cat sample patch"
}
{
id: 1
title: "Dogs"
info: "Real Doggy barks"
}
{
id: 2
title: "Turtles"
info: "Turtle sound patch"
}
{
id: 3
title: "<NAME>arks"
info: "Cat sample patch"
}
]
all: ->
patches
get: (patchId) ->
# Simple index lookup
patches[patchId]
| true | ###
A simple example service that returns some data.
###
angular.module("synthesizer")
.factory "PatchService", ->
# Might use a resource here that returns a JSON array
# Some fake testing data
patches = [
{
id: 0
title: "Cats"
info: "Cat sample patch"
}
{
id: 1
title: "Dogs"
info: "Real Doggy barks"
}
{
id: 2
title: "Turtles"
info: "Turtle sound patch"
}
{
id: 3
title: "PI:NAME:<NAME>END_PIarks"
info: "Cat sample patch"
}
]
all: ->
patches
get: (patchId) ->
# Simple index lookup
patches[patchId]
|
[
{
"context": "al notes required for the script>\n#\n# Author:\n# 神楽坂喵\n\nmodule.exports = (robot) ->\n\n robot.hear /nyaa ",
"end": 283,
"score": 0.9997584223747253,
"start": 279,
"tag": "NAME",
"value": "神楽坂喵"
}
] | src/nyaa-net.coffee | KagurazakaNyaa/hubot-nyaa-net | 0 | # Description
# nyaa.net 搜索机器人脚本
#
# Configuration:
# LIST_OF_ENV_VARS_TO_SET
#
# Commands:
# nyaa search <name> <page> <size> - 搜索nyaa.net
# sukebei search <name> <page> <size> - 搜索sukebei.nyaa.net
#
# Notes:
# <optional notes required for the script>
#
# Author:
# 神楽坂喵
module.exports = (robot) ->
robot.hear /nyaa search (.*) (.*) (.*)/, (res) ->
robot.http("https://nyaa.net/api/search?q=#{res.match[1]}&page=#{res.match[2]}&limit=#{res.match[3]}&sort=3&order=false")
.get() (err, resp, body) ->
if err
res.send "got problem when request search: #{err}"
robot.emit 'error', err, resp
return
data = null
try
data = JSON.parse body
catch error
res.send "got JSON parse error #{error}"
return
res.send "展示搜索到的`#{data.queryRecordCount}`个结果,共有`#{data.totalRecordCount}`个结果。"
res.send "*名称* : `#{torrent.name}` \n*描述* : #{torrent.description} \n*文件大小* : `#{humanFileSize(torrent.filesize)}` \n*上传日期* : `#{torrent.date}` \n*磁链* : `#{torrent.magnet}` \n*种子* : #{torrent.torrent}" for torrent in data.torrents
return
robot.hear /sukebei search (.*) (.*) (.*)/, (res) ->
robot.http("https://sukebei.nyaa.net/api/search?q=#{res.match[1]}&page=#{res.match[2]}&limit=#{res.match[3]}&sort=3&order=false")
.get() (err, resp, body) ->
if err
res.send "got problem when request search: #{err}"
robot.emit 'error', err, resp
return
data = null
try
data = JSON.parse body
catch error
res.send "got JSON parse error #{error}"
return
res.send "展示搜索到的`#{data.queryRecordCount}`个结果,共有`#{data.totalRecordCount}`个结果。"
res.send "*名称* : `#{torrent.name}` \n*描述* : #{torrent.description} \n*文件大小* : `#{humanFileSize(torrent.filesize)}` \n*上传日期* : `#{torrent.date}` \n*磁链* : `#{torrent.magnet}` \n*种子* : #{torrent.torrent}" for torrent in data.torrents
return
`
function humanFileSize(bytes) {
var thresh = 1024;
if(Math.abs(bytes) < thresh) {
return bytes + ' B';
}
var units = ['KiB','MiB','GiB','TiB','PiB','EiB','ZiB','YiB'];
var u = -1;
do {
bytes /= thresh;
++u;
} while(Math.abs(bytes) >= thresh && u < units.length - 1);
return bytes.toFixed(3)+' '+units[u];
}
` | 92670 | # Description
# nyaa.net 搜索机器人脚本
#
# Configuration:
# LIST_OF_ENV_VARS_TO_SET
#
# Commands:
# nyaa search <name> <page> <size> - 搜索nyaa.net
# sukebei search <name> <page> <size> - 搜索sukebei.nyaa.net
#
# Notes:
# <optional notes required for the script>
#
# Author:
# <NAME>
module.exports = (robot) ->
robot.hear /nyaa search (.*) (.*) (.*)/, (res) ->
robot.http("https://nyaa.net/api/search?q=#{res.match[1]}&page=#{res.match[2]}&limit=#{res.match[3]}&sort=3&order=false")
.get() (err, resp, body) ->
if err
res.send "got problem when request search: #{err}"
robot.emit 'error', err, resp
return
data = null
try
data = JSON.parse body
catch error
res.send "got JSON parse error #{error}"
return
res.send "展示搜索到的`#{data.queryRecordCount}`个结果,共有`#{data.totalRecordCount}`个结果。"
res.send "*名称* : `#{torrent.name}` \n*描述* : #{torrent.description} \n*文件大小* : `#{humanFileSize(torrent.filesize)}` \n*上传日期* : `#{torrent.date}` \n*磁链* : `#{torrent.magnet}` \n*种子* : #{torrent.torrent}" for torrent in data.torrents
return
robot.hear /sukebei search (.*) (.*) (.*)/, (res) ->
robot.http("https://sukebei.nyaa.net/api/search?q=#{res.match[1]}&page=#{res.match[2]}&limit=#{res.match[3]}&sort=3&order=false")
.get() (err, resp, body) ->
if err
res.send "got problem when request search: #{err}"
robot.emit 'error', err, resp
return
data = null
try
data = JSON.parse body
catch error
res.send "got JSON parse error #{error}"
return
res.send "展示搜索到的`#{data.queryRecordCount}`个结果,共有`#{data.totalRecordCount}`个结果。"
res.send "*名称* : `#{torrent.name}` \n*描述* : #{torrent.description} \n*文件大小* : `#{humanFileSize(torrent.filesize)}` \n*上传日期* : `#{torrent.date}` \n*磁链* : `#{torrent.magnet}` \n*种子* : #{torrent.torrent}" for torrent in data.torrents
return
`
function humanFileSize(bytes) {
var thresh = 1024;
if(Math.abs(bytes) < thresh) {
return bytes + ' B';
}
var units = ['KiB','MiB','GiB','TiB','PiB','EiB','ZiB','YiB'];
var u = -1;
do {
bytes /= thresh;
++u;
} while(Math.abs(bytes) >= thresh && u < units.length - 1);
return bytes.toFixed(3)+' '+units[u];
}
` | true | # Description
# nyaa.net 搜索机器人脚本
#
# Configuration:
# LIST_OF_ENV_VARS_TO_SET
#
# Commands:
# nyaa search <name> <page> <size> - 搜索nyaa.net
# sukebei search <name> <page> <size> - 搜索sukebei.nyaa.net
#
# Notes:
# <optional notes required for the script>
#
# Author:
# PI:NAME:<NAME>END_PI
module.exports = (robot) ->
robot.hear /nyaa search (.*) (.*) (.*)/, (res) ->
robot.http("https://nyaa.net/api/search?q=#{res.match[1]}&page=#{res.match[2]}&limit=#{res.match[3]}&sort=3&order=false")
.get() (err, resp, body) ->
if err
res.send "got problem when request search: #{err}"
robot.emit 'error', err, resp
return
data = null
try
data = JSON.parse body
catch error
res.send "got JSON parse error #{error}"
return
res.send "展示搜索到的`#{data.queryRecordCount}`个结果,共有`#{data.totalRecordCount}`个结果。"
res.send "*名称* : `#{torrent.name}` \n*描述* : #{torrent.description} \n*文件大小* : `#{humanFileSize(torrent.filesize)}` \n*上传日期* : `#{torrent.date}` \n*磁链* : `#{torrent.magnet}` \n*种子* : #{torrent.torrent}" for torrent in data.torrents
return
robot.hear /sukebei search (.*) (.*) (.*)/, (res) ->
robot.http("https://sukebei.nyaa.net/api/search?q=#{res.match[1]}&page=#{res.match[2]}&limit=#{res.match[3]}&sort=3&order=false")
.get() (err, resp, body) ->
if err
res.send "got problem when request search: #{err}"
robot.emit 'error', err, resp
return
data = null
try
data = JSON.parse body
catch error
res.send "got JSON parse error #{error}"
return
res.send "展示搜索到的`#{data.queryRecordCount}`个结果,共有`#{data.totalRecordCount}`个结果。"
res.send "*名称* : `#{torrent.name}` \n*描述* : #{torrent.description} \n*文件大小* : `#{humanFileSize(torrent.filesize)}` \n*上传日期* : `#{torrent.date}` \n*磁链* : `#{torrent.magnet}` \n*种子* : #{torrent.torrent}" for torrent in data.torrents
return
`
function humanFileSize(bytes) {
var thresh = 1024;
if(Math.abs(bytes) < thresh) {
return bytes + ' B';
}
var units = ['KiB','MiB','GiB','TiB','PiB','EiB','ZiB','YiB'];
var u = -1;
do {
bytes /= thresh;
++u;
} while(Math.abs(bytes) >= thresh && u < units.length - 1);
return bytes.toFixed(3)+' '+units[u];
}
` |
[
{
"context": " target: \"#{scratch}/file\"\n content: \"hello nikita\"\n .file.assert\n target: \"#{scratch}/f",
"end": 6452,
"score": 0.9752916693687439,
"start": 6446,
"tag": "NAME",
"value": "nikita"
},
{
"context": " match: /(username)=(.*)/\n replace... | packages/core/test/file/index.coffee | chibanemourad/node-nikita | 0 |
nikita = require '../../src'
{tags, ssh, scratch} = require '../test'
they = require('ssh2-they').configure ssh...
return unless tags.posix
describe 'file', ->
describe 'options content', ->
they 'is a string', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'Hello'
, (err, {status}) ->
status.should.be.true() unless err
.file
target: "#{scratch}/file"
content: 'Hello'
, (err, {status}) ->
status.should.be.false() unless err
.file.assert
target: "#{scratch}/file"
content: 'Hello'
.promise()
they 'is a function', ({ssh}) ->
content = 'invalid'
nikita
ssh: ssh
.call ->
content = 'valid'
.file
target: "#{scratch}/file"
trigger: true
content: ({options}) -> content if options.trigger
.file.assert
target: "#{scratch}/file"
content: 'valid'
.promise()
they 'status is false is content is the same', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'Hello'
.file
target: "#{scratch}/file"
content: 'Hello'
, (err, {status}) ->
status.should.be.false() unless err
.promise()
they 'with source is a file', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/a_source"
content: 'Hello'
.file
target: "#{scratch}/a_target"
source: "#{scratch}/a_source"
, (err, {status}) ->
status.should.be.true() unless err
.file
target: "#{scratch}/a_target"
source: "#{scratch}/a_source"
, (err, {status}) ->
status.should.be.false() unless err
.file.assert
target: "#{scratch}/a_source"
content: 'Hello'
.file.assert
target: "#{scratch}/a_target"
content: 'Hello'
.promise()
they 'empty file', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/empty_file"
content: ''
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/empty_file"
content: ''
.promise()
they 'touch file', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/empty_file"
content: ''
unless_exists: true
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/empty_file"
content: ''
.file
target: "#{scratch}/empty_file"
content: 'toto'
.file
target: "#{scratch}/empty_file"
content: ''
unless_exists: true
, (err, {status}) ->
status.should.be.false() unless err
.file.assert
target: "#{scratch}/empty_file"
content: 'toto'
.promise()
they 'handle integer type', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/a_file"
content: 123
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/a_file"
content: '123'
.promise()
they 'create parent directory', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/a/missing/dir/a_file"
content: 'hello'
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/a/missing/dir/a_file"
content: 'hello'
.promise()
describe 'link', ->
they 'follow link by default', ({ssh}) ->
nikita
ssh: ssh
.file
content: 'ko'
target: "#{scratch}/target"
.system.link
source: "#{scratch}/target"
target: "#{scratch}/link"
.file
content: 'ok'
target: "#{scratch}/link"
.file.assert
target: "#{scratch}/target"
content: 'ok'
.file.assert
target: "#{scratch}/link"
content: 'ok'
.promise()
they 'throw error if link is a directory', ({ssh}) ->
nikita
ssh: ssh
.system.mkdir
target: "#{scratch}/target"
.system.link
source: "#{scratch}/target"
target: "#{scratch}/link"
.file
content: 'ok'
target: "#{scratch}/link"
relax: true
, (err) ->
err.code.should.eql 'EISDIR'
.promise()
they 'dont follow link if option "unlink"', ({ssh}) ->
nikita
ssh: ssh
.file
content: 'ko'
target: "#{scratch}/a_target"
.system.link
source: "#{scratch}/a_target"
target: "#{scratch}/a_link"
.file
content: 'ok'
target: "#{scratch}/a_link"
unlink: true
.file.assert
target: "#{scratch}/a_target"
content: 'ko'
.file.assert
target: "#{scratch}/a_link"
content: 'ok'
.promise()
they 'dont follow link if option "unlink" and link is directory', ({ssh}) ->
nikita
ssh: ssh
.system.mkdir
target: "#{scratch}/target"
.system.link
source: "#{scratch}/target"
target: "#{scratch}/link"
.file
content: 'ok'
target: "#{scratch}/link"
unlink: true
.file.assert
target: "#{scratch}/link"
content: 'ok'
filetype: 'file'
.file.assert
target: "#{scratch}/target"
filetype: 'directory'
.promise()
describe 'ownerships and permissions', ->
they 'set permission', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/a_file"
content: 'ok'
mode: 0o0700
.file.assert
target: "#{scratch}/a_file"
mode: 0o0700
.promise()
they 'does not modify parent', ({ssh}) ->
nikita
ssh: ssh
.system.mkdir
target: "#{scratch}/a_dir"
mode: 0o0744
.file
target: "#{scratch}/a_file"
content: 'ok'
mode: 0o0700
.file.assert
target: "#{scratch}/a_dir"
mode: 0o0744
.promise()
they 'ensure mode is preserve on content update', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'hello'
mode: 0o0755
.file
target: "#{scratch}/file"
content: "hello nikita"
.file.assert
target: "#{scratch}/file"
mode: 0o0755
.promise()
they 'change permission', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/a_file"
content: 'ok'
mode: 0o0700
.file
target: "#{scratch}/a_file"
content: 'ok'
mode: 0o0705
, (err, {status}) ->
status.should.be.true() unless err
.file
target: "#{scratch}/a_file"
content: 'ok'
mode: 0o0705
, (err, {status}) ->
status.should.be.false() unless err
.promise()
they 'change permission after modification', ({ssh}) ->
nikita
.file
ssh: ssh
target: "#{scratch}/a_file"
content: 'Hello'
mode: 0o0700
.file
ssh: ssh
target: "#{scratch}/a_file"
content: 'World'
mode: 0o0755
.file.assert
target: "#{scratch}/a_file"
mode: 0o0755
.promise()
describe 'from and to', ->
they 'with from and with to', ({ssh}) ->
nikita
ssh: ssh
.file
ssh: ssh
target: "#{scratch}/fromto.md"
from: '# from'
to: '# to'
content: 'here we are\n# from\nlets try to replace that one\n# to\nyou coquin'
replace: 'my friend'
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/fromto.md"
content: 'here we are\n# from\nmy friend\n# to\nyou coquin'
.promise()
they 'with from and with to append', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/fromto.md"
content: 'here we are\nyou coquin'
.file
target: "#{scratch}/fromto.md"
from: '# from'
to: '# to'
append: true
replace: 'my friend'
, (err, {status}) ->
status.should.be.true()
.file.assert
target: "#{scratch}/fromto.md"
content: 'here we are\nyou coquin\n# from\nmy friend\n# to'
.file
target: "#{scratch}/fromto.md"
from: '# from'
to: '# to'
append: true
replace: 'my best friend'
eof: true
, (err, {status}) ->
status.should.be.true()
.file.assert
target: "#{scratch}/fromto.md"
content: 'here we are\nyou coquin\n# from\nmy best friend\n# to\n'
.promise()
they 'with from and without to', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/fromto.md"
from: '# from'
content: 'here we are\n# from\nlets try to replace that one\n# to\nyou coquin'
replace: 'my friend'
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/fromto.md"
content: 'here we are\n# from\nmy friend'
.promise()
they 'without from and with to', ({ssh}) ->
nikita.file
ssh: ssh
target: "#{scratch}/fromto.md"
to: '# to'
content: 'here we are\n# from\nlets try to replace that one\n# to\nyou coquin'
replace: 'my friend'
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/fromto.md"
content: 'my friend\n# to\nyou coquin'
.promise()
describe 'replace', ->
they 'without match and place_before a string', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/fromto.md"
content: 'here we are\nyou+coquin'
replace: 'my friend'
place_before: 'you+coquin' # Regexp must escape the plus sign
.file.assert
target: "#{scratch}/fromto.md"
content: 'here we are\nmy friend\nyou+coquin'
.promise()
they 'without match and place_before a regexp', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/fromto.md"
content: 'here we are\nyou coquin'
replace: 'my friend'
place_before: /^you coquin$/m
.file.assert
target: "#{scratch}/fromto.md"
content: 'here we are\nmy friend\nyou coquin'
.promise()
describe 'match & replace', ->
they 'with match a line as a string', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/fromto.md"
match: 'lets try to replace that one'
content: 'here we are\nlets try to replace that one\nyou coquin'
replace: 'my friend'
, (err, {status}) ->
status.should.be.true() unless err
.file
target: "#{scratch}/fromto.md"
match: 'my friend'
replace: 'my friend'
, (err, {status}) ->
status.should.be.false() unless err
.file.assert
target: "#{scratch}/fromto.md"
content: 'here we are\nmy friend\nyou coquin'
.promise()
they 'with match a word as a string', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/fromto.md"
match: 'replace'
content: 'replace that one\nand\nreplace this one\nand not this one'
replace: 'switch'
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/fromto.md"
content: 'switch that one\nand\nswitch this one\nand not this one'
.promise()
they 'with match as a regular expression', ({ssh}) ->
# With a match
nikita
ssh: ssh
.file
target: "#{scratch}/replace"
content: 'email=david(at)adaltas(dot)com\nusername=root'
match: /(username)=(.*)/
replace: '$1=david (was $2)'
, (err, {status}) ->
status.should.be.true() unless err
.file # Without a match
target: "#{scratch}/replace"
match: /this wont work/
replace: '$1=david (was $2)'
, (err, {status}) ->
status.should.be.false() unless err
.file.assert
target: "#{scratch}/replace"
content: 'email=david(at)adaltas(dot)com\nusername=david (was root)'
.promise()
they 'with match as a regular expression and multiple content', ({ssh}) ->
nikita
ssh: ssh
.file
match: /(.*try) (.*)/
content: 'here we are\nlets try to replace that one\nyou coquin'
replace: ['my friend, $1']
target: "#{scratch}/replace"
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/replace"
content: 'here we are\nmy friend, lets try\nyou coquin'
.promise()
they 'with match with global and multilines', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/replace"
match: /^property=.*$/mg
content: '#A config file\n#property=30\nproperty=10\nproperty=20\n#End of Config'
replace: 'property=50'
, (err, {status}) ->
status.should.be.true() unless err
.file
target: "#{scratch}/replace"
match: /^property=50$/mg
replace: 'property=50'
, (err, {status}) ->
status.should.be.false() unless err
.file.assert
target: "#{scratch}/replace"
content: '#A config file\n#property=30\nproperty=50\nproperty=50\n#End of Config'
.promise()
they 'will replace target if source or content does not exists', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/a_file"
content: 'This is\nsome content\nfor testing'
.file
target: "#{scratch}/a_file"
match: /(.*content)/
replace: 'a text'
, (err, {status}) ->
status.should.be.true() unless err
.file
target: "#{scratch}/a_file"
match: /(.*content)/
replace: 'a text'
, (err, {status}) ->
status.should.be.false() unless err
.file.assert
target: "#{scratch}/a_file"
content: 'This is\na text\nfor testing'
.promise()
describe 'place_before', ->
they 'append content to missing file', ({ssh}) ->
# File does not exist, it create it with the content
nikita.file
ssh: ssh
target: "#{scratch}/a_file"
content: 'hello'
append: true
.file.assert
target: "#{scratch}/a_file"
content: 'hello'
.promise()
they 'is true, prepend the content', ({ssh}) ->
# File doesnt exists, creates one
nikita
ssh: ssh
.file
target: "#{scratch}/a_file"
content: 'world'
place_before: true
.file # File exists, prepends to it
target: "#{scratch}/a_file"
replace: 'hello'
place_before: true
.file.assert
target: "#{scratch}/a_file"
content: 'hello\nworld'
.promise()
describe 'append', ->
they 'append content to missing file', ({ssh}) ->
# File does not exist, it create it with the content
nikita.file
ssh: ssh
target: "#{scratch}/a_file"
content: 'hello'
append: true
.file.assert
target: "#{scratch}/a_file"
content: 'hello'
.promise()
they 'append content to existing file', ({ssh}) ->
# File does not exists, it create one
nikita
ssh: ssh
.file
target: "#{scratch}/a_file"
content: 'hello'
append: true
.file # File exists, it append to it
target: "#{scratch}/a_file"
content: 'world'
append: true
.file.assert
target: "#{scratch}/a_file"
content: 'helloworld'
.promise()
describe 'match & append or place_before', ->
describe 'will not prepend/append if match', ->
they 'place_before true, replace a string, match a regexp', ({ssh}) ->
# Prepare by creating a file with content
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'you coquin\nhere we are\n'
.file
target: "#{scratch}/file"
match: /.*coquin/
replace: 'new coquin'
place_before: true
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'new coquin\nhere we are\n'
# Write a second time with same match
.file
target: "#{scratch}/file"
match: /.*coquin/
replace: 'new coquin'
place_before: true
, (err, {status}) ->
status.should.be.false() unless err
.file.assert
target: "#{scratch}/file"
content: 'new coquin\nhere we are\n'
.promise()
they 'place_before true, replace a string, match a string', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'you coquin\nhere we are\n'
.file
target: "#{scratch}/file"
match: "you coquin"
replace: 'new coquin'
place_before: true
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'new coquin\nhere we are\n'
# Write a second time with same match
.file
target: "#{scratch}/file"
match: "new coquin"
replace: 'new coquin'
place_before: true
, (err, {status}) ->
status.should.be.false() unless err
.file.assert
target: "#{scratch}/file"
content: 'new coquin\nhere we are\n'
.promise()
they 'place_after', ({ssh}) ->
# Prepare by creating a file with content
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'here we are\nyou coquin\n'
.file
target: "#{scratch}/file"
match: /.*coquin/
replace: 'new coquin'
append: true
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'here we are\nnew coquin\n'
# Write a second time with same match
.file
target: "#{scratch}/file"
match: /.*coquin/
replace: 'new coquin'
append: true
, (err, {status}) ->
status.should.be.false() unless err
.file.assert
target: "#{scratch}/file"
content: 'here we are\nnew coquin\n'
.promise()
they 'will append if no match', ({ssh}) ->
# Prepare by creating a file with content
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'here we are\nyou coquin\n'
.file
target: "#{scratch}/file"
match: /will never work/
replace: 'Add this line'
append: true
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'here we are\nyou coquin\nAdd this line'
.promise()
describe 'place_before/place_after a match if it is a regexp', ->
they 'place_before', ({ssh}) ->
# Prepare by creating a file with content
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'here we are\nyou coquin\nshould we\nhave fun'
.file
target: "#{scratch}/file"
match: /will never work/
replace: 'Add this line'
place_before: /^.*we.*$/m
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'Add this line\nhere we are\nyou coquin\nshould we\nhave fun'
.promise()
they 'place_after', ({ssh}) ->
# Prepare by creating a file with content
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'here we are\nyou coquin\nshould we\nhave fun'
.file
target: "#{scratch}/file"
match: /will never work/
replace: 'Add this line'
append: /^.*we.*$/m
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'here we are\nAdd this line\nyou coquin\nshould we\nhave fun'
.promise()
describe 'place_before/place_after multiple times if regexp with global flag', ->
they 'place_before', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'here we are\nyou coquin\nshould we\nhave fun'
.file
target: "#{scratch}/file"
match: /will never work/
replace: 'Add this line'
place_before: /^.*we.*$/gm
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'Add this line\nhere we are\nyou coquin\nAdd this line\nshould we\nhave fun'
.promise()
they 'place_after', ({ssh}) ->
# Prepare by creating a file with content
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'here we are\nyou coquin\nshould we\nhave fun'
.file
target: "#{scratch}/file"
match: /will never work/
replace: 'Add this line'
append: /^.*we.*$/gm
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'here we are\nAdd this line\nyou coquin\nshould we\nAdd this line\nhave fun'
.promise()
they 'will append place_after a match if append is a string', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'here we are\nyou coquin\nshould we\nhave fun'
.file
target: "#{scratch}/file"
match: /will never work/
replace: 'Add this line'
append: 'we'
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'here we are\nAdd this line\nyou coquin\nshould we\nAdd this line\nhave fun'
.promise()
describe 'will detect new line if no match', ->
they 'place_before', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'here we are\nyou coquin'
.file
target: "#{scratch}/file"
match: /will never be found/
replace: 'Add this line'
place_before: true
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'Add this line\nhere we are\nyou coquin'
.promise()
they 'place_after', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'here we are\nyou coquin'
.file
target: "#{scratch}/file"
match: /will never be found/
replace: 'Add this line'
append: true
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'here we are\nyou coquin\nAdd this line'
.promise()
describe 'create file if not exists', ->
they 'place_before', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
match: /will never be found/
replace: 'Add this line'
place_before: true
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'Add this line'
.promise()
they 'place_after', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
match: /will never be found/
replace: 'Add this line'
append: true
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'Add this line'
.promise()
they 'match is optional', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/a_file"
content: 'Here we are\nyou coquin'
.file
target: "#{scratch}/a_file"
replace: 'Add this line'
append: true
, (err, {status}) ->
status.should.be.true() unless err
.file
target: "#{scratch}/a_file"
replace: 'Add this line'
append: true
, (err, {status}) ->
status.should.be.false() unless err
.file
target: "#{scratch}/a_file"
write: [
replace: 'Add this line'
append: true
]
, (err, {status}) ->
status.should.be.false() unless err
.file.assert
target: "#{scratch}/a_file"
content: 'Here we are\nyou coquin\nAdd this line'
.promise()
describe 'backup', ->
they 'create a file', ({ssh}) ->
# First we create a file
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'Hello'
.file
target: "#{scratch}/file"
content: 'Hello'
backup: '.bck'
, (err, {status}) ->
status.should.be.false() unless err
.file.assert
target: "#{scratch}/file.bck"
not: true
.file
target: "#{scratch}/file"
content: 'Hello Node'
backup: '.bck'
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file.bck"
content: 'Hello'
.promise()
they 'a non-existing file', ({ssh}) ->
nikita.file
ssh: ssh
target: "#{scratch}/new_file"
content: 'Hello'
backup: true
, (err, {status}) ->
status.should.be.true() unless err
.promise()
they 'with specific permissions', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/new_file_perm"
content: 'Hello World'
.file
target: "#{scratch}/new_file_perm"
content: 'Hello'
mode: 0o0644
backup: '.bck1'
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/new_file_perm.bck1"
content: 'Hello World'
mode: 0o0400
.file
target: "#{scratch}/new_file_perm"
content: 'Hello World'
backup: '.bck2'
mode: 0o0644
backup_mode: 0o0640
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/new_file_perm.bck2"
content: 'Hello'
mode: 0o0640
.promise()
describe 'write', ->
they 'do multiple replace', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'username: me\nemail: my@email\nfriends: you'
.file
target: "#{scratch}/file"
write: [
match: /^(username).*$/m
replace: "$1: you"
,
match: /^email.*$/m
replace: ""
,
match: /^(friends).*$/m
replace: "$1: me"
]
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'username: you\n\nfriends: me'
.promise()
they 'use append', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'username: me\nfriends: you'
.file
target: "#{scratch}/file"
write: [
match: /^(username).*$/m
replace: "$1: you"
,
match: /^email.*$/m
replace: "email: your@email"
append: 'username'
,
match: /^(friends).*$/m
replace: "$1: me"
]
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'username: you\nemail: your@email\nfriends: me'
.promise()
they 'handle partial match', ({ssh}) ->
# First we create a file
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'username: me\nfriends: none'
.file
target: "#{scratch}/file"
write: [
match: /^will never match$/m
replace: "useless"
,
match: /^email.*$/m
replace: "email: my@email"
append: 'username'
,
match: /^(friends).*$/m
replace: "$1: you"
]
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'username: me\nemail: my@email\nfriends: you'
.promise()
describe 'error', ->
they 'can not define source and content', ({ssh}) ->
nikita.file
ssh: ssh
target: 'abc'
source: 'abc'
content: 'abc'
relax: true
, (err) ->
err.message.should.eql 'Define either source or content'
.promise()
they 'if source doesn\'t exists', ({ssh}) ->
nikita.file
ssh: ssh
target: "#{scratch}/file"
source: "#{scratch}/does/not/exists"
relax: true
, (err) ->
err.message.should.eql "Source does not exist: \"#{scratch}/does/not/exists\""
.promise()
they 'if local source doesn\'t exists', ({ssh}) ->
nikita.file
ssh: ssh
target: "#{scratch}/file"
source: "#{scratch}/does/not/exists"
local: true
relax: true
, (err) ->
err.message.should.eql "Source does not exist: \"#{scratch}/does/not/exists\""
.promise()
describe 'eof', ->
they 'auto-detected', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'this is\r\nsome content'
eof: true
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'this is\r\nsome content\r\n'
.promise()
they 'not detected', ({ssh}) ->
nikita
ssh: ssh
.file
ssh: ssh
target: "#{scratch}/file"
content: 'this is some content'
eof: true
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'this is some content\n'
.promise()
| 120923 |
nikita = require '../../src'
{tags, ssh, scratch} = require '../test'
they = require('ssh2-they').configure ssh...
return unless tags.posix
describe 'file', ->
describe 'options content', ->
they 'is a string', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'Hello'
, (err, {status}) ->
status.should.be.true() unless err
.file
target: "#{scratch}/file"
content: 'Hello'
, (err, {status}) ->
status.should.be.false() unless err
.file.assert
target: "#{scratch}/file"
content: 'Hello'
.promise()
they 'is a function', ({ssh}) ->
content = 'invalid'
nikita
ssh: ssh
.call ->
content = 'valid'
.file
target: "#{scratch}/file"
trigger: true
content: ({options}) -> content if options.trigger
.file.assert
target: "#{scratch}/file"
content: 'valid'
.promise()
they 'status is false is content is the same', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'Hello'
.file
target: "#{scratch}/file"
content: 'Hello'
, (err, {status}) ->
status.should.be.false() unless err
.promise()
they 'with source is a file', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/a_source"
content: 'Hello'
.file
target: "#{scratch}/a_target"
source: "#{scratch}/a_source"
, (err, {status}) ->
status.should.be.true() unless err
.file
target: "#{scratch}/a_target"
source: "#{scratch}/a_source"
, (err, {status}) ->
status.should.be.false() unless err
.file.assert
target: "#{scratch}/a_source"
content: 'Hello'
.file.assert
target: "#{scratch}/a_target"
content: 'Hello'
.promise()
they 'empty file', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/empty_file"
content: ''
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/empty_file"
content: ''
.promise()
they 'touch file', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/empty_file"
content: ''
unless_exists: true
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/empty_file"
content: ''
.file
target: "#{scratch}/empty_file"
content: 'toto'
.file
target: "#{scratch}/empty_file"
content: ''
unless_exists: true
, (err, {status}) ->
status.should.be.false() unless err
.file.assert
target: "#{scratch}/empty_file"
content: 'toto'
.promise()
they 'handle integer type', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/a_file"
content: 123
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/a_file"
content: '123'
.promise()
they 'create parent directory', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/a/missing/dir/a_file"
content: 'hello'
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/a/missing/dir/a_file"
content: 'hello'
.promise()
describe 'link', ->
they 'follow link by default', ({ssh}) ->
nikita
ssh: ssh
.file
content: 'ko'
target: "#{scratch}/target"
.system.link
source: "#{scratch}/target"
target: "#{scratch}/link"
.file
content: 'ok'
target: "#{scratch}/link"
.file.assert
target: "#{scratch}/target"
content: 'ok'
.file.assert
target: "#{scratch}/link"
content: 'ok'
.promise()
they 'throw error if link is a directory', ({ssh}) ->
nikita
ssh: ssh
.system.mkdir
target: "#{scratch}/target"
.system.link
source: "#{scratch}/target"
target: "#{scratch}/link"
.file
content: 'ok'
target: "#{scratch}/link"
relax: true
, (err) ->
err.code.should.eql 'EISDIR'
.promise()
they 'dont follow link if option "unlink"', ({ssh}) ->
nikita
ssh: ssh
.file
content: 'ko'
target: "#{scratch}/a_target"
.system.link
source: "#{scratch}/a_target"
target: "#{scratch}/a_link"
.file
content: 'ok'
target: "#{scratch}/a_link"
unlink: true
.file.assert
target: "#{scratch}/a_target"
content: 'ko'
.file.assert
target: "#{scratch}/a_link"
content: 'ok'
.promise()
they 'dont follow link if option "unlink" and link is directory', ({ssh}) ->
nikita
ssh: ssh
.system.mkdir
target: "#{scratch}/target"
.system.link
source: "#{scratch}/target"
target: "#{scratch}/link"
.file
content: 'ok'
target: "#{scratch}/link"
unlink: true
.file.assert
target: "#{scratch}/link"
content: 'ok'
filetype: 'file'
.file.assert
target: "#{scratch}/target"
filetype: 'directory'
.promise()
describe 'ownerships and permissions', ->
they 'set permission', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/a_file"
content: 'ok'
mode: 0o0700
.file.assert
target: "#{scratch}/a_file"
mode: 0o0700
.promise()
they 'does not modify parent', ({ssh}) ->
nikita
ssh: ssh
.system.mkdir
target: "#{scratch}/a_dir"
mode: 0o0744
.file
target: "#{scratch}/a_file"
content: 'ok'
mode: 0o0700
.file.assert
target: "#{scratch}/a_dir"
mode: 0o0744
.promise()
they 'ensure mode is preserve on content update', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'hello'
mode: 0o0755
.file
target: "#{scratch}/file"
content: "hello <NAME>"
.file.assert
target: "#{scratch}/file"
mode: 0o0755
.promise()
they 'change permission', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/a_file"
content: 'ok'
mode: 0o0700
.file
target: "#{scratch}/a_file"
content: 'ok'
mode: 0o0705
, (err, {status}) ->
status.should.be.true() unless err
.file
target: "#{scratch}/a_file"
content: 'ok'
mode: 0o0705
, (err, {status}) ->
status.should.be.false() unless err
.promise()
they 'change permission after modification', ({ssh}) ->
nikita
.file
ssh: ssh
target: "#{scratch}/a_file"
content: 'Hello'
mode: 0o0700
.file
ssh: ssh
target: "#{scratch}/a_file"
content: 'World'
mode: 0o0755
.file.assert
target: "#{scratch}/a_file"
mode: 0o0755
.promise()
describe 'from and to', ->
they 'with from and with to', ({ssh}) ->
nikita
ssh: ssh
.file
ssh: ssh
target: "#{scratch}/fromto.md"
from: '# from'
to: '# to'
content: 'here we are\n# from\nlets try to replace that one\n# to\nyou coquin'
replace: 'my friend'
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/fromto.md"
content: 'here we are\n# from\nmy friend\n# to\nyou coquin'
.promise()
they 'with from and with to append', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/fromto.md"
content: 'here we are\nyou coquin'
.file
target: "#{scratch}/fromto.md"
from: '# from'
to: '# to'
append: true
replace: 'my friend'
, (err, {status}) ->
status.should.be.true()
.file.assert
target: "#{scratch}/fromto.md"
content: 'here we are\nyou coquin\n# from\nmy friend\n# to'
.file
target: "#{scratch}/fromto.md"
from: '# from'
to: '# to'
append: true
replace: 'my best friend'
eof: true
, (err, {status}) ->
status.should.be.true()
.file.assert
target: "#{scratch}/fromto.md"
content: 'here we are\nyou coquin\n# from\nmy best friend\n# to\n'
.promise()
they 'with from and without to', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/fromto.md"
from: '# from'
content: 'here we are\n# from\nlets try to replace that one\n# to\nyou coquin'
replace: 'my friend'
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/fromto.md"
content: 'here we are\n# from\nmy friend'
.promise()
they 'without from and with to', ({ssh}) ->
nikita.file
ssh: ssh
target: "#{scratch}/fromto.md"
to: '# to'
content: 'here we are\n# from\nlets try to replace that one\n# to\nyou coquin'
replace: 'my friend'
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/fromto.md"
content: 'my friend\n# to\nyou coquin'
.promise()
describe 'replace', ->
they 'without match and place_before a string', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/fromto.md"
content: 'here we are\nyou+coquin'
replace: 'my friend'
place_before: 'you+coquin' # Regexp must escape the plus sign
.file.assert
target: "#{scratch}/fromto.md"
content: 'here we are\nmy friend\nyou+coquin'
.promise()
they 'without match and place_before a regexp', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/fromto.md"
content: 'here we are\nyou coquin'
replace: 'my friend'
place_before: /^you coquin$/m
.file.assert
target: "#{scratch}/fromto.md"
content: 'here we are\nmy friend\nyou coquin'
.promise()
  describe 'match & replace', ->
    they 'with match a line as a string', ({ssh}) ->
      # Second write matches the already-replaced text, so status is false.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/fromto.md"
        match: 'lets try to replace that one'
        content: 'here we are\nlets try to replace that one\nyou coquin'
        replace: 'my friend'
      , (err, {status}) ->
        status.should.be.true() unless err
      .file
        target: "#{scratch}/fromto.md"
        match: 'my friend'
        replace: 'my friend'
      , (err, {status}) ->
        status.should.be.false() unless err
      .file.assert
        target: "#{scratch}/fromto.md"
        content: 'here we are\nmy friend\nyou coquin'
      .promise()
    they 'with match a word as a string', ({ssh}) ->
      # A string match replaces every occurrence of the word, not just the first.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/fromto.md"
        match: 'replace'
        content: 'replace that one\nand\nreplace this one\nand not this one'
        replace: 'switch'
      , (err, {status}) ->
        status.should.be.true() unless err
      .file.assert
        target: "#{scratch}/fromto.md"
        content: 'switch that one\nand\nswitch this one\nand not this one'
      .promise()
they 'with match as a regular expression', ({ssh}) ->
# With a match
nikita
ssh: ssh
.file
target: "#{scratch}/replace"
content: 'email=david(at)adaltas(dot)com\nusername=root'
match: /(username)=(.*)/
replace: '$1=<NAME> (was $2)'
, (err, {status}) ->
status.should.be.true() unless err
.file # Without a match
target: "#{scratch}/replace"
match: /this wont work/
replace: '$1=<NAME> (was $2)'
, (err, {status}) ->
status.should.be.false() unless err
.file.assert
target: "#{scratch}/replace"
content: 'email=david(at)adaltas(dot)com\nusername=david (was root)'
.promise()
    they 'with match as a regular expression and multiple content', ({ssh}) ->
      # `replace` may be an array; each entry applies to the matched region.
      nikita
        ssh: ssh
      .file
        match: /(.*try) (.*)/
        content: 'here we are\nlets try to replace that one\nyou coquin'
        replace: ['my friend, $1']
        target: "#{scratch}/replace"
      , (err, {status}) ->
        status.should.be.true() unless err
      .file.assert
        target: "#{scratch}/replace"
        content: 'here we are\nmy friend, lets try\nyou coquin'
      .promise()
    they 'with match with global and multilines', ({ssh}) ->
      # /mg flags: every matching line is replaced, commented lines untouched.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/replace"
        match: /^property=.*$/mg
        content: '#A config file\n#property=30\nproperty=10\nproperty=20\n#End of Config'
        replace: 'property=50'
      , (err, {status}) ->
        status.should.be.true() unless err
      .file
        target: "#{scratch}/replace"
        match: /^property=50$/mg
        replace: 'property=50'
      , (err, {status}) ->
        status.should.be.false() unless err
      .file.assert
        target: "#{scratch}/replace"
        content: '#A config file\n#property=30\nproperty=50\nproperty=50\n#End of Config'
      .promise()
    they 'will replace target if source or content does not exists', ({ssh}) ->
      # match/replace alone operates on the existing target content.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/a_file"
        content: 'This is\nsome content\nfor testing'
      .file
        target: "#{scratch}/a_file"
        match: /(.*content)/
        replace: 'a text'
      , (err, {status}) ->
        status.should.be.true() unless err
      .file
        target: "#{scratch}/a_file"
        match: /(.*content)/
        replace: 'a text'
      , (err, {status}) ->
        status.should.be.false() unless err
      .file.assert
        target: "#{scratch}/a_file"
        content: 'This is\na text\nfor testing'
      .promise()
  describe 'place_before', ->
    # NOTE(review): the first test duplicates "append content to missing file"
    # from the `append` describe below — it exercises `append`, not
    # `place_before`; possibly a copy/paste leftover, confirm intent.
    they 'append content to missing file', ({ssh}) ->
      # File does not exist, it create it with the content
      nikita.file
        ssh: ssh
        target: "#{scratch}/a_file"
        content: 'hello'
        append: true
      .file.assert
        target: "#{scratch}/a_file"
        content: 'hello'
      .promise()
    they 'is true, prepend the content', ({ssh}) ->
      # File doesnt exists, creates one
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/a_file"
        content: 'world'
        place_before: true
      .file # File exists, prepends to it
        target: "#{scratch}/a_file"
        replace: 'hello'
        place_before: true
      .file.assert
        target: "#{scratch}/a_file"
        content: 'hello\nworld'
      .promise()
  describe 'append', ->
    they 'append content to missing file', ({ssh}) ->
      # File does not exist, it create it with the content
      nikita.file
        ssh: ssh
        target: "#{scratch}/a_file"
        content: 'hello'
        append: true
      .file.assert
        target: "#{scratch}/a_file"
        content: 'hello'
      .promise()
    they 'append content to existing file', ({ssh}) ->
      # File does not exists, it create one
      # Note: appending without a match concatenates with no separator,
      # hence 'helloworld' below.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/a_file"
        content: 'hello'
        append: true
      .file # File exists, it append to it
        target: "#{scratch}/a_file"
        content: 'world'
        append: true
      .file.assert
        target: "#{scratch}/a_file"
        content: 'helloworld'
      .promise()
  describe 'match & append or place_before', ->
    describe 'will not prepend/append if match', ->
      # When `match` already hits, the replacement happens in place and a
      # second identical write is a no-op (status false).
      they 'place_before true, replace a string, match a regexp', ({ssh}) ->
        # Prepare by creating a file with content
        nikita
          ssh: ssh
        .file
          target: "#{scratch}/file"
          content: 'you coquin\nhere we are\n'
        .file
          target: "#{scratch}/file"
          match: /.*coquin/
          replace: 'new coquin'
          place_before: true
        , (err, {status}) ->
          status.should.be.true() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'new coquin\nhere we are\n'
        # Write a second time with same match
        .file
          target: "#{scratch}/file"
          match: /.*coquin/
          replace: 'new coquin'
          place_before: true
        , (err, {status}) ->
          status.should.be.false() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'new coquin\nhere we are\n'
        .promise()
      they 'place_before true, replace a string, match a string', ({ssh}) ->
        nikita
          ssh: ssh
        .file
          target: "#{scratch}/file"
          content: 'you coquin\nhere we are\n'
        .file
          target: "#{scratch}/file"
          match: "you coquin"
          replace: 'new coquin'
          place_before: true
        , (err, {status}) ->
          status.should.be.true() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'new coquin\nhere we are\n'
        # Write a second time with same match
        .file
          target: "#{scratch}/file"
          match: "new coquin"
          replace: 'new coquin'
          place_before: true
        , (err, {status}) ->
          status.should.be.false() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'new coquin\nhere we are\n'
        .promise()
      they 'place_after', ({ssh}) ->
        # Prepare by creating a file with content
        nikita
          ssh: ssh
        .file
          target: "#{scratch}/file"
          content: 'here we are\nyou coquin\n'
        .file
          target: "#{scratch}/file"
          match: /.*coquin/
          replace: 'new coquin'
          append: true
        , (err, {status}) ->
          status.should.be.true() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'here we are\nnew coquin\n'
        # Write a second time with same match
        .file
          target: "#{scratch}/file"
          match: /.*coquin/
          replace: 'new coquin'
          append: true
        , (err, {status}) ->
          status.should.be.false() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'here we are\nnew coquin\n'
        .promise()
      they 'will append if no match', ({ssh}) ->
        # Prepare by creating a file with content
        nikita
          ssh: ssh
        .file
          target: "#{scratch}/file"
          content: 'here we are\nyou coquin\n'
        .file
          target: "#{scratch}/file"
          match: /will never work/
          replace: 'Add this line'
          append: true
        , (err, {status}) ->
          status.should.be.true() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'here we are\nyou coquin\nAdd this line'
        .promise()
    describe 'place_before/place_after a match if it is a regexp', ->
      # When `match` misses, the regexp in place_before/append locates where
      # the replacement line is inserted.
      they 'place_before', ({ssh}) ->
        # Prepare by creating a file with content
        nikita
          ssh: ssh
        .file
          target: "#{scratch}/file"
          content: 'here we are\nyou coquin\nshould we\nhave fun'
        .file
          target: "#{scratch}/file"
          match: /will never work/
          replace: 'Add this line'
          place_before: /^.*we.*$/m
        , (err, {status}) ->
          status.should.be.true() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'Add this line\nhere we are\nyou coquin\nshould we\nhave fun'
        .promise()
      they 'place_after', ({ssh}) ->
        # Prepare by creating a file with content
        nikita
          ssh: ssh
        .file
          target: "#{scratch}/file"
          content: 'here we are\nyou coquin\nshould we\nhave fun'
        .file
          target: "#{scratch}/file"
          match: /will never work/
          replace: 'Add this line'
          append: /^.*we.*$/m
        , (err, {status}) ->
          status.should.be.true() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'here we are\nAdd this line\nyou coquin\nshould we\nhave fun'
        .promise()
    describe 'place_before/place_after multiple times if regexp with global flag', ->
      # With /g, the insertion happens at every anchor occurrence.
      they 'place_before', ({ssh}) ->
        nikita
          ssh: ssh
        .file
          target: "#{scratch}/file"
          content: 'here we are\nyou coquin\nshould we\nhave fun'
        .file
          target: "#{scratch}/file"
          match: /will never work/
          replace: 'Add this line'
          place_before: /^.*we.*$/gm
        , (err, {status}) ->
          status.should.be.true() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'Add this line\nhere we are\nyou coquin\nAdd this line\nshould we\nhave fun'
        .promise()
      they 'place_after', ({ssh}) ->
        # Prepare by creating a file with content
        nikita
          ssh: ssh
        .file
          target: "#{scratch}/file"
          content: 'here we are\nyou coquin\nshould we\nhave fun'
        .file
          target: "#{scratch}/file"
          match: /will never work/
          replace: 'Add this line'
          append: /^.*we.*$/gm
        , (err, {status}) ->
          status.should.be.true() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'here we are\nAdd this line\nyou coquin\nshould we\nAdd this line\nhave fun'
        .promise()
    they 'will append place_after a match if append is a string', ({ssh}) ->
      # A string `append` behaves like a global anchor: inserted after every
      # line containing the string.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/file"
        content: 'here we are\nyou coquin\nshould we\nhave fun'
      .file
        target: "#{scratch}/file"
        match: /will never work/
        replace: 'Add this line'
        append: 'we'
      , (err, {status}) ->
        status.should.be.true() unless err
      .file.assert
        target: "#{scratch}/file"
        content: 'here we are\nAdd this line\nyou coquin\nshould we\nAdd this line\nhave fun'
      .promise()
    describe 'will detect new line if no match', ->
      # Without an anchor, insertion goes at the start (place_before) or the
      # end (append) of the file, separated by a newline.
      they 'place_before', ({ssh}) ->
        nikita
          ssh: ssh
        .file
          target: "#{scratch}/file"
          content: 'here we are\nyou coquin'
        .file
          target: "#{scratch}/file"
          match: /will never be found/
          replace: 'Add this line'
          place_before: true
        , (err, {status}) ->
          status.should.be.true() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'Add this line\nhere we are\nyou coquin'
        .promise()
      they 'place_after', ({ssh}) ->
        nikita
          ssh: ssh
        .file
          target: "#{scratch}/file"
          content: 'here we are\nyou coquin'
        .file
          target: "#{scratch}/file"
          match: /will never be found/
          replace: 'Add this line'
          append: true
        , (err, {status}) ->
          status.should.be.true() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'here we are\nyou coquin\nAdd this line'
        .promise()
    describe 'create file if not exists', ->
      # A missing target is created and receives the replacement line alone.
      they 'place_before', ({ssh}) ->
        nikita
          ssh: ssh
        .file
          target: "#{scratch}/file"
          match: /will never be found/
          replace: 'Add this line'
          place_before: true
        , (err, {status}) ->
          status.should.be.true() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'Add this line'
        .promise()
      they 'place_after', ({ssh}) ->
        nikita
          ssh: ssh
        .file
          target: "#{scratch}/file"
          match: /will never be found/
          replace: 'Add this line'
          append: true
        , (err, {status}) ->
          status.should.be.true() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'Add this line'
        .promise()
    they 'match is optional', ({ssh}) ->
      # Appending with `replace` but no `match` is idempotent, also through
      # the `write` array form.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/a_file"
        content: 'Here we are\nyou coquin'
      .file
        target: "#{scratch}/a_file"
        replace: 'Add this line'
        append: true
      , (err, {status}) ->
        status.should.be.true() unless err
      .file
        target: "#{scratch}/a_file"
        replace: 'Add this line'
        append: true
      , (err, {status}) ->
        status.should.be.false() unless err
      .file
        target: "#{scratch}/a_file"
        write: [
          replace: 'Add this line'
          append: true
        ]
      , (err, {status}) ->
        status.should.be.false() unless err
      .file.assert
        target: "#{scratch}/a_file"
        content: 'Here we are\nyou coquin\nAdd this line'
      .promise()
  describe 'backup', ->
    # A backup copy is written only when the target actually changes.
    they 'create a file', ({ssh}) ->
      # First we create a file
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/file"
        content: 'Hello'
      .file
        target: "#{scratch}/file"
        content: 'Hello'
        backup: '.bck'
      , (err, {status}) ->
        status.should.be.false() unless err
      .file.assert
        target: "#{scratch}/file.bck"
        not: true
      .file
        target: "#{scratch}/file"
        content: 'Hello Node'
        backup: '.bck'
      , (err, {status}) ->
        status.should.be.true() unless err
      .file.assert
        target: "#{scratch}/file.bck"
        content: 'Hello'
      .promise()
    they 'a non-existing file', ({ssh}) ->
      nikita.file
        ssh: ssh
        target: "#{scratch}/new_file"
        content: 'Hello'
        backup: true
      , (err, {status}) ->
        status.should.be.true() unless err
      .promise()
    they 'with specific permissions', ({ssh}) ->
      # NOTE(review): 0o0400 below suggests a read-only default backup mode
      # when `backup_mode` is not given — confirm against the implementation.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/new_file_perm"
        content: 'Hello World'
      .file
        target: "#{scratch}/new_file_perm"
        content: 'Hello'
        mode: 0o0644
        backup: '.bck1'
      , (err, {status}) ->
        status.should.be.true() unless err
      .file.assert
        target: "#{scratch}/new_file_perm.bck1"
        content: 'Hello World'
        mode: 0o0400
      .file
        target: "#{scratch}/new_file_perm"
        content: 'Hello World'
        backup: '.bck2'
        mode: 0o0644
        backup_mode: 0o0640
      , (err, {status}) ->
        status.should.be.true() unless err
      .file.assert
        target: "#{scratch}/new_file_perm.bck2"
        content: 'Hello'
        mode: 0o0640
      .promise()
  describe 'write', ->
    # The `write` option batches several match/replace/append rules in order.
    they 'do multiple replace', ({ssh}) ->
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/file"
        content: 'username: me\nemail: my@email\nfriends: you'
      .file
        target: "#{scratch}/file"
        write: [
          match: /^(username).*$/m
          replace: "$1: you"
        ,
          match: /^email.*$/m
          replace: ""
        ,
          match: /^(friends).*$/m
          replace: "$1: me"
        ]
      , (err, {status}) ->
        status.should.be.true() unless err
      .file.assert
        target: "#{scratch}/file"
        content: 'username: you\n\nfriends: me'
      .promise()
    they 'use append', ({ssh}) ->
      # An unmatched rule with `append` inserts after its anchor line.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/file"
        content: 'username: me\nfriends: you'
      .file
        target: "#{scratch}/file"
        write: [
          match: /^(username).*$/m
          replace: "$1: you"
        ,
          match: /^email.*$/m
          replace: "email: your@email"
          append: 'username'
        ,
          match: /^(friends).*$/m
          replace: "$1: me"
        ]
      , (err, {status}) ->
        status.should.be.true() unless err
      .file.assert
        target: "#{scratch}/file"
        content: 'username: you\nemail: your@email\nfriends: me'
      .promise()
    they 'handle partial match', ({ssh}) ->
      # First we create a file
      # A rule that never matches and has no append/place_before is skipped.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/file"
        content: 'username: me\nfriends: none'
      .file
        target: "#{scratch}/file"
        write: [
          match: /^will never match$/m
          replace: "useless"
        ,
          match: /^email.*$/m
          replace: "email: my@email"
          append: 'username'
        ,
          match: /^(friends).*$/m
          replace: "$1: you"
        ]
      , (err, {status}) ->
        status.should.be.true() unless err
      .file.assert
        target: "#{scratch}/file"
        content: 'username: me\nemail: my@email\nfriends: you'
      .promise()
  describe 'error', ->
    # `relax: true` routes the error to the callback instead of throwing.
    they 'can not define source and content', ({ssh}) ->
      nikita.file
        ssh: ssh
        target: 'abc'
        source: 'abc'
        content: 'abc'
        relax: true
      , (err) ->
        err.message.should.eql 'Define either source or content'
      .promise()
    they 'if source doesn\'t exists', ({ssh}) ->
      nikita.file
        ssh: ssh
        target: "#{scratch}/file"
        source: "#{scratch}/does/not/exists"
        relax: true
      , (err) ->
        err.message.should.eql "Source does not exist: \"#{scratch}/does/not/exists\""
      .promise()
    they 'if local source doesn\'t exists', ({ssh}) ->
      # `local: true` resolves the source on the local host even over SSH.
      nikita.file
        ssh: ssh
        target: "#{scratch}/file"
        source: "#{scratch}/does/not/exists"
        local: true
        relax: true
      , (err) ->
        err.message.should.eql "Source does not exist: \"#{scratch}/does/not/exists\""
      .promise()
  describe 'eof', ->
    they 'auto-detected', ({ssh}) ->
      # `eof: true` appends a final EOL matching the style already used in
      # the content — CRLF here.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/file"
        content: 'this is\r\nsome content'
        eof: true
      , (err, {status}) ->
        status.should.be.true() unless err
      .file.assert
        target: "#{scratch}/file"
        content: 'this is\r\nsome content\r\n'
      .promise()
they 'not detected', ({ssh}) ->
nikita
ssh: ssh
.file
ssh: ssh
target: "#{scratch}/file"
content: 'this is some content'
eof: true
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'this is some content\n'
.promise()
| true |
# Integration tests for the `file` action, run locally and over SSH.
nikita = require '../../src'
# Shared helpers: enabled tag set, SSH configuration(s) and scratch directory.
{tags, ssh, scratch} = require '../test'
# `ssh...` spreads the configured connection(s); `they` runs each test once
# per connection (including a local, ssh-less run).
they = require('ssh2-they').configure ssh...
# The suite relies on POSIX filesystem semantics; skip the module otherwise.
return unless tags.posix
describe 'file', ->
  describe 'options content', ->
    they 'is a string', ({ssh}) ->
      # First write creates (status true), identical rewrite is a no-op.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/file"
        content: 'Hello'
      , (err, {status}) ->
        status.should.be.true() unless err
      .file
        target: "#{scratch}/file"
        content: 'Hello'
      , (err, {status}) ->
        status.should.be.false() unless err
      .file.assert
        target: "#{scratch}/file"
        content: 'Hello'
      .promise()
    they 'is a function', ({ssh}) ->
      # `content` may be a function of the action options, resolved lazily —
      # here it sees the value assigned by the preceding `.call`.
      content = 'invalid'
      nikita
        ssh: ssh
      .call ->
        content = 'valid'
      .file
        target: "#{scratch}/file"
        trigger: true
        content: ({options}) -> content if options.trigger
      .file.assert
        target: "#{scratch}/file"
        content: 'valid'
      .promise()
they 'status is false is content is the same', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'Hello'
.file
target: "#{scratch}/file"
content: 'Hello'
, (err, {status}) ->
status.should.be.false() unless err
.promise()
    they 'with source is a file', ({ssh}) ->
      # Copy from `source`; re-running with an unchanged source is a no-op.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/a_source"
        content: 'Hello'
      .file
        target: "#{scratch}/a_target"
        source: "#{scratch}/a_source"
      , (err, {status}) ->
        status.should.be.true() unless err
      .file
        target: "#{scratch}/a_target"
        source: "#{scratch}/a_source"
      , (err, {status}) ->
        status.should.be.false() unless err
      .file.assert
        target: "#{scratch}/a_source"
        content: 'Hello'
      .file.assert
        target: "#{scratch}/a_target"
        content: 'Hello'
      .promise()
    they 'empty file', ({ssh}) ->
      # Empty string content creates an empty file.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/empty_file"
        content: ''
      , (err, {status}) ->
        status.should.be.true() unless err
      .file.assert
        target: "#{scratch}/empty_file"
        content: ''
      .promise()
    they 'touch file', ({ssh}) ->
      # `unless_exists` only writes when the target is absent.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/empty_file"
        content: ''
        unless_exists: true
      , (err, {status}) ->
        status.should.be.true() unless err
      .file.assert
        target: "#{scratch}/empty_file"
        content: ''
      .file
        target: "#{scratch}/empty_file"
        content: 'toto'
      .file
        target: "#{scratch}/empty_file"
        content: ''
        unless_exists: true
      , (err, {status}) ->
        status.should.be.false() unless err
      .file.assert
        target: "#{scratch}/empty_file"
        content: 'toto'
      .promise()
    they 'handle integer type', ({ssh}) ->
      # Numeric content is stringified on write.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/a_file"
        content: 123
      , (err, {status}) ->
        status.should.be.true() unless err
      .file.assert
        target: "#{scratch}/a_file"
        content: '123'
      .promise()
    they 'create parent directory', ({ssh}) ->
      # Missing intermediate directories are created automatically.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/a/missing/dir/a_file"
        content: 'hello'
      , (err, {status}) ->
        status.should.be.true() unless err
      .file.assert
        target: "#{scratch}/a/missing/dir/a_file"
        content: 'hello'
      .promise()
  describe 'link', ->
    they 'follow link by default', ({ssh}) ->
      # Writing through a symlink updates the link target.
      nikita
        ssh: ssh
      .file
        content: 'ko'
        target: "#{scratch}/target"
      .system.link
        source: "#{scratch}/target"
        target: "#{scratch}/link"
      .file
        content: 'ok'
        target: "#{scratch}/link"
      .file.assert
        target: "#{scratch}/target"
        content: 'ok'
      .file.assert
        target: "#{scratch}/link"
        content: 'ok'
      .promise()
    they 'throw error if link is a directory', ({ssh}) ->
      # Following a symlink that resolves to a directory fails with EISDIR.
      nikita
        ssh: ssh
      .system.mkdir
        target: "#{scratch}/target"
      .system.link
        source: "#{scratch}/target"
        target: "#{scratch}/link"
      .file
        content: 'ok'
        target: "#{scratch}/link"
        relax: true
      , (err) ->
        err.code.should.eql 'EISDIR'
      .promise()
    they 'dont follow link if option "unlink"', ({ssh}) ->
      # `unlink: true` replaces the symlink with a regular file; the original
      # target keeps its old content.
      nikita
        ssh: ssh
      .file
        content: 'ko'
        target: "#{scratch}/a_target"
      .system.link
        source: "#{scratch}/a_target"
        target: "#{scratch}/a_link"
      .file
        content: 'ok'
        target: "#{scratch}/a_link"
        unlink: true
      .file.assert
        target: "#{scratch}/a_target"
        content: 'ko'
      .file.assert
        target: "#{scratch}/a_link"
        content: 'ok'
      .promise()
    they 'dont follow link if option "unlink" and link is directory', ({ssh}) ->
      # With `unlink`, even a symlink to a directory is replaced by a file.
      nikita
        ssh: ssh
      .system.mkdir
        target: "#{scratch}/target"
      .system.link
        source: "#{scratch}/target"
        target: "#{scratch}/link"
      .file
        content: 'ok'
        target: "#{scratch}/link"
        unlink: true
      .file.assert
        target: "#{scratch}/link"
        content: 'ok'
        filetype: 'file'
      .file.assert
        target: "#{scratch}/target"
        filetype: 'directory'
      .promise()
  describe 'ownerships and permissions', ->
    they 'set permission', ({ssh}) ->
      # `mode` applies to the written file.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/a_file"
        content: 'ok'
        mode: 0o0700
      .file.assert
        target: "#{scratch}/a_file"
        mode: 0o0700
      .promise()
    they 'does not modify parent', ({ssh}) ->
      # The parent directory mode is left untouched by the file write.
      nikita
        ssh: ssh
      .system.mkdir
        target: "#{scratch}/a_dir"
        mode: 0o0744
      .file
        target: "#{scratch}/a_file"
        content: 'ok'
        mode: 0o0700
      .file.assert
        target: "#{scratch}/a_dir"
        mode: 0o0744
      .promise()
they 'ensure mode is preserve on content update', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'hello'
mode: 0o0755
.file
target: "#{scratch}/file"
content: "hello PI:NAME:<NAME>END_PI"
.file.assert
target: "#{scratch}/file"
mode: 0o0755
.promise()
    they 'change permission', ({ssh}) ->
      # Same content, new mode: first chmod reports a change, second is a no-op.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/a_file"
        content: 'ok'
        mode: 0o0700
      .file
        target: "#{scratch}/a_file"
        content: 'ok'
        mode: 0o0705
      , (err, {status}) ->
        status.should.be.true() unless err
      .file
        target: "#{scratch}/a_file"
        content: 'ok'
        mode: 0o0705
      , (err, {status}) ->
        status.should.be.false() unless err
      .promise()
they 'change permission after modification', ({ssh}) ->
nikita
.file
ssh: ssh
target: "#{scratch}/a_file"
content: 'Hello'
mode: 0o0700
.file
ssh: ssh
target: "#{scratch}/a_file"
content: 'World'
mode: 0o0755
.file.assert
target: "#{scratch}/a_file"
mode: 0o0755
.promise()
describe 'from and to', ->
they 'with from and with to', ({ssh}) ->
nikita
ssh: ssh
.file
ssh: ssh
target: "#{scratch}/fromto.md"
from: '# from'
to: '# to'
content: 'here we are\n# from\nlets try to replace that one\n# to\nyou coquin'
replace: 'my friend'
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/fromto.md"
content: 'here we are\n# from\nmy friend\n# to\nyou coquin'
.promise()
they 'with from and with to append', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/fromto.md"
content: 'here we are\nyou coquin'
.file
target: "#{scratch}/fromto.md"
from: '# from'
to: '# to'
append: true
replace: 'my friend'
, (err, {status}) ->
status.should.be.true()
.file.assert
target: "#{scratch}/fromto.md"
content: 'here we are\nyou coquin\n# from\nmy friend\n# to'
.file
target: "#{scratch}/fromto.md"
from: '# from'
to: '# to'
append: true
replace: 'my best friend'
eof: true
, (err, {status}) ->
status.should.be.true()
.file.assert
target: "#{scratch}/fromto.md"
content: 'here we are\nyou coquin\n# from\nmy best friend\n# to\n'
.promise()
    they 'with from and without to', ({ssh}) ->
      # With only `from`, everything after the marker is replaced.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/fromto.md"
        from: '# from'
        content: 'here we are\n# from\nlets try to replace that one\n# to\nyou coquin'
        replace: 'my friend'
      , (err, {status}) ->
        status.should.be.true() unless err
      .file.assert
        target: "#{scratch}/fromto.md"
        content: 'here we are\n# from\nmy friend'
      .promise()
    they 'without from and with to', ({ssh}) ->
      # With only `to`, everything before the marker is replaced.
      nikita.file
        ssh: ssh
        target: "#{scratch}/fromto.md"
        to: '# to'
        content: 'here we are\n# from\nlets try to replace that one\n# to\nyou coquin'
        replace: 'my friend'
      , (err, {status}) ->
        status.should.be.true() unless err
      .file.assert
        target: "#{scratch}/fromto.md"
        content: 'my friend\n# to\nyou coquin'
      .promise()
  describe 'replace', ->
    # `place_before` accepts either a plain string or a regexp to locate the
    # insertion point when `match` is absent.
    they 'without match and place_before a string', ({ssh}) ->
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/fromto.md"
        content: 'here we are\nyou+coquin'
        replace: 'my friend'
        place_before: 'you+coquin' # Regexp must escape the plus sign
      .file.assert
        target: "#{scratch}/fromto.md"
        content: 'here we are\nmy friend\nyou+coquin'
      .promise()
    they 'without match and place_before a regexp', ({ssh}) ->
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/fromto.md"
        content: 'here we are\nyou coquin'
        replace: 'my friend'
        place_before: /^you coquin$/m
      .file.assert
        target: "#{scratch}/fromto.md"
        content: 'here we are\nmy friend\nyou coquin'
      .promise()
  describe 'match & replace', ->
    they 'with match a line as a string', ({ssh}) ->
      # Second write matches the already-replaced text, so status is false.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/fromto.md"
        match: 'lets try to replace that one'
        content: 'here we are\nlets try to replace that one\nyou coquin'
        replace: 'my friend'
      , (err, {status}) ->
        status.should.be.true() unless err
      .file
        target: "#{scratch}/fromto.md"
        match: 'my friend'
        replace: 'my friend'
      , (err, {status}) ->
        status.should.be.false() unless err
      .file.assert
        target: "#{scratch}/fromto.md"
        content: 'here we are\nmy friend\nyou coquin'
      .promise()
    they 'with match a word as a string', ({ssh}) ->
      # A string match replaces every occurrence of the word, not just the first.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/fromto.md"
        match: 'replace'
        content: 'replace that one\nand\nreplace this one\nand not this one'
        replace: 'switch'
      , (err, {status}) ->
        status.should.be.true() unless err
      .file.assert
        target: "#{scratch}/fromto.md"
        content: 'switch that one\nand\nswitch this one\nand not this one'
      .promise()
they 'with match as a regular expression', ({ssh}) ->
# With a match
nikita
ssh: ssh
.file
target: "#{scratch}/replace"
content: 'email=david(at)adaltas(dot)com\nusername=root'
match: /(username)=(.*)/
replace: '$1=PI:NAME:<NAME>END_PI (was $2)'
, (err, {status}) ->
status.should.be.true() unless err
.file # Without a match
target: "#{scratch}/replace"
match: /this wont work/
replace: '$1=PI:NAME:<NAME>END_PI (was $2)'
, (err, {status}) ->
status.should.be.false() unless err
.file.assert
target: "#{scratch}/replace"
content: 'email=david(at)adaltas(dot)com\nusername=david (was root)'
.promise()
    they 'with match as a regular expression and multiple content', ({ssh}) ->
      # `replace` may be an array; each entry applies to the matched region.
      nikita
        ssh: ssh
      .file
        match: /(.*try) (.*)/
        content: 'here we are\nlets try to replace that one\nyou coquin'
        replace: ['my friend, $1']
        target: "#{scratch}/replace"
      , (err, {status}) ->
        status.should.be.true() unless err
      .file.assert
        target: "#{scratch}/replace"
        content: 'here we are\nmy friend, lets try\nyou coquin'
      .promise()
    they 'with match with global and multilines', ({ssh}) ->
      # /mg flags: every matching line is replaced, commented lines untouched.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/replace"
        match: /^property=.*$/mg
        content: '#A config file\n#property=30\nproperty=10\nproperty=20\n#End of Config'
        replace: 'property=50'
      , (err, {status}) ->
        status.should.be.true() unless err
      .file
        target: "#{scratch}/replace"
        match: /^property=50$/mg
        replace: 'property=50'
      , (err, {status}) ->
        status.should.be.false() unless err
      .file.assert
        target: "#{scratch}/replace"
        content: '#A config file\n#property=30\nproperty=50\nproperty=50\n#End of Config'
      .promise()
    they 'will replace target if source or content does not exists', ({ssh}) ->
      # match/replace alone operates on the existing target content.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/a_file"
        content: 'This is\nsome content\nfor testing'
      .file
        target: "#{scratch}/a_file"
        match: /(.*content)/
        replace: 'a text'
      , (err, {status}) ->
        status.should.be.true() unless err
      .file
        target: "#{scratch}/a_file"
        match: /(.*content)/
        replace: 'a text'
      , (err, {status}) ->
        status.should.be.false() unless err
      .file.assert
        target: "#{scratch}/a_file"
        content: 'This is\na text\nfor testing'
      .promise()
  describe 'place_before', ->
    # NOTE(review): the first test duplicates "append content to missing file"
    # from the `append` describe below — it exercises `append`, not
    # `place_before`; possibly a copy/paste leftover, confirm intent.
    they 'append content to missing file', ({ssh}) ->
      # File does not exist, it create it with the content
      nikita.file
        ssh: ssh
        target: "#{scratch}/a_file"
        content: 'hello'
        append: true
      .file.assert
        target: "#{scratch}/a_file"
        content: 'hello'
      .promise()
    they 'is true, prepend the content', ({ssh}) ->
      # File doesnt exists, creates one
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/a_file"
        content: 'world'
        place_before: true
      .file # File exists, prepends to it
        target: "#{scratch}/a_file"
        replace: 'hello'
        place_before: true
      .file.assert
        target: "#{scratch}/a_file"
        content: 'hello\nworld'
      .promise()
  describe 'append', ->
    they 'append content to missing file', ({ssh}) ->
      # File does not exist, it create it with the content
      nikita.file
        ssh: ssh
        target: "#{scratch}/a_file"
        content: 'hello'
        append: true
      .file.assert
        target: "#{scratch}/a_file"
        content: 'hello'
      .promise()
    they 'append content to existing file', ({ssh}) ->
      # File does not exists, it create one
      # Note: appending without a match concatenates with no separator,
      # hence 'helloworld' below.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/a_file"
        content: 'hello'
        append: true
      .file # File exists, it append to it
        target: "#{scratch}/a_file"
        content: 'world'
        append: true
      .file.assert
        target: "#{scratch}/a_file"
        content: 'helloworld'
      .promise()
  describe 'match & append or place_before', ->
    describe 'will not prepend/append if match', ->
      # When `match` already hits, the replacement happens in place and a
      # second identical write is a no-op (status false).
      they 'place_before true, replace a string, match a regexp', ({ssh}) ->
        # Prepare by creating a file with content
        nikita
          ssh: ssh
        .file
          target: "#{scratch}/file"
          content: 'you coquin\nhere we are\n'
        .file
          target: "#{scratch}/file"
          match: /.*coquin/
          replace: 'new coquin'
          place_before: true
        , (err, {status}) ->
          status.should.be.true() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'new coquin\nhere we are\n'
        # Write a second time with same match
        .file
          target: "#{scratch}/file"
          match: /.*coquin/
          replace: 'new coquin'
          place_before: true
        , (err, {status}) ->
          status.should.be.false() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'new coquin\nhere we are\n'
        .promise()
      they 'place_before true, replace a string, match a string', ({ssh}) ->
        nikita
          ssh: ssh
        .file
          target: "#{scratch}/file"
          content: 'you coquin\nhere we are\n'
        .file
          target: "#{scratch}/file"
          match: "you coquin"
          replace: 'new coquin'
          place_before: true
        , (err, {status}) ->
          status.should.be.true() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'new coquin\nhere we are\n'
        # Write a second time with same match
        .file
          target: "#{scratch}/file"
          match: "new coquin"
          replace: 'new coquin'
          place_before: true
        , (err, {status}) ->
          status.should.be.false() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'new coquin\nhere we are\n'
        .promise()
      they 'place_after', ({ssh}) ->
        # Prepare by creating a file with content
        nikita
          ssh: ssh
        .file
          target: "#{scratch}/file"
          content: 'here we are\nyou coquin\n'
        .file
          target: "#{scratch}/file"
          match: /.*coquin/
          replace: 'new coquin'
          append: true
        , (err, {status}) ->
          status.should.be.true() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'here we are\nnew coquin\n'
        # Write a second time with same match
        .file
          target: "#{scratch}/file"
          match: /.*coquin/
          replace: 'new coquin'
          append: true
        , (err, {status}) ->
          status.should.be.false() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'here we are\nnew coquin\n'
        .promise()
      they 'will append if no match', ({ssh}) ->
        # Prepare by creating a file with content
        nikita
          ssh: ssh
        .file
          target: "#{scratch}/file"
          content: 'here we are\nyou coquin\n'
        .file
          target: "#{scratch}/file"
          match: /will never work/
          replace: 'Add this line'
          append: true
        , (err, {status}) ->
          status.should.be.true() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'here we are\nyou coquin\nAdd this line'
        .promise()
    describe 'place_before/place_after a match if it is a regexp', ->
      # When `match` misses, the regexp in place_before/append locates where
      # the replacement line is inserted.
      they 'place_before', ({ssh}) ->
        # Prepare by creating a file with content
        nikita
          ssh: ssh
        .file
          target: "#{scratch}/file"
          content: 'here we are\nyou coquin\nshould we\nhave fun'
        .file
          target: "#{scratch}/file"
          match: /will never work/
          replace: 'Add this line'
          place_before: /^.*we.*$/m
        , (err, {status}) ->
          status.should.be.true() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'Add this line\nhere we are\nyou coquin\nshould we\nhave fun'
        .promise()
      they 'place_after', ({ssh}) ->
        # Prepare by creating a file with content
        nikita
          ssh: ssh
        .file
          target: "#{scratch}/file"
          content: 'here we are\nyou coquin\nshould we\nhave fun'
        .file
          target: "#{scratch}/file"
          match: /will never work/
          replace: 'Add this line'
          append: /^.*we.*$/m
        , (err, {status}) ->
          status.should.be.true() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'here we are\nAdd this line\nyou coquin\nshould we\nhave fun'
        .promise()
    describe 'place_before/place_after multiple times if regexp with global flag', ->
      # With /g, the insertion happens at every anchor occurrence.
      they 'place_before', ({ssh}) ->
        nikita
          ssh: ssh
        .file
          target: "#{scratch}/file"
          content: 'here we are\nyou coquin\nshould we\nhave fun'
        .file
          target: "#{scratch}/file"
          match: /will never work/
          replace: 'Add this line'
          place_before: /^.*we.*$/gm
        , (err, {status}) ->
          status.should.be.true() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'Add this line\nhere we are\nyou coquin\nAdd this line\nshould we\nhave fun'
        .promise()
      they 'place_after', ({ssh}) ->
        # Prepare by creating a file with content
        nikita
          ssh: ssh
        .file
          target: "#{scratch}/file"
          content: 'here we are\nyou coquin\nshould we\nhave fun'
        .file
          target: "#{scratch}/file"
          match: /will never work/
          replace: 'Add this line'
          append: /^.*we.*$/gm
        , (err, {status}) ->
          status.should.be.true() unless err
        .file.assert
          target: "#{scratch}/file"
          content: 'here we are\nAdd this line\nyou coquin\nshould we\nAdd this line\nhave fun'
        .promise()
    they 'will append place_after a match if append is a string', ({ssh}) ->
      # A string `append` behaves like a global anchor: inserted after every
      # line containing the string.
      nikita
        ssh: ssh
      .file
        target: "#{scratch}/file"
        content: 'here we are\nyou coquin\nshould we\nhave fun'
      .file
        target: "#{scratch}/file"
        match: /will never work/
        replace: 'Add this line'
        append: 'we'
      , (err, {status}) ->
        status.should.be.true() unless err
      .file.assert
        target: "#{scratch}/file"
        content: 'here we are\nAdd this line\nyou coquin\nshould we\nAdd this line\nhave fun'
      .promise()
describe 'will detect new line if no match', ->
they 'place_before', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'here we are\nyou coquin'
.file
target: "#{scratch}/file"
match: /will never be found/
replace: 'Add this line'
place_before: true
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'Add this line\nhere we are\nyou coquin'
.promise()
they 'place_after', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'here we are\nyou coquin'
.file
target: "#{scratch}/file"
match: /will never be found/
replace: 'Add this line'
append: true
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'here we are\nyou coquin\nAdd this line'
.promise()
describe 'create file if not exists', ->
they 'place_before', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
match: /will never be found/
replace: 'Add this line'
place_before: true
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'Add this line'
.promise()
they 'place_after', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
match: /will never be found/
replace: 'Add this line'
append: true
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'Add this line'
.promise()
they 'match is optional', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/a_file"
content: 'Here we are\nyou coquin'
.file
target: "#{scratch}/a_file"
replace: 'Add this line'
append: true
, (err, {status}) ->
status.should.be.true() unless err
.file
target: "#{scratch}/a_file"
replace: 'Add this line'
append: true
, (err, {status}) ->
status.should.be.false() unless err
.file
target: "#{scratch}/a_file"
write: [
replace: 'Add this line'
append: true
]
, (err, {status}) ->
status.should.be.false() unless err
.file.assert
target: "#{scratch}/a_file"
content: 'Here we are\nyou coquin\nAdd this line'
.promise()
describe 'backup', ->
they 'create a file', ({ssh}) ->
# First we create a file
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'Hello'
.file
target: "#{scratch}/file"
content: 'Hello'
backup: '.bck'
, (err, {status}) ->
status.should.be.false() unless err
.file.assert
target: "#{scratch}/file.bck"
not: true
.file
target: "#{scratch}/file"
content: 'Hello Node'
backup: '.bck'
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file.bck"
content: 'Hello'
.promise()
they 'a non-existing file', ({ssh}) ->
nikita.file
ssh: ssh
target: "#{scratch}/new_file"
content: 'Hello'
backup: true
, (err, {status}) ->
status.should.be.true() unless err
.promise()
they 'with specific permissions', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/new_file_perm"
content: 'Hello World'
.file
target: "#{scratch}/new_file_perm"
content: 'Hello'
mode: 0o0644
backup: '.bck1'
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/new_file_perm.bck1"
content: 'Hello World'
mode: 0o0400
.file
target: "#{scratch}/new_file_perm"
content: 'Hello World'
backup: '.bck2'
mode: 0o0644
backup_mode: 0o0640
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/new_file_perm.bck2"
content: 'Hello'
mode: 0o0640
.promise()
describe 'write', ->
they 'do multiple replace', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'username: me\nemail: my@email\nfriends: you'
.file
target: "#{scratch}/file"
write: [
match: /^(username).*$/m
replace: "$1: you"
,
match: /^email.*$/m
replace: ""
,
match: /^(friends).*$/m
replace: "$1: me"
]
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'username: you\n\nfriends: me'
.promise()
they 'use append', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'username: me\nfriends: you'
.file
target: "#{scratch}/file"
write: [
match: /^(username).*$/m
replace: "$1: you"
,
match: /^email.*$/m
replace: "email: your@email"
append: 'username'
,
match: /^(friends).*$/m
replace: "$1: me"
]
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'username: you\nemail: your@email\nfriends: me'
.promise()
they 'handle partial match', ({ssh}) ->
# First we create a file
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'username: me\nfriends: none'
.file
target: "#{scratch}/file"
write: [
match: /^will never match$/m
replace: "useless"
,
match: /^email.*$/m
replace: "email: my@email"
append: 'username'
,
match: /^(friends).*$/m
replace: "$1: you"
]
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'username: me\nemail: my@email\nfriends: you'
.promise()
describe 'error', ->
they 'can not define source and content', ({ssh}) ->
nikita.file
ssh: ssh
target: 'abc'
source: 'abc'
content: 'abc'
relax: true
, (err) ->
err.message.should.eql 'Define either source or content'
.promise()
they 'if source doesn\'t exists', ({ssh}) ->
nikita.file
ssh: ssh
target: "#{scratch}/file"
source: "#{scratch}/does/not/exists"
relax: true
, (err) ->
err.message.should.eql "Source does not exist: \"#{scratch}/does/not/exists\""
.promise()
they 'if local source doesn\'t exists', ({ssh}) ->
nikita.file
ssh: ssh
target: "#{scratch}/file"
source: "#{scratch}/does/not/exists"
local: true
relax: true
, (err) ->
err.message.should.eql "Source does not exist: \"#{scratch}/does/not/exists\""
.promise()
describe 'eof', ->
they 'auto-detected', ({ssh}) ->
nikita
ssh: ssh
.file
target: "#{scratch}/file"
content: 'this is\r\nsome content'
eof: true
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'this is\r\nsome content\r\n'
.promise()
they 'not detected', ({ssh}) ->
nikita
ssh: ssh
.file
ssh: ssh
target: "#{scratch}/file"
content: 'this is some content'
eof: true
, (err, {status}) ->
status.should.be.true() unless err
.file.assert
target: "#{scratch}/file"
content: 'this is some content\n'
.promise()
|
[
{
"context": "###\nCopyright (c) 2013, Alexander Cherniuk <ts33kr@gmail.com>\nAll rights reserved.\n\nRedistri",
"end": 42,
"score": 0.9998520016670227,
"start": 24,
"tag": "NAME",
"value": "Alexander Cherniuk"
},
{
"context": "###\nCopyright (c) 2013, Alexander Cherniuk <ts33kr@gmai... | library/nucleus/loader.coffee | ts33kr/granite | 6 | ###
Copyright (c) 2013, Alexander Cherniuk <ts33kr@gmail.com>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###
_ = require "lodash"
assert = require "assert"
wrench = require "wrench"
colors = require "colors"
logger = require "winston"
paths = require "path"
fs = require "fs"
# This method is the base method for very important functionality.
# It scans the supplied directory, find all the modules there and
# return an object, where keys are names of modules minus the ext.
# This is used to build up entire module hierarchy of the framework.
# Values will be holding the module structures along with exports.
module.exports.collectModules = (directory, shallow) ->
ext = (name) -> return paths.extname name
sym = (name) -> paths.basename name, ext name
assert supported = _.toArray [".coffee", ".js"]
isSupported = (name) -> ext(name) in supported
ingest = (x) -> require paths.resolve directory, x
return Object() unless fs.existsSync directory
assert scanSync = wrench.readdirSyncRecursive
assert scanSync = fs.readdirSync if shallow
scanned = try scanSync directory.toString()
supported = _.filter scanned, isSupported
modules = _.map supported or Array(), ingest
symbols = _.map supported or Array(), sym
return _.object(symbols, modules) or {}
# This method is the base method for very important functionality.
# It scans the supplied directory, find all the packages there and
# return an object, where keys are names of the packages (directory).
# This is used to build up entire module hierarchy of the framework.
# Values will be holding the package structure along with modules.
module.exports.collectPackages = (closure, directory) ->
stat = (p) -> return try fs.statSync fix p
isDir = (p) -> return stat(p).isDirectory()
fix = (p) -> return paths.join directory, p
resolve = -> paths.resolve closure, directory
directory = "library" unless directory or null
directory = resolve() if _.isString closure
notification = "Collecting packages at %s"
try logger.info notification.grey, directory
nodes = fs.readdirSync directory.toString()
directories = _.toArray _.filter nodes, isDir
collectModules = module.exports.collectModules
collectPackages = module.exports.collectPackages
scanner = (d) -> collectPackages closure, fix d
symbols = _.map directories, paths.basename
packages = _.map directories, scanner
packages = _.object symbols, packages
modules = collectModules directory, yes
return _.merge modules, packages
# Traverse the hierarchy of all cached modules and try find kernel
# class that has most deep hiererachy. That is the kernel that seems
# most derived from the original one. If no such kernel can be found
# then revert to returning the original kernel embedded in framework.
# Beware, this code logic is far from idea and therefor error prone.
module.exports.cachedKernel = (limits) ->
assert _.isString limits, "no limits"
limits = paths.resolve limits.toString()
origin = require("./scaled").ScaledKernel
generc = require("./scaled").GraniteKernel
assert _.isObject(origin), "no kernel origin"
lost = -> throw new Error "cannot find kernel"
notKnown = (k) -> try k not in [origin, generic]
limiter = (m) -> m.filename.indexOf(limits) is 0
limited = _.filter require.cache or {}, limiter
spaces = _.map limited, (xmod) -> xmod.exports
hierarchy = (c) -> c.hierarchy().length or 0
isKernel = (xcls) -> try xcls.derives? origin
values = try _.flatten _.map(spaces, _.values)
objects = _.filter values or [], _.isObject
kernels = _.filter objects or [], isKernel
derived = _.filter kernels or [], notKnown
sorted = _.sortBy derived or [], hierarchy
return _.last(sorted) or origin or lost()
| 115848 | ###
Copyright (c) 2013, <NAME> <<EMAIL>>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###
_ = require "lodash"
assert = require "assert"
wrench = require "wrench"
colors = require "colors"
logger = require "winston"
paths = require "path"
fs = require "fs"
# This method is the base method for very important functionality.
# It scans the supplied directory, find all the modules there and
# return an object, where keys are names of modules minus the ext.
# This is used to build up entire module hierarchy of the framework.
# Values will be holding the module structures along with exports.
module.exports.collectModules = (directory, shallow) ->
ext = (name) -> return paths.extname name
sym = (name) -> paths.basename name, ext name
assert supported = _.toArray [".coffee", ".js"]
isSupported = (name) -> ext(name) in supported
ingest = (x) -> require paths.resolve directory, x
return Object() unless fs.existsSync directory
assert scanSync = wrench.readdirSyncRecursive
assert scanSync = fs.readdirSync if shallow
scanned = try scanSync directory.toString()
supported = _.filter scanned, isSupported
modules = _.map supported or Array(), ingest
symbols = _.map supported or Array(), sym
return _.object(symbols, modules) or {}
# This method is the base method for very important functionality.
# It scans the supplied directory, find all the packages there and
# return an object, where keys are names of the packages (directory).
# This is used to build up entire module hierarchy of the framework.
# Values will be holding the package structure along with modules.
module.exports.collectPackages = (closure, directory) ->
stat = (p) -> return try fs.statSync fix p
isDir = (p) -> return stat(p).isDirectory()
fix = (p) -> return paths.join directory, p
resolve = -> paths.resolve closure, directory
directory = "library" unless directory or null
directory = resolve() if _.isString closure
notification = "Collecting packages at %s"
try logger.info notification.grey, directory
nodes = fs.readdirSync directory.toString()
directories = _.toArray _.filter nodes, isDir
collectModules = module.exports.collectModules
collectPackages = module.exports.collectPackages
scanner = (d) -> collectPackages closure, fix d
symbols = _.map directories, paths.basename
packages = _.map directories, scanner
packages = _.object symbols, packages
modules = collectModules directory, yes
return _.merge modules, packages
# Traverse the hierarchy of all cached modules and try find kernel
# class that has most deep hiererachy. That is the kernel that seems
# most derived from the original one. If no such kernel can be found
# then revert to returning the original kernel embedded in framework.
# Beware, this code logic is far from idea and therefor error prone.
module.exports.cachedKernel = (limits) ->
assert _.isString limits, "no limits"
limits = paths.resolve limits.toString()
origin = require("./scaled").ScaledKernel
generc = require("./scaled").GraniteKernel
assert _.isObject(origin), "no kernel origin"
lost = -> throw new Error "cannot find kernel"
notKnown = (k) -> try k not in [origin, generic]
limiter = (m) -> m.filename.indexOf(limits) is 0
limited = _.filter require.cache or {}, limiter
spaces = _.map limited, (xmod) -> xmod.exports
hierarchy = (c) -> c.hierarchy().length or 0
isKernel = (xcls) -> try xcls.derives? origin
values = try _.flatten _.map(spaces, _.values)
objects = _.filter values or [], _.isObject
kernels = _.filter objects or [], isKernel
derived = _.filter kernels or [], notKnown
sorted = _.sortBy derived or [], hierarchy
return _.last(sorted) or origin or lost()
| true | ###
Copyright (c) 2013, PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###
_ = require "lodash"
assert = require "assert"
wrench = require "wrench"
colors = require "colors"
logger = require "winston"
paths = require "path"
fs = require "fs"
# This method is the base method for very important functionality.
# It scans the supplied directory, find all the modules there and
# return an object, where keys are names of modules minus the ext.
# This is used to build up entire module hierarchy of the framework.
# Values will be holding the module structures along with exports.
module.exports.collectModules = (directory, shallow) ->
ext = (name) -> return paths.extname name
sym = (name) -> paths.basename name, ext name
assert supported = _.toArray [".coffee", ".js"]
isSupported = (name) -> ext(name) in supported
ingest = (x) -> require paths.resolve directory, x
return Object() unless fs.existsSync directory
assert scanSync = wrench.readdirSyncRecursive
assert scanSync = fs.readdirSync if shallow
scanned = try scanSync directory.toString()
supported = _.filter scanned, isSupported
modules = _.map supported or Array(), ingest
symbols = _.map supported or Array(), sym
return _.object(symbols, modules) or {}
# This method is the base method for very important functionality.
# It scans the supplied directory, find all the packages there and
# return an object, where keys are names of the packages (directory).
# This is used to build up entire module hierarchy of the framework.
# Values will be holding the package structure along with modules.
module.exports.collectPackages = (closure, directory) ->
stat = (p) -> return try fs.statSync fix p
isDir = (p) -> return stat(p).isDirectory()
fix = (p) -> return paths.join directory, p
resolve = -> paths.resolve closure, directory
directory = "library" unless directory or null
directory = resolve() if _.isString closure
notification = "Collecting packages at %s"
try logger.info notification.grey, directory
nodes = fs.readdirSync directory.toString()
directories = _.toArray _.filter nodes, isDir
collectModules = module.exports.collectModules
collectPackages = module.exports.collectPackages
scanner = (d) -> collectPackages closure, fix d
symbols = _.map directories, paths.basename
packages = _.map directories, scanner
packages = _.object symbols, packages
modules = collectModules directory, yes
return _.merge modules, packages
# Traverse the hierarchy of all cached modules and try find kernel
# class that has most deep hiererachy. That is the kernel that seems
# most derived from the original one. If no such kernel can be found
# then revert to returning the original kernel embedded in framework.
# Beware, this code logic is far from idea and therefor error prone.
module.exports.cachedKernel = (limits) ->
assert _.isString limits, "no limits"
limits = paths.resolve limits.toString()
origin = require("./scaled").ScaledKernel
generc = require("./scaled").GraniteKernel
assert _.isObject(origin), "no kernel origin"
lost = -> throw new Error "cannot find kernel"
notKnown = (k) -> try k not in [origin, generic]
limiter = (m) -> m.filename.indexOf(limits) is 0
limited = _.filter require.cache or {}, limiter
spaces = _.map limited, (xmod) -> xmod.exports
hierarchy = (c) -> c.hierarchy().length or 0
isKernel = (xcls) -> try xcls.derives? origin
values = try _.flatten _.map(spaces, _.values)
objects = _.filter values or [], _.isObject
kernels = _.filter objects or [], isKernel
derived = _.filter kernels or [], notKnown
sorted = _.sortBy derived or [], hierarchy
return _.last(sorted) or origin or lost()
|
[
{
"context": "# angularScopeExtend / Alex Solo 2015 / License: MIT\n\n'format global'\n'deps angula",
"end": 32,
"score": 0.999854564666748,
"start": 23,
"tag": "NAME",
"value": "Alex Solo"
},
{
"context": " listens.push\n key: name\n ... | angular-scope-extend.coffee | civilframe/angular-scope-extend | 3 | # angularScopeExtend / Alex Solo 2015 / License: MIT
'format global'
'deps angular'
do ->
# Internal functions
_isObject = (obj) ->
return typeof obj is 'object'
_isFunction = (obj) ->
return typeof obj is 'function'
_isArray = (obj) ->
return toString.call(obj) is '[object Array]'
_warning = (msg) ->
console.warn 'scopeExtend - ' + msg
# The module
angularScopeExtend = (angular) ->
module = angular.module('angular-scope-extend', [])
module.factory 'scopeExtend', ->
# The scopeExtend factory proper, which is a function
return (scope, members) ->
# Perform sanity checks
unless _isObject(scope)
_warning 'Scope parameter is not an object, exiting early.'
return
unless _isObject(members)
_warning 'Members parameter is not an object, exiting early.'
return
# Setup internal members
listens = []
watches = []
_setupWatch = (name, params) ->
unless _isFunction(params) or _isObject(params)
_warning 'Watch parameter must be a function or object, skipping.'
return
if _isFunction(params)
expression = name
callback = params
depth = 'shallow'
if _isObject(params)
if params.expressionGroup
unless _isArray(params.expressionGroup)
_warning 'Watch parameter expressionGroup must be an array, skipping.'
return
expression = params.expressionGroup
depth = 'group'
else
expression = params.expression || name
depth = params.depth || 'shallow'
callback = params.callback
deregister = switch depth
when 'shallow' then scope.$watch(expression, -> callback.apply(scope, arguments))
when 'deep' then scope.$watch(expression, (-> callback.apply(scope, arguments)), true)
when 'collection' then scope.$watchCollection(expression, -> callback.apply(scope, arguments))
when 'group' then scope.$watchGroup(expression, -> callback.apply(scope, arguments))
watches.push
key: name
callback: deregister
_setupListen = (name, callback) ->
deregister = scope.$on(name, -> callback.apply(scope, arguments))
listens.push
key: name
callback: deregister
# Extend the scope with members
if members.variables
for own variableName, variableValue of members.variables
scope[variableName] = variableValue
# Extend the scope with methods
if members.methods
for own methodName, methodBody of members.methods
scope[methodName] = methodBody
# Register watch listeners
if members.watch
for own watchName, watchParams of members.watch
_setupWatch(watchName, watchParams)
# Register event listeners
if members.listen
for own listenName, listenCallback of members.listen
_setupListen(listenName, listenCallback)
# Extend the scope with _forgetWatch, for deregistering watches
scope._forgetWatch = (watchName) ->
watchesToRemove = []
for watch in watches
continue unless watch.key is watchName
watch.callback()
watchesToRemove.push(watch)
for remove in watchesToRemove
index = watches.indexOf(remove)
watches.splice(index, 1)
# Extend the scope with _forgetListen, for deregistering listeners
scope._forgetListen = (listenName) ->
listensToRemove = []
for listen in listens
continue unless listen.key is listenName
listen.callback()
listensToRemove.push(listen)
for remove in listensToRemove
index = listens.indexOf(remove)
listens.splice(index, 1)
# Call the initialize methods
if members.initialize
members.initialize.apply(scope)
return module
# Export module in differnet formats
if typeof define is 'function' and define.amd
define('angular-scope-extend', ['angular'], angularScopeExtend)
else
angularScopeExtend(window.angular)
| 90870 | # angularScopeExtend / <NAME> 2015 / License: MIT
'format global'
'deps angular'
do ->
# Internal functions
_isObject = (obj) ->
return typeof obj is 'object'
_isFunction = (obj) ->
return typeof obj is 'function'
_isArray = (obj) ->
return toString.call(obj) is '[object Array]'
_warning = (msg) ->
console.warn 'scopeExtend - ' + msg
# The module
angularScopeExtend = (angular) ->
module = angular.module('angular-scope-extend', [])
module.factory 'scopeExtend', ->
# The scopeExtend factory proper, which is a function
return (scope, members) ->
# Perform sanity checks
unless _isObject(scope)
_warning 'Scope parameter is not an object, exiting early.'
return
unless _isObject(members)
_warning 'Members parameter is not an object, exiting early.'
return
# Setup internal members
listens = []
watches = []
_setupWatch = (name, params) ->
unless _isFunction(params) or _isObject(params)
_warning 'Watch parameter must be a function or object, skipping.'
return
if _isFunction(params)
expression = name
callback = params
depth = 'shallow'
if _isObject(params)
if params.expressionGroup
unless _isArray(params.expressionGroup)
_warning 'Watch parameter expressionGroup must be an array, skipping.'
return
expression = params.expressionGroup
depth = 'group'
else
expression = params.expression || name
depth = params.depth || 'shallow'
callback = params.callback
deregister = switch depth
when 'shallow' then scope.$watch(expression, -> callback.apply(scope, arguments))
when 'deep' then scope.$watch(expression, (-> callback.apply(scope, arguments)), true)
when 'collection' then scope.$watchCollection(expression, -> callback.apply(scope, arguments))
when 'group' then scope.$watchGroup(expression, -> callback.apply(scope, arguments))
watches.push
key: name
callback: deregister
_setupListen = (name, callback) ->
deregister = scope.$on(name, -> callback.apply(scope, arguments))
listens.push
key: <NAME>
callback: deregister
# Extend the scope with members
if members.variables
for own variableName, variableValue of members.variables
scope[variableName] = variableValue
# Extend the scope with methods
if members.methods
for own methodName, methodBody of members.methods
scope[methodName] = methodBody
# Register watch listeners
if members.watch
for own watchName, watchParams of members.watch
_setupWatch(watchName, watchParams)
# Register event listeners
if members.listen
for own listenName, listenCallback of members.listen
_setupListen(listenName, listenCallback)
# Extend the scope with _forgetWatch, for deregistering watches
scope._forgetWatch = (watchName) ->
watchesToRemove = []
for watch in watches
continue unless watch.key is watchName
watch.callback()
watchesToRemove.push(watch)
for remove in watchesToRemove
index = watches.indexOf(remove)
watches.splice(index, 1)
# Extend the scope with _forgetListen, for deregistering listeners
scope._forgetListen = (listenName) ->
listensToRemove = []
for listen in listens
continue unless listen.key is listenName
listen.callback()
listensToRemove.push(listen)
for remove in listensToRemove
index = listens.indexOf(remove)
listens.splice(index, 1)
# Call the initialize methods
if members.initialize
members.initialize.apply(scope)
return module
# Export module in differnet formats
if typeof define is 'function' and define.amd
define('angular-scope-extend', ['angular'], angularScopeExtend)
else
angularScopeExtend(window.angular)
| true | # angularScopeExtend / PI:NAME:<NAME>END_PI 2015 / License: MIT
'format global'
'deps angular'
do ->
# Internal functions
_isObject = (obj) ->
return typeof obj is 'object'
_isFunction = (obj) ->
return typeof obj is 'function'
_isArray = (obj) ->
return toString.call(obj) is '[object Array]'
_warning = (msg) ->
console.warn 'scopeExtend - ' + msg
# The module
angularScopeExtend = (angular) ->
module = angular.module('angular-scope-extend', [])
module.factory 'scopeExtend', ->
# The scopeExtend factory proper, which is a function
return (scope, members) ->
# Perform sanity checks
unless _isObject(scope)
_warning 'Scope parameter is not an object, exiting early.'
return
unless _isObject(members)
_warning 'Members parameter is not an object, exiting early.'
return
# Setup internal members
listens = []
watches = []
_setupWatch = (name, params) ->
unless _isFunction(params) or _isObject(params)
_warning 'Watch parameter must be a function or object, skipping.'
return
if _isFunction(params)
expression = name
callback = params
depth = 'shallow'
if _isObject(params)
if params.expressionGroup
unless _isArray(params.expressionGroup)
_warning 'Watch parameter expressionGroup must be an array, skipping.'
return
expression = params.expressionGroup
depth = 'group'
else
expression = params.expression || name
depth = params.depth || 'shallow'
callback = params.callback
deregister = switch depth
when 'shallow' then scope.$watch(expression, -> callback.apply(scope, arguments))
when 'deep' then scope.$watch(expression, (-> callback.apply(scope, arguments)), true)
when 'collection' then scope.$watchCollection(expression, -> callback.apply(scope, arguments))
when 'group' then scope.$watchGroup(expression, -> callback.apply(scope, arguments))
watches.push
key: name
callback: deregister
_setupListen = (name, callback) ->
deregister = scope.$on(name, -> callback.apply(scope, arguments))
listens.push
key: PI:NAME:<NAME>END_PI
callback: deregister
# Extend the scope with members
if members.variables
for own variableName, variableValue of members.variables
scope[variableName] = variableValue
# Extend the scope with methods
if members.methods
for own methodName, methodBody of members.methods
scope[methodName] = methodBody
# Register watch listeners
if members.watch
for own watchName, watchParams of members.watch
_setupWatch(watchName, watchParams)
# Register event listeners
if members.listen
for own listenName, listenCallback of members.listen
_setupListen(listenName, listenCallback)
# Extend the scope with _forgetWatch, for deregistering watches
scope._forgetWatch = (watchName) ->
watchesToRemove = []
for watch in watches
continue unless watch.key is watchName
watch.callback()
watchesToRemove.push(watch)
for remove in watchesToRemove
index = watches.indexOf(remove)
watches.splice(index, 1)
# Extend the scope with _forgetListen, for deregistering listeners
scope._forgetListen = (listenName) ->
listensToRemove = []
for listen in listens
continue unless listen.key is listenName
listen.callback()
listensToRemove.push(listen)
for remove in listensToRemove
index = listens.indexOf(remove)
listens.splice(index, 1)
# Call the initialize methods
if members.initialize
members.initialize.apply(scope)
return module
# Export module in differnet formats
if typeof define is 'function' and define.amd
define('angular-scope-extend', ['angular'], angularScopeExtend)
else
angularScopeExtend(window.angular)
|
[
{
"context": "AssetModel\n display_name: \"test asset\"\n url: 'actual_asset",
"end": 1785,
"score": 0.7807860970497131,
"start": 1781,
"tag": "NAME",
"value": "test"
}
] | work-files/install-native/cms/static/coffee/spec/views/assets_spec.coffee | lpm0073/netec-edx-theme | 0 | define ["jquery", "edx-ui-toolkit/js/utils/spec-helpers/ajax-helpers", "squire"],
($, AjaxHelpers, Squire) ->
assetLibraryTpl = readFixtures('asset-library.underscore')
assetTpl = readFixtures('asset.underscore')
describe "Asset view", ->
beforeEach (done) ->
setFixtures($("<script>", {id: "asset-tpl", type: "text/template"}).text(assetTpl))
appendSetFixtures(sandbox({id: "page-prompt"}))
@promptSpies = jasmine.createSpyObj('Prompt.Warning', ["constructor", "show", "hide"])
@promptSpies.constructor.and.returnValue(@promptSpies)
@promptSpies.show.and.returnValue(@promptSpies)
@confirmationSpies = jasmine.createSpyObj('Notification.Confirmation', ["constructor", "show"])
@confirmationSpies.constructor.and.returnValue(@confirmationSpies)
@confirmationSpies.show.and.returnValue(@confirmationSpies)
@savingSpies = jasmine.createSpyObj('Notification.Mini', ["constructor", "show", "hide"])
@savingSpies.constructor.and.returnValue(@savingSpies)
@savingSpies.show.and.returnValue(@savingSpies)
@injector = new Squire()
@injector.mock("common/js/components/views/feedback_prompt", {
"Warning": @promptSpies.constructor
})
@injector.mock("common/js/components/views/feedback_notification", {
"Confirmation": @confirmationSpies.constructor,
"Mini": @savingSpies.constructor
})
@injector.require ["js/models/asset", "js/collections/asset", "js/views/asset"],
(AssetModel, AssetCollection, AssetView) =>
@model = new AssetModel
display_name: "test asset"
url: 'actual_asset_url'
portable_url: 'portable_url'
date_added: 'date'
thumbnail: null
id: 'id'
spyOn(@model, "destroy").and.callThrough()
spyOn(@model, "save").and.callThrough()
@collection = new AssetCollection([@model])
@collection.url = "assets-url"
@createAssetView = (test) =>
view = new AssetView({model: @model})
requests = if test then AjaxHelpers["requests"](test) else null
return {view: view, requests: requests}
done()
afterEach ->
@injector.clean()
@injector.remove()
describe "Basic", ->
it "should render properly", ->
{view: @view, requests: requests} = @createAssetView()
@view.render()
expect(@view.$el).toContainText("test asset")
it "should pop a delete confirmation when the delete button is clicked", ->
{view: @view, requests: requests} = @createAssetView()
@view.render().$(".remove-asset-button").click()
expect(@promptSpies.constructor).toHaveBeenCalled()
ctorOptions = @promptSpies.constructor.calls.mostRecent().args[0]
expect(ctorOptions.title).toMatch('Delete File Confirmation')
# hasn't actually been removed
expect(@model.destroy).not.toHaveBeenCalled()
expect(@collection).toContain(@model)
describe "AJAX", ->
it "should destroy itself on confirmation", ->
{view: @view, requests: requests} = @createAssetView(this)
@view.render().$(".remove-asset-button").click()
ctorOptions = @promptSpies.constructor.calls.mostRecent().args[0]
# run the primary function to indicate confirmation
ctorOptions.actions.primary.click(@promptSpies)
# AJAX request has been sent, but not yet returned
expect(@model.destroy).toHaveBeenCalled()
expect(requests.length).toEqual(1)
expect(@confirmationSpies.constructor).not.toHaveBeenCalled()
expect(@collection.contains(@model)).toBeTruthy()
# return a success response
requests[0].respond(204)
expect(@confirmationSpies.constructor).toHaveBeenCalled()
expect(@confirmationSpies.show).toHaveBeenCalled()
savingOptions = @confirmationSpies.constructor.calls.mostRecent().args[0]
expect(savingOptions.title).toMatch("Your file has been deleted.")
expect(@collection.contains(@model)).toBeFalsy()
it "should not destroy itself if server errors", ->
{view: @view, requests: requests} = @createAssetView(this)
@view.render().$(".remove-asset-button").click()
ctorOptions = @promptSpies.constructor.calls.mostRecent().args[0]
# run the primary function to indicate confirmation
ctorOptions.actions.primary.click(@promptSpies)
# AJAX request has been sent, but not yet returned
expect(@model.destroy).toHaveBeenCalled()
# return an error response
requests[0].respond(404)
expect(@confirmationSpies.constructor).not.toHaveBeenCalled()
expect(@collection.contains(@model)).toBeTruthy()
it "should lock the asset on confirmation", ->
{view: @view, requests: requests} = @createAssetView(this)
@view.render().$(".lock-checkbox").click()
# AJAX request has been sent, but not yet returned
expect(@model.save).toHaveBeenCalled()
expect(requests.length).toEqual(1)
expect(@savingSpies.constructor).toHaveBeenCalled()
expect(@savingSpies.show).toHaveBeenCalled()
savingOptions = @savingSpies.constructor.calls.mostRecent().args[0]
expect(savingOptions.title).toMatch("Saving")
expect(@model.get("locked")).toBeFalsy()
# return a success response
requests[0].respond(204)
expect(@savingSpies.hide).toHaveBeenCalled()
expect(@model.get("locked")).toBeTruthy()
it "should not lock the asset if server errors", ->
{view: @view, requests: requests} = @createAssetView(this)
@view.render().$(".lock-checkbox").click()
# return an error response
requests[0].respond(404)
# Don't call hide because that closes the notification showing the server error.
expect(@savingSpies.hide).not.toHaveBeenCalled()
expect(@model.get("locked")).toBeFalsy()
describe "Assets view", ->
beforeEach (done) ->
setFixtures($("<script>", {id: "asset-library-tpl", type: "text/template"}).text(assetLibraryTpl))
appendSetFixtures($("<script>", {id: "asset-tpl", type: "text/template"}).text(assetTpl))
window.analytics = jasmine.createSpyObj('analytics', ['track'])
window.course_location_analytics = jasmine.createSpy()
appendSetFixtures(sandbox({id: "asset_table_body"}))
@promptSpies = jasmine.createSpyObj('Prompt.Warning', ["constructor", "show", "hide"])
@promptSpies.constructor.and.returnValue(@promptSpies)
@promptSpies.show.and.returnValue(@promptSpies)
@injector = new Squire()
@injector.mock("common/js/components/views/feedback_prompt", {
"Warning": @promptSpies.constructor
})
@mockAsset1 = {
display_name: "test asset 1"
url: 'actual_asset_url_1'
portable_url: 'portable_url_1'
date_added: 'date_1'
thumbnail: null
id: 'id_1'
}
@mockAsset2 = {
display_name: "test asset 2"
url: 'actual_asset_url_2'
portable_url: 'portable_url_2'
date_added: 'date_2'
thumbnail: null
id: 'id_2'
}
@mockAssetsResponse = {
assets: [ @mockAsset1, @mockAsset2 ],
start: 0,
end: 1,
page: 0,
pageSize: 5,
totalCount: 2
}
@injector.require ["js/models/asset", "js/collections/asset", "js/views/assets"],
(AssetModel, AssetCollection, AssetsView) =>
@AssetModel = AssetModel
@collection = new AssetCollection();
@collection.url = "assets-url"
@createAssetsView = (test) =>
requests = AjaxHelpers.requests(test)
view = new AssetsView
collection: @collection
el: $('#asset_table_body')
view.render()
return {view: view, requests: requests}
done()
$.ajax()
afterEach ->
delete window.analytics
delete window.course_location_analytics
@injector.clean()
@injector.remove()
# Helper: create a brand-new asset model, hand it to the view under test, and
# answer the AJAX request the view fires with a server page that now contains
# all three assets. Must be invoked with the spec context bound, i.e.
# `addMockAsset.call(this, requests)`, so `@AssetModel`, `@view`,
# `@mockAsset1` and `@mockAsset2` resolve correctly.
addMockAsset = (requests) ->
  newAssetAttrs =
    display_name: "new asset"
    url: 'new_actual_asset_url'
    portable_url: 'portable_url'
    date_added: 'date'
    thumbnail: null
    id: 'idx'
  @view.addAsset(new @AssetModel(newAssetAttrs))
  # Simulated server response: the original two mock assets plus the new one.
  responsePage =
    assets: [@mockAsset1, @mockAsset2, newAssetAttrs]
    start: 0
    end: 2
    page: 0
    pageSize: 5
    totalCount: 3
  AjaxHelpers.respondWithJson(requests, responsePage)
describe "Basic", ->
# Separate setup method to work-around mis-parenting of beforeEach methods
# NOTE(review): indentation is flattened in this capture; `setup` presumably
# owns the two following lines — confirm against the original source layout.
setup = (requests) ->
# Navigate the paging view to page 1, then satisfy the fetch it triggers
# with the canned two-asset response built in beforeEach.
@view.pagingView.setPage(1)
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
# Stub out the jQuery File Upload plugin so the upload widget can
# initialize inside the test sandbox without the real plugin loaded.
$.fn.fileupload = ->
return ''
# Helper that simulates a user click on the element(s) matching html_selector.
clickEvent = (html_selector) ->
$(html_selector).click()
it "should show upload modal on clicking upload asset button", ->
{view: @view, requests: requests} = @createAssetsView(this)
spyOn(@view, "showUploadModal")
setup.call(this, requests)
expect(@view.showUploadModal).not.toHaveBeenCalled()
@view.showUploadModal(clickEvent(".upload-button"))
expect(@view.showUploadModal).toHaveBeenCalled()
it "should show file selection menu on choose file button", ->
{view: @view, requests: requests} = @createAssetsView(this)
spyOn(@view, "showFileSelectionMenu")
setup.call(this, requests)
expect(@view.showFileSelectionMenu).not.toHaveBeenCalled()
@view.showFileSelectionMenu(clickEvent(".choose-file-button"))
expect(@view.showFileSelectionMenu).toHaveBeenCalled()
it "should hide upload modal on clicking close button", ->
{view: @view, requests: requests} = @createAssetsView(this)
spyOn(@view, "hideModal")
setup.call(this, requests)
expect(@view.hideModal).not.toHaveBeenCalled()
@view.hideModal(clickEvent(".close-button"))
expect(@view.hideModal).toHaveBeenCalled()
it "should show a status indicator while loading", ->
{view: @view, requests: requests} = @createAssetsView(this)
appendSetFixtures('<div class="ui-loading"/>')
expect($('.ui-loading').is(':visible')).toBe(true)
setup.call(this, requests)
expect($('.ui-loading').is(':visible')).toBe(false)
it "should hide the status indicator if an error occurs while loading", ->
{view: @view, requests: requests} = @createAssetsView(this)
appendSetFixtures('<div class="ui-loading"/>')
expect($('.ui-loading').is(':visible')).toBe(true)
@view.pagingView.setPage(1)
AjaxHelpers.respondWithError(requests)
expect($('.ui-loading').is(':visible')).toBe(false)
it "should render both assets", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
expect(@view.$el).toContainText("test asset 1")
expect(@view.$el).toContainText("test asset 2")
it "should remove the deleted asset from the view", ->
{view: @view, requests: requests} = @createAssetsView(this)
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
setup.call(this, requests)
# Delete the 2nd asset with success from server.
@view.$(".remove-asset-button")[1].click()
@promptSpies.constructor.calls.mostRecent().args[0].actions.primary.click(@promptSpies)
AjaxHelpers.respondWithNoContent(requests)
expect(@view.$el).toContainText("test asset 1")
expect(@view.$el).not.toContainText("test asset 2")
it "does not remove asset if deletion failed", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
# Delete the 2nd asset, but mimic a failure from the server.
@view.$(".remove-asset-button")[1].click()
@promptSpies.constructor.calls.mostRecent().args[0].actions.primary.click(@promptSpies)
AjaxHelpers.respondWithError(requests)
expect(@view.$el).toContainText("test asset 1")
expect(@view.$el).toContainText("test asset 2")
it "adds an asset if asset does not already exist", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
addMockAsset.call(this, requests)
expect(@view.$el).toContainText("new asset")
expect(@collection.models.length).toBe(3)
it "does not add an asset if asset already exists", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
spyOn(@collection, "add").and.callThrough()
model = @collection.models[1]
@view.addAsset(model)
expect(@collection.add).not.toHaveBeenCalled()
describe "Sorting", ->
# Separate setup method to work-around mis-parenting of beforeEach methods
setup = (requests) ->
@view.pagingView.setPage(1)
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
it "should have the correct default sort order", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
expect(@view.pagingView.sortDisplayName()).toBe("Date Added")
expect(@view.collection.sortDirection).toBe("desc")
it "should toggle the sort order when clicking on the currently sorted column", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
expect(@view.pagingView.sortDisplayName()).toBe("Date Added")
expect(@view.collection.sortDirection).toBe("desc")
@view.$("#js-asset-date-col").click()
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
expect(@view.pagingView.sortDisplayName()).toBe("Date Added")
expect(@view.collection.sortDirection).toBe("asc")
@view.$("#js-asset-date-col").click()
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
expect(@view.pagingView.sortDisplayName()).toBe("Date Added")
expect(@view.collection.sortDirection).toBe("desc")
it "should switch the sort order when clicking on a different column", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
@view.$("#js-asset-name-col").click()
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
expect(@view.pagingView.sortDisplayName()).toBe("Name")
expect(@view.collection.sortDirection).toBe("asc")
@view.$("#js-asset-name-col").click()
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
expect(@view.pagingView.sortDisplayName()).toBe("Name")
expect(@view.collection.sortDirection).toBe("desc")
it "should switch sort to most recent date added when a new asset is added", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
@view.$("#js-asset-name-col").click()
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
addMockAsset.call(this, requests)
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
expect(@view.pagingView.sortDisplayName()).toBe("Date Added")
expect(@view.collection.sortDirection).toBe("desc")
| 76080 | define ["jquery", "edx-ui-toolkit/js/utils/spec-helpers/ajax-helpers", "squire"],
($, AjaxHelpers, Squire) ->
assetLibraryTpl = readFixtures('asset-library.underscore')
assetTpl = readFixtures('asset.underscore')
describe "Asset view", ->
beforeEach (done) ->
setFixtures($("<script>", {id: "asset-tpl", type: "text/template"}).text(assetTpl))
appendSetFixtures(sandbox({id: "page-prompt"}))
@promptSpies = jasmine.createSpyObj('Prompt.Warning', ["constructor", "show", "hide"])
@promptSpies.constructor.and.returnValue(@promptSpies)
@promptSpies.show.and.returnValue(@promptSpies)
@confirmationSpies = jasmine.createSpyObj('Notification.Confirmation', ["constructor", "show"])
@confirmationSpies.constructor.and.returnValue(@confirmationSpies)
@confirmationSpies.show.and.returnValue(@confirmationSpies)
@savingSpies = jasmine.createSpyObj('Notification.Mini', ["constructor", "show", "hide"])
@savingSpies.constructor.and.returnValue(@savingSpies)
@savingSpies.show.and.returnValue(@savingSpies)
@injector = new Squire()
@injector.mock("common/js/components/views/feedback_prompt", {
"Warning": @promptSpies.constructor
})
@injector.mock("common/js/components/views/feedback_notification", {
"Confirmation": @confirmationSpies.constructor,
"Mini": @savingSpies.constructor
})
@injector.require ["js/models/asset", "js/collections/asset", "js/views/asset"],
(AssetModel, AssetCollection, AssetView) =>
@model = new AssetModel
display_name: "test asset"
url: 'actual_asset_url'
portable_url: 'portable_url'
date_added: 'date'
thumbnail: null
id: 'id'
spyOn(@model, "destroy").and.callThrough()
spyOn(@model, "save").and.callThrough()
@collection = new AssetCollection([@model])
@collection.url = "assets-url"
@createAssetView = (test) =>
view = new AssetView({model: @model})
requests = if test then AjaxHelpers["requests"](test) else null
return {view: view, requests: requests}
done()
afterEach ->
@injector.clean()
@injector.remove()
describe "Basic", ->
it "should render properly", ->
{view: @view, requests: requests} = @createAssetView()
@view.render()
expect(@view.$el).toContainText("test asset")
it "should pop a delete confirmation when the delete button is clicked", ->
{view: @view, requests: requests} = @createAssetView()
@view.render().$(".remove-asset-button").click()
expect(@promptSpies.constructor).toHaveBeenCalled()
ctorOptions = @promptSpies.constructor.calls.mostRecent().args[0]
expect(ctorOptions.title).toMatch('Delete File Confirmation')
# hasn't actually been removed
expect(@model.destroy).not.toHaveBeenCalled()
expect(@collection).toContain(@model)
describe "AJAX", ->
it "should destroy itself on confirmation", ->
{view: @view, requests: requests} = @createAssetView(this)
@view.render().$(".remove-asset-button").click()
ctorOptions = @promptSpies.constructor.calls.mostRecent().args[0]
# run the primary function to indicate confirmation
ctorOptions.actions.primary.click(@promptSpies)
# AJAX request has been sent, but not yet returned
expect(@model.destroy).toHaveBeenCalled()
expect(requests.length).toEqual(1)
expect(@confirmationSpies.constructor).not.toHaveBeenCalled()
expect(@collection.contains(@model)).toBeTruthy()
# return a success response
requests[0].respond(204)
expect(@confirmationSpies.constructor).toHaveBeenCalled()
expect(@confirmationSpies.show).toHaveBeenCalled()
savingOptions = @confirmationSpies.constructor.calls.mostRecent().args[0]
expect(savingOptions.title).toMatch("Your file has been deleted.")
expect(@collection.contains(@model)).toBeFalsy()
it "should not destroy itself if server errors", ->
{view: @view, requests: requests} = @createAssetView(this)
@view.render().$(".remove-asset-button").click()
ctorOptions = @promptSpies.constructor.calls.mostRecent().args[0]
# run the primary function to indicate confirmation
ctorOptions.actions.primary.click(@promptSpies)
# AJAX request has been sent, but not yet returned
expect(@model.destroy).toHaveBeenCalled()
# return an error response
requests[0].respond(404)
expect(@confirmationSpies.constructor).not.toHaveBeenCalled()
expect(@collection.contains(@model)).toBeTruthy()
it "should lock the asset on confirmation", ->
{view: @view, requests: requests} = @createAssetView(this)
@view.render().$(".lock-checkbox").click()
# AJAX request has been sent, but not yet returned
expect(@model.save).toHaveBeenCalled()
expect(requests.length).toEqual(1)
expect(@savingSpies.constructor).toHaveBeenCalled()
expect(@savingSpies.show).toHaveBeenCalled()
savingOptions = @savingSpies.constructor.calls.mostRecent().args[0]
expect(savingOptions.title).toMatch("Saving")
expect(@model.get("locked")).toBeFalsy()
# return a success response
requests[0].respond(204)
expect(@savingSpies.hide).toHaveBeenCalled()
expect(@model.get("locked")).toBeTruthy()
it "should not lock the asset if server errors", ->
{view: @view, requests: requests} = @createAssetView(this)
@view.render().$(".lock-checkbox").click()
# return an error response
requests[0].respond(404)
# Don't call hide because that closes the notification showing the server error.
expect(@savingSpies.hide).not.toHaveBeenCalled()
expect(@model.get("locked")).toBeFalsy()
describe "Assets view", ->
beforeEach (done) ->
setFixtures($("<script>", {id: "asset-library-tpl", type: "text/template"}).text(assetLibraryTpl))
appendSetFixtures($("<script>", {id: "asset-tpl", type: "text/template"}).text(assetTpl))
window.analytics = jasmine.createSpyObj('analytics', ['track'])
window.course_location_analytics = jasmine.createSpy()
appendSetFixtures(sandbox({id: "asset_table_body"}))
@promptSpies = jasmine.createSpyObj('Prompt.Warning', ["constructor", "show", "hide"])
@promptSpies.constructor.and.returnValue(@promptSpies)
@promptSpies.show.and.returnValue(@promptSpies)
@injector = new Squire()
@injector.mock("common/js/components/views/feedback_prompt", {
"Warning": @promptSpies.constructor
})
@mockAsset1 = {
display_name: "test asset 1"
url: 'actual_asset_url_1'
portable_url: 'portable_url_1'
date_added: 'date_1'
thumbnail: null
id: 'id_1'
}
@mockAsset2 = {
display_name: "test asset 2"
url: 'actual_asset_url_2'
portable_url: 'portable_url_2'
date_added: 'date_2'
thumbnail: null
id: 'id_2'
}
@mockAssetsResponse = {
assets: [ @mockAsset1, @mockAsset2 ],
start: 0,
end: 1,
page: 0,
pageSize: 5,
totalCount: 2
}
@injector.require ["js/models/asset", "js/collections/asset", "js/views/assets"],
(AssetModel, AssetCollection, AssetsView) =>
@AssetModel = AssetModel
@collection = new AssetCollection();
@collection.url = "assets-url"
@createAssetsView = (test) =>
requests = AjaxHelpers.requests(test)
view = new AssetsView
collection: @collection
el: $('#asset_table_body')
view.render()
return {view: view, requests: requests}
done()
$.ajax()
afterEach ->
delete window.analytics
delete window.course_location_analytics
@injector.clean()
@injector.remove()
addMockAsset = (requests) ->
model = new @AssetModel
display_name: "new asset"
url: 'new_actual_asset_url'
portable_url: 'portable_url'
date_added: 'date'
thumbnail: null
id: 'idx'
@view.addAsset(model)
AjaxHelpers.respondWithJson(requests,
{
assets: [
@mockAsset1, @mockAsset2,
{
display_name: "new asset"
url: 'new_actual_asset_url'
portable_url: 'portable_url'
date_added: 'date'
thumbnail: null
id: 'idx'
}
],
start: 0,
end: 2,
page: 0,
pageSize: 5,
totalCount: 3
})
describe "Basic", ->
# Separate setup method to work-around mis-parenting of beforeEach methods
setup = (requests) ->
@view.pagingView.setPage(1)
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
$.fn.fileupload = ->
return ''
clickEvent = (html_selector) ->
$(html_selector).click()
it "should show upload modal on clicking upload asset button", ->
{view: @view, requests: requests} = @createAssetsView(this)
spyOn(@view, "showUploadModal")
setup.call(this, requests)
expect(@view.showUploadModal).not.toHaveBeenCalled()
@view.showUploadModal(clickEvent(".upload-button"))
expect(@view.showUploadModal).toHaveBeenCalled()
it "should show file selection menu on choose file button", ->
{view: @view, requests: requests} = @createAssetsView(this)
spyOn(@view, "showFileSelectionMenu")
setup.call(this, requests)
expect(@view.showFileSelectionMenu).not.toHaveBeenCalled()
@view.showFileSelectionMenu(clickEvent(".choose-file-button"))
expect(@view.showFileSelectionMenu).toHaveBeenCalled()
it "should hide upload modal on clicking close button", ->
{view: @view, requests: requests} = @createAssetsView(this)
spyOn(@view, "hideModal")
setup.call(this, requests)
expect(@view.hideModal).not.toHaveBeenCalled()
@view.hideModal(clickEvent(".close-button"))
expect(@view.hideModal).toHaveBeenCalled()
it "should show a status indicator while loading", ->
{view: @view, requests: requests} = @createAssetsView(this)
appendSetFixtures('<div class="ui-loading"/>')
expect($('.ui-loading').is(':visible')).toBe(true)
setup.call(this, requests)
expect($('.ui-loading').is(':visible')).toBe(false)
it "should hide the status indicator if an error occurs while loading", ->
{view: @view, requests: requests} = @createAssetsView(this)
appendSetFixtures('<div class="ui-loading"/>')
expect($('.ui-loading').is(':visible')).toBe(true)
@view.pagingView.setPage(1)
AjaxHelpers.respondWithError(requests)
expect($('.ui-loading').is(':visible')).toBe(false)
it "should render both assets", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
expect(@view.$el).toContainText("test asset 1")
expect(@view.$el).toContainText("test asset 2")
it "should remove the deleted asset from the view", ->
{view: @view, requests: requests} = @createAssetsView(this)
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
setup.call(this, requests)
# Delete the 2nd asset with success from server.
@view.$(".remove-asset-button")[1].click()
@promptSpies.constructor.calls.mostRecent().args[0].actions.primary.click(@promptSpies)
AjaxHelpers.respondWithNoContent(requests)
expect(@view.$el).toContainText("test asset 1")
expect(@view.$el).not.toContainText("test asset 2")
it "does not remove asset if deletion failed", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
# Delete the 2nd asset, but mimic a failure from the server.
@view.$(".remove-asset-button")[1].click()
@promptSpies.constructor.calls.mostRecent().args[0].actions.primary.click(@promptSpies)
AjaxHelpers.respondWithError(requests)
expect(@view.$el).toContainText("test asset 1")
expect(@view.$el).toContainText("test asset 2")
it "adds an asset if asset does not already exist", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
addMockAsset.call(this, requests)
expect(@view.$el).toContainText("new asset")
expect(@collection.models.length).toBe(3)
it "does not add an asset if asset already exists", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
spyOn(@collection, "add").and.callThrough()
model = @collection.models[1]
@view.addAsset(model)
expect(@collection.add).not.toHaveBeenCalled()
describe "Sorting", ->
# Separate setup method to work-around mis-parenting of beforeEach methods
setup = (requests) ->
@view.pagingView.setPage(1)
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
it "should have the correct default sort order", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
expect(@view.pagingView.sortDisplayName()).toBe("Date Added")
expect(@view.collection.sortDirection).toBe("desc")
it "should toggle the sort order when clicking on the currently sorted column", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
expect(@view.pagingView.sortDisplayName()).toBe("Date Added")
expect(@view.collection.sortDirection).toBe("desc")
@view.$("#js-asset-date-col").click()
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
expect(@view.pagingView.sortDisplayName()).toBe("Date Added")
expect(@view.collection.sortDirection).toBe("asc")
@view.$("#js-asset-date-col").click()
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
expect(@view.pagingView.sortDisplayName()).toBe("Date Added")
expect(@view.collection.sortDirection).toBe("desc")
it "should switch the sort order when clicking on a different column", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
@view.$("#js-asset-name-col").click()
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
expect(@view.pagingView.sortDisplayName()).toBe("Name")
expect(@view.collection.sortDirection).toBe("asc")
@view.$("#js-asset-name-col").click()
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
expect(@view.pagingView.sortDisplayName()).toBe("Name")
expect(@view.collection.sortDirection).toBe("desc")
it "should switch sort to most recent date added when a new asset is added", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
@view.$("#js-asset-name-col").click()
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
addMockAsset.call(this, requests)
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
expect(@view.pagingView.sortDisplayName()).toBe("Date Added")
expect(@view.collection.sortDirection).toBe("desc")
| true | define ["jquery", "edx-ui-toolkit/js/utils/spec-helpers/ajax-helpers", "squire"],
($, AjaxHelpers, Squire) ->
assetLibraryTpl = readFixtures('asset-library.underscore')
assetTpl = readFixtures('asset.underscore')
describe "Asset view", ->
beforeEach (done) ->
setFixtures($("<script>", {id: "asset-tpl", type: "text/template"}).text(assetTpl))
appendSetFixtures(sandbox({id: "page-prompt"}))
@promptSpies = jasmine.createSpyObj('Prompt.Warning', ["constructor", "show", "hide"])
@promptSpies.constructor.and.returnValue(@promptSpies)
@promptSpies.show.and.returnValue(@promptSpies)
@confirmationSpies = jasmine.createSpyObj('Notification.Confirmation', ["constructor", "show"])
@confirmationSpies.constructor.and.returnValue(@confirmationSpies)
@confirmationSpies.show.and.returnValue(@confirmationSpies)
@savingSpies = jasmine.createSpyObj('Notification.Mini', ["constructor", "show", "hide"])
@savingSpies.constructor.and.returnValue(@savingSpies)
@savingSpies.show.and.returnValue(@savingSpies)
@injector = new Squire()
@injector.mock("common/js/components/views/feedback_prompt", {
"Warning": @promptSpies.constructor
})
@injector.mock("common/js/components/views/feedback_notification", {
"Confirmation": @confirmationSpies.constructor,
"Mini": @savingSpies.constructor
})
@injector.require ["js/models/asset", "js/collections/asset", "js/views/asset"],
(AssetModel, AssetCollection, AssetView) =>
@model = new AssetModel
display_name: "test asset"
url: 'actual_asset_url'
portable_url: 'portable_url'
date_added: 'date'
thumbnail: null
id: 'id'
spyOn(@model, "destroy").and.callThrough()
spyOn(@model, "save").and.callThrough()
@collection = new AssetCollection([@model])
@collection.url = "assets-url"
@createAssetView = (test) =>
view = new AssetView({model: @model})
requests = if test then AjaxHelpers["requests"](test) else null
return {view: view, requests: requests}
done()
afterEach ->
@injector.clean()
@injector.remove()
describe "Basic", ->
it "should render properly", ->
{view: @view, requests: requests} = @createAssetView()
@view.render()
expect(@view.$el).toContainText("test asset")
it "should pop a delete confirmation when the delete button is clicked", ->
{view: @view, requests: requests} = @createAssetView()
@view.render().$(".remove-asset-button").click()
expect(@promptSpies.constructor).toHaveBeenCalled()
ctorOptions = @promptSpies.constructor.calls.mostRecent().args[0]
expect(ctorOptions.title).toMatch('Delete File Confirmation')
# hasn't actually been removed
expect(@model.destroy).not.toHaveBeenCalled()
expect(@collection).toContain(@model)
describe "AJAX", ->
it "should destroy itself on confirmation", ->
{view: @view, requests: requests} = @createAssetView(this)
@view.render().$(".remove-asset-button").click()
ctorOptions = @promptSpies.constructor.calls.mostRecent().args[0]
# run the primary function to indicate confirmation
ctorOptions.actions.primary.click(@promptSpies)
# AJAX request has been sent, but not yet returned
expect(@model.destroy).toHaveBeenCalled()
expect(requests.length).toEqual(1)
expect(@confirmationSpies.constructor).not.toHaveBeenCalled()
expect(@collection.contains(@model)).toBeTruthy()
# return a success response
requests[0].respond(204)
expect(@confirmationSpies.constructor).toHaveBeenCalled()
expect(@confirmationSpies.show).toHaveBeenCalled()
savingOptions = @confirmationSpies.constructor.calls.mostRecent().args[0]
expect(savingOptions.title).toMatch("Your file has been deleted.")
expect(@collection.contains(@model)).toBeFalsy()
it "should not destroy itself if server errors", ->
{view: @view, requests: requests} = @createAssetView(this)
@view.render().$(".remove-asset-button").click()
ctorOptions = @promptSpies.constructor.calls.mostRecent().args[0]
# run the primary function to indicate confirmation
ctorOptions.actions.primary.click(@promptSpies)
# AJAX request has been sent, but not yet returned
expect(@model.destroy).toHaveBeenCalled()
# return an error response
requests[0].respond(404)
expect(@confirmationSpies.constructor).not.toHaveBeenCalled()
expect(@collection.contains(@model)).toBeTruthy()
it "should lock the asset on confirmation", ->
{view: @view, requests: requests} = @createAssetView(this)
@view.render().$(".lock-checkbox").click()
# AJAX request has been sent, but not yet returned
expect(@model.save).toHaveBeenCalled()
expect(requests.length).toEqual(1)
expect(@savingSpies.constructor).toHaveBeenCalled()
expect(@savingSpies.show).toHaveBeenCalled()
savingOptions = @savingSpies.constructor.calls.mostRecent().args[0]
expect(savingOptions.title).toMatch("Saving")
expect(@model.get("locked")).toBeFalsy()
# return a success response
requests[0].respond(204)
expect(@savingSpies.hide).toHaveBeenCalled()
expect(@model.get("locked")).toBeTruthy()
it "should not lock the asset if server errors", ->
{view: @view, requests: requests} = @createAssetView(this)
@view.render().$(".lock-checkbox").click()
# return an error response
requests[0].respond(404)
# Don't call hide because that closes the notification showing the server error.
expect(@savingSpies.hide).not.toHaveBeenCalled()
expect(@model.get("locked")).toBeFalsy()
describe "Assets view", ->
beforeEach (done) ->
setFixtures($("<script>", {id: "asset-library-tpl", type: "text/template"}).text(assetLibraryTpl))
appendSetFixtures($("<script>", {id: "asset-tpl", type: "text/template"}).text(assetTpl))
window.analytics = jasmine.createSpyObj('analytics', ['track'])
window.course_location_analytics = jasmine.createSpy()
appendSetFixtures(sandbox({id: "asset_table_body"}))
@promptSpies = jasmine.createSpyObj('Prompt.Warning', ["constructor", "show", "hide"])
@promptSpies.constructor.and.returnValue(@promptSpies)
@promptSpies.show.and.returnValue(@promptSpies)
@injector = new Squire()
@injector.mock("common/js/components/views/feedback_prompt", {
"Warning": @promptSpies.constructor
})
@mockAsset1 = {
display_name: "test asset 1"
url: 'actual_asset_url_1'
portable_url: 'portable_url_1'
date_added: 'date_1'
thumbnail: null
id: 'id_1'
}
@mockAsset2 = {
display_name: "test asset 2"
url: 'actual_asset_url_2'
portable_url: 'portable_url_2'
date_added: 'date_2'
thumbnail: null
id: 'id_2'
}
@mockAssetsResponse = {
assets: [ @mockAsset1, @mockAsset2 ],
start: 0,
end: 1,
page: 0,
pageSize: 5,
totalCount: 2
}
@injector.require ["js/models/asset", "js/collections/asset", "js/views/assets"],
(AssetModel, AssetCollection, AssetsView) =>
@AssetModel = AssetModel
@collection = new AssetCollection();
@collection.url = "assets-url"
@createAssetsView = (test) =>
requests = AjaxHelpers.requests(test)
view = new AssetsView
collection: @collection
el: $('#asset_table_body')
view.render()
return {view: view, requests: requests}
done()
$.ajax()
afterEach ->
delete window.analytics
delete window.course_location_analytics
@injector.clean()
@injector.remove()
addMockAsset = (requests) ->
model = new @AssetModel
display_name: "new asset"
url: 'new_actual_asset_url'
portable_url: 'portable_url'
date_added: 'date'
thumbnail: null
id: 'idx'
@view.addAsset(model)
AjaxHelpers.respondWithJson(requests,
{
assets: [
@mockAsset1, @mockAsset2,
{
display_name: "new asset"
url: 'new_actual_asset_url'
portable_url: 'portable_url'
date_added: 'date'
thumbnail: null
id: 'idx'
}
],
start: 0,
end: 2,
page: 0,
pageSize: 5,
totalCount: 3
})
describe "Basic", ->
# Separate setup method to work-around mis-parenting of beforeEach methods
setup = (requests) ->
@view.pagingView.setPage(1)
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
$.fn.fileupload = ->
return ''
clickEvent = (html_selector) ->
$(html_selector).click()
it "should show upload modal on clicking upload asset button", ->
{view: @view, requests: requests} = @createAssetsView(this)
spyOn(@view, "showUploadModal")
setup.call(this, requests)
expect(@view.showUploadModal).not.toHaveBeenCalled()
@view.showUploadModal(clickEvent(".upload-button"))
expect(@view.showUploadModal).toHaveBeenCalled()
it "should show file selection menu on choose file button", ->
{view: @view, requests: requests} = @createAssetsView(this)
spyOn(@view, "showFileSelectionMenu")
setup.call(this, requests)
expect(@view.showFileSelectionMenu).not.toHaveBeenCalled()
@view.showFileSelectionMenu(clickEvent(".choose-file-button"))
expect(@view.showFileSelectionMenu).toHaveBeenCalled()
it "should hide upload modal on clicking close button", ->
{view: @view, requests: requests} = @createAssetsView(this)
spyOn(@view, "hideModal")
setup.call(this, requests)
expect(@view.hideModal).not.toHaveBeenCalled()
@view.hideModal(clickEvent(".close-button"))
expect(@view.hideModal).toHaveBeenCalled()
it "should show a status indicator while loading", ->
{view: @view, requests: requests} = @createAssetsView(this)
appendSetFixtures('<div class="ui-loading"/>')
expect($('.ui-loading').is(':visible')).toBe(true)
setup.call(this, requests)
expect($('.ui-loading').is(':visible')).toBe(false)
it "should hide the status indicator if an error occurs while loading", ->
{view: @view, requests: requests} = @createAssetsView(this)
appendSetFixtures('<div class="ui-loading"/>')
expect($('.ui-loading').is(':visible')).toBe(true)
@view.pagingView.setPage(1)
AjaxHelpers.respondWithError(requests)
expect($('.ui-loading').is(':visible')).toBe(false)
it "should render both assets", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
expect(@view.$el).toContainText("test asset 1")
expect(@view.$el).toContainText("test asset 2")
it "should remove the deleted asset from the view", ->
{view: @view, requests: requests} = @createAssetsView(this)
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
setup.call(this, requests)
# Delete the 2nd asset with success from server.
@view.$(".remove-asset-button")[1].click()
@promptSpies.constructor.calls.mostRecent().args[0].actions.primary.click(@promptSpies)
AjaxHelpers.respondWithNoContent(requests)
expect(@view.$el).toContainText("test asset 1")
expect(@view.$el).not.toContainText("test asset 2")
it "does not remove asset if deletion failed", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
# Delete the 2nd asset, but mimic a failure from the server.
@view.$(".remove-asset-button")[1].click()
@promptSpies.constructor.calls.mostRecent().args[0].actions.primary.click(@promptSpies)
AjaxHelpers.respondWithError(requests)
expect(@view.$el).toContainText("test asset 1")
expect(@view.$el).toContainText("test asset 2")
it "adds an asset if asset does not already exist", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
addMockAsset.call(this, requests)
expect(@view.$el).toContainText("new asset")
expect(@collection.models.length).toBe(3)
it "does not add an asset if asset already exists", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
spyOn(@collection, "add").and.callThrough()
model = @collection.models[1]
@view.addAsset(model)
expect(@collection.add).not.toHaveBeenCalled()
describe "Sorting", ->
# Separate setup method to work-around mis-parenting of beforeEach methods
setup = (requests) ->
@view.pagingView.setPage(1)
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
it "should have the correct default sort order", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
expect(@view.pagingView.sortDisplayName()).toBe("Date Added")
expect(@view.collection.sortDirection).toBe("desc")
it "should toggle the sort order when clicking on the currently sorted column", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
expect(@view.pagingView.sortDisplayName()).toBe("Date Added")
expect(@view.collection.sortDirection).toBe("desc")
@view.$("#js-asset-date-col").click()
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
expect(@view.pagingView.sortDisplayName()).toBe("Date Added")
expect(@view.collection.sortDirection).toBe("asc")
@view.$("#js-asset-date-col").click()
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
expect(@view.pagingView.sortDisplayName()).toBe("Date Added")
expect(@view.collection.sortDirection).toBe("desc")
it "should switch the sort order when clicking on a different column", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
@view.$("#js-asset-name-col").click()
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
expect(@view.pagingView.sortDisplayName()).toBe("Name")
expect(@view.collection.sortDirection).toBe("asc")
@view.$("#js-asset-name-col").click()
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
expect(@view.pagingView.sortDisplayName()).toBe("Name")
expect(@view.collection.sortDirection).toBe("desc")
it "should switch sort to most recent date added when a new asset is added", ->
{view: @view, requests: requests} = @createAssetsView(this)
setup.call(this, requests)
@view.$("#js-asset-name-col").click()
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
addMockAsset.call(this, requests)
AjaxHelpers.respondWithJson(requests, @mockAssetsResponse)
expect(@view.pagingView.sortDisplayName()).toBe("Date Added")
expect(@view.collection.sortDirection).toBe("desc")
|
[
{
"context": "irbrake(on: (->), {\n id: '1'\n key: '2'\n version: '0.0'\n name: 'testing'\n ",
"end": 443,
"score": 0.922469973564148,
"start": 442,
"tag": "KEY",
"value": "2"
},
{
"context": "should.equal '1'\n airbrake.key.should.equal '2'\n ai... | test/unit/airbrake.coffee | Wayfarer247/airbrake-hapi | 0 | should = require('chai').should()
Airbrake = require '../../lib/airbrake'
Info = require '../airbrake_info.json'
describe 'Airbrake', ->
it 'should exist', ->
should.exist Airbrake
it 'should be creatable', ->
server = on: ->
airbrake = new Airbrake(server)
should.exist airbrake
describe 'creating', ->
it 'should set the properties', ->
airbrake = new Airbrake(on: (->), {
id: '1'
key: '2'
version: '0.0'
name: 'testing'
})
airbrake.id.should.equal '1'
airbrake.key.should.equal '2'
airbrake.version.should.equal '0.0'
airbrake.name.should.equal 'testing'
airbrake.os.should.be.ok
airbrake.language.should.contain 'Node'
airbrake.environment.should.be.ok
airbrake.rootDirectory.should.be.ok
it 'should get the git version if in git', (done)->
airbrake = new Airbrake(on: (->), {
id: '1'
key: '2'
})
airbrake.start ->
airbrake.version.should.have.length 40
airbrake.version.should.be.ok
done()
describe 'An Airbrake instance', ->
it 'should fail to start with no id/key', (done)->
airbrake = new Airbrake(on: ->)
airbrake.start (err)->
err.message.should.equal 'No Project ID or API Key was given!'
done()
it 'should not crash when getting context', ->
airbrake = new Airbrake(on: (->), config: Info)
airbrake.os = undefined
airbrake.environment = undefined
airbrake.language = undefined
airbrake.rootDirectory = undefined
airbrake.context()
airbrake.os.should.be.ok
airbrake.language.should.be.ok
airbrake.environment.should.be.ok
airbrake.rootDirectory.should.be.ok
it 'should return nil when in development', ->
airbrake = new Airbrake(on: (->), config: Info)
error = new Error('It Broke Here')
hapiRequest =
url:
path: '/testing/test/t'
result = airbrake.notify(error, hapiRequest)
should.not.exist result
| 191894 | should = require('chai').should()
Airbrake = require '../../lib/airbrake'
Info = require '../airbrake_info.json'
describe 'Airbrake', ->
it 'should exist', ->
should.exist Airbrake
it 'should be creatable', ->
server = on: ->
airbrake = new Airbrake(server)
should.exist airbrake
describe 'creating', ->
it 'should set the properties', ->
airbrake = new Airbrake(on: (->), {
id: '1'
key: '<KEY>'
version: '0.0'
name: 'testing'
})
airbrake.id.should.equal '1'
airbrake.key.should.equal '<KEY>'
airbrake.version.should.equal '0.0'
airbrake.name.should.equal 'testing'
airbrake.os.should.be.ok
airbrake.language.should.contain 'Node'
airbrake.environment.should.be.ok
airbrake.rootDirectory.should.be.ok
it 'should get the git version if in git', (done)->
airbrake = new Airbrake(on: (->), {
id: '1'
key: '2'
})
airbrake.start ->
airbrake.version.should.have.length 40
airbrake.version.should.be.ok
done()
describe 'An Airbrake instance', ->
it 'should fail to start with no id/key', (done)->
airbrake = new Airbrake(on: ->)
airbrake.start (err)->
err.message.should.equal 'No Project ID or API Key was given!'
done()
it 'should not crash when getting context', ->
airbrake = new Airbrake(on: (->), config: Info)
airbrake.os = undefined
airbrake.environment = undefined
airbrake.language = undefined
airbrake.rootDirectory = undefined
airbrake.context()
airbrake.os.should.be.ok
airbrake.language.should.be.ok
airbrake.environment.should.be.ok
airbrake.rootDirectory.should.be.ok
it 'should return nil when in development', ->
airbrake = new Airbrake(on: (->), config: Info)
error = new Error('It Broke Here')
hapiRequest =
url:
path: '/testing/test/t'
result = airbrake.notify(error, hapiRequest)
should.not.exist result
| true | should = require('chai').should()
Airbrake = require '../../lib/airbrake'
Info = require '../airbrake_info.json'
describe 'Airbrake', ->
it 'should exist', ->
should.exist Airbrake
it 'should be creatable', ->
server = on: ->
airbrake = new Airbrake(server)
should.exist airbrake
describe 'creating', ->
it 'should set the properties', ->
airbrake = new Airbrake(on: (->), {
id: '1'
key: 'PI:KEY:<KEY>END_PI'
version: '0.0'
name: 'testing'
})
airbrake.id.should.equal '1'
airbrake.key.should.equal 'PI:KEY:<KEY>END_PI'
airbrake.version.should.equal '0.0'
airbrake.name.should.equal 'testing'
airbrake.os.should.be.ok
airbrake.language.should.contain 'Node'
airbrake.environment.should.be.ok
airbrake.rootDirectory.should.be.ok
it 'should get the git version if in git', (done)->
airbrake = new Airbrake(on: (->), {
id: '1'
key: '2'
})
airbrake.start ->
airbrake.version.should.have.length 40
airbrake.version.should.be.ok
done()
describe 'An Airbrake instance', ->
it 'should fail to start with no id/key', (done)->
airbrake = new Airbrake(on: ->)
airbrake.start (err)->
err.message.should.equal 'No Project ID or API Key was given!'
done()
it 'should not crash when getting context', ->
airbrake = new Airbrake(on: (->), config: Info)
airbrake.os = undefined
airbrake.environment = undefined
airbrake.language = undefined
airbrake.rootDirectory = undefined
airbrake.context()
airbrake.os.should.be.ok
airbrake.language.should.be.ok
airbrake.environment.should.be.ok
airbrake.rootDirectory.should.be.ok
it 'should return nil when in development', ->
airbrake = new Airbrake(on: (->), config: Info)
error = new Error('It Broke Here')
hapiRequest =
url:
path: '/testing/test/t'
result = airbrake.notify(error, hapiRequest)
should.not.exist result
|
[
{
"context": "on-4\nexports.test_vectors = \n s_4_1 :\n key : '000102030405060708090A0B0C0D0E0F'\n plaintext : '00112233445566778899AABBCCDDEEF",
"end": 168,
"score": 0.9997614622116089,
"start": 136,
"tag": "KEY",
"value": "000102030405060708090A0B0C0D0E0F"
},
{
"context": "A... | test/data/rfc3394.iced | samkenxstream/kbpgp | 464 |
# Test vectors from RFC 3394, Section 4
# http://tools.ietf.org/html/rfc3394#section-4
exports.test_vectors =
s_4_1 :
key : '000102030405060708090A0B0C0D0E0F'
plaintext : '00112233445566778899AABBCCDDEEFF'
ciphertext : '1FA68B0A8112B447AEF34BD8FB5A7B829D3E862371D2CFE5'
s_4_2:
key : '000102030405060708090A0B0C0D0E0F1011121314151617'
plaintext : '00112233445566778899AABBCCDDEEFF'
ciphertext : '96778B25AE6CA435F92B5B97C050AED2468AB8A17AD84E5D'
s_4_3 :
key : '000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F'
plaintext : '00112233445566778899AABBCCDDEEFF'
ciphertext : '64E8C3F9CE0F5BA263E9777905818A2A93C8191E7D6E8AE7'
s_4_4 :
key : '000102030405060708090A0B0C0D0E0F1011121314151617',
plaintext : '00112233445566778899AABBCCDDEEFF0001020304050607',
ciphertext : '031D33264E15D33268F24EC260743EDCE1C6C7DDEE725A936BA814915C6762D2'
s_4_5 :
key : '000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F',
plaintext : '00112233445566778899AABBCCDDEEFF0001020304050607',
ciphertext : 'A8F9BC1612C68B3FF6E6F4FBE30E71E4769C8B80A32CB8958CD5D17D6B254DA1'
s_4_6 :
key : '000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F',
plaintext : '00112233445566778899AABBCCDDEEFF000102030405060708090A0B0C0D0E0F',
ciphertext : '28C9F404C4B810F4CBCCB35CFB87F8263F5786E2D80ED326CBC7F0E71A99F43BFB988B9B7A02DD21'
| 64007 |
# Test vectors from RFC 3394, Section 4
# http://tools.ietf.org/html/rfc3394#section-4
exports.test_vectors =
s_4_1 :
key : '<KEY>'
plaintext : '00112233445566778899<KEY>EFF'
ciphertext : '1FA68B0A8112B447AEF34BD8FB5A7B829D3E862371D2CFE5'
s_4_2:
key : '<KEY>'
plaintext : '0011223344556677889<KEY>EFF'
ciphertext : '96778B25AE6CA435F92B5B97C050AED2468AB8A17AD84E5D'
s_4_3 :
key : '<KEY>'
plaintext : '00112233445566778899AABBCCDDEEFF'
ciphertext : '64E8C3F9CE0F5BA263E9777905818A2A93C8191E7D6E8AE7'
s_4_4 :
key : '<KEY>',
plaintext : '00112233445566778899AABBCCDDEEFF0001020304050607',
ciphertext : '031D33264E15D33268F24EC260743EDCE1C6C7DDEE725A936BA814915C6762D2'
s_4_5 :
key : '<KEY>',
plaintext : '00112233445566778899AABBCCDDEEFF0001020304050607',
ciphertext : 'A8F9BC1612C68B3FF6E6F4FBE30E<KEY>E4769C<KEY>32CB8<KEY>CD<KEY>D<KEY>7D6<KEY>'
s_4_6 :
key : '<KEY>',
plaintext : '00112233445566778899AABBCCDDEEFF000102030405060708090A0B0C0D0E0F',
ciphertext : '28C9F404C4B810F4CBCCB35CFB87F8263F5786E2D80ED326CBC7F0E71A99F43BFB988B9B7A02DD21'
| true |
# Test vectors from RFC 3394, Section 4
# http://tools.ietf.org/html/rfc3394#section-4
exports.test_vectors =
s_4_1 :
key : 'PI:KEY:<KEY>END_PI'
plaintext : '00112233445566778899PI:KEY:<KEY>END_PIEFF'
ciphertext : '1FA68B0A8112B447AEF34BD8FB5A7B829D3E862371D2CFE5'
s_4_2:
key : 'PI:KEY:<KEY>END_PI'
plaintext : '0011223344556677889PI:KEY:<KEY>END_PIEFF'
ciphertext : '96778B25AE6CA435F92B5B97C050AED2468AB8A17AD84E5D'
s_4_3 :
key : 'PI:KEY:<KEY>END_PI'
plaintext : '00112233445566778899AABBCCDDEEFF'
ciphertext : '64E8C3F9CE0F5BA263E9777905818A2A93C8191E7D6E8AE7'
s_4_4 :
key : 'PI:KEY:<KEY>END_PI',
plaintext : '00112233445566778899AABBCCDDEEFF0001020304050607',
ciphertext : '031D33264E15D33268F24EC260743EDCE1C6C7DDEE725A936BA814915C6762D2'
s_4_5 :
key : 'PI:KEY:<KEY>END_PI',
plaintext : '00112233445566778899AABBCCDDEEFF0001020304050607',
ciphertext : 'A8F9BC1612C68B3FF6E6F4FBE30EPI:KEY:<KEY>END_PIE4769CPI:KEY:<KEY>END_PI32CB8PI:KEY:<KEY>END_PICDPI:KEY:<KEY>END_PIDPI:KEY:<KEY>END_PI7D6PI:KEY:<KEY>END_PI'
s_4_6 :
key : 'PI:KEY:<KEY>END_PI',
plaintext : '00112233445566778899AABBCCDDEEFF000102030405060708090A0B0C0D0E0F',
ciphertext : '28C9F404C4B810F4CBCCB35CFB87F8263F5786E2D80ED326CBC7F0E71A99F43BFB988B9B7A02DD21'
|
[
{
"context": "s file is part of the Konsserto package.\n *\n * (c) Jessym Reziga <jessym@konsserto.com>\n *\n * For the full copyrig",
"end": 74,
"score": 0.9998865127563477,
"start": 61,
"tag": "NAME",
"value": "Jessym Reziga"
},
{
"context": "f the Konsserto package.\n *\n * (c) Je... | node_modules/konsserto/lib/src/Konsserto/Component/Router/RouteDefinition.coffee | konsserto/konsserto | 2 | ###
* This file is part of the Konsserto package.
*
* (c) Jessym Reziga <jessym@konsserto.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
###
RouteArgument = use('@Konsserto/Component/Router/RouteArgument')
# RouteDefinition
# @author Jessym Reziga <jessym@konsserto.com>
class RouteDefinition
# Class constructor
# @param {Object} registeredRoute The registered route in the config/route file
# @param {String} prefix The prefix of the route
constructor: (@registeredRoute, prefix) ->
@defaultArgs = {}
@arguments = {}
controller = @registeredRoute.controller.split ':'
@name = @registeredRoute.name
@bundleName = controller[0]
@controllerName = controller[1]
@methodName = controller[2]
@async = @registeredRoute.async ? false
@prefix = prefix
@rawPattern = @registeredRoute.pattern
@httpMethod = @registeredRoute.method ? 'all'
@rawArguments = @registeredRoute.arguments ? {}
@needPrefix = true
@needPrefix = false if @registeredRoute.prefix? && !@registeredRoute.prefix
@extractArgumentsFromRawPattern()
@pattern = @addPrefixOnRoute(prefix)
# Add an argument to the route
# @param {String} name The name of the parameter
# @retturn {RouteArgument} The route argument object
addArgument: (name) ->
requirements = @rawArguments[name] ? {}
routeArgument = new RouteArgument(name, requirements)
@arguments[name] = routeArgument
if routeArgument.hasDefault()
@defaultArgs[name] = routeArgument
return routeArgument
# Extract the arguments from the raw pattern
extractArgumentsFromRawPattern: () ->
@pattern = @rawPattern
args = @rawPattern.match(/({([a-zA-Z0-9-_]+)}+)/g)
if args?
for arg in args
name = arg.replace(/{/g, '').replace(/}/g, '')
routeArgument = @addArgument(name)
@pattern = @pattern.replace(arg, routeArgument.getMatch())
# Add the prefix on the routes
# @param {String} prefix The prefix to add
# @return {String} The prefix + the pattern
addPrefixOnRoute: (prefix) ->
if prefix? && @needPrefix
return prefix + @pattern
return @pattern
# @param {String} pattern The pattern of the route
setPattern: (@pattern) ->
return this
# @param {String} bundleName The bundle name associated with the route
setBundleName: (@bundleName) ->
return this
# @param {String} controllerName The controller name associated with the route
setControllerName: (@controllerName) ->
return this
# @param {String} methodName The method name associated with the route
setMethodName: (@methodName) ->
return this
# @param {Boolean} async The route asynchronous state
setAsync: (@async) ->
return this
# @param {String} httpMethod The http method associated with the route
setHttpMethod: (@httpMethod) ->
return this
# @param {String} name The name of the route
setName: (@name) ->
return this
# @return {Boolean} Is the route asynchroneous ?
isAsynchronous: () ->
return @async
# @return {String} The pattern of the route
getPattern: () ->
return @pattern
# @return {String} The raw pattern of the route
getRawPattern: () ->
return @rawPattern
# @return {String} The bundle name associated with the route
getBundleName: () ->
return @bundleName
# @return {String} The controller name associated with the route
getControllerName: () ->
return @controllerName
# @return {String} The method name associated with the route
getMethodName: () ->
return @methodName
# @return {String} The http method associated with the route
getHttpMethod: () ->
return @httpMethod
# @return {String} The name of the route
getName: () ->
return @name
# @return [String] The default arguments of the route
getDefaultArgs: () ->
return @defaultArgs
# @return [RouteArgument] The route arguments of the route
getArgs: () ->
return @arguments
# @param {String} name The name of the argument
# @return {String} The default value for an argument
getDefaultValueForArg: (name) ->
if @defaultArgs[name]?
return @defaultArgs[name].getDefaultValue()
return null
# @param {String} name The name of the argument
# @return {Boolean} Does the route contain a default arguments called 'name' ?
hasDefaultArg: (name) ->
if @defaultArgs[name]?
return true
return false
# Inflate the arguments
# @param [String] args The arguments
# @return {String} The path of the argument matching
inflateArguments: (args) ->
path = @prefix + @rawPattern
for name,arg of @arguments
fullname = '{' + name + '}'
if args? && args[name]?
path = path.replace(fullname, args[name])
else if @hasDefaultArg(name)
path = path.replace(fullname, @getDefaultValueForArg(name))
else
path = path.replace(fullname, '')
return path
module.exports = RouteDefinition | 172371 | ###
* This file is part of the Konsserto package.
*
* (c) <NAME> <<EMAIL>>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
###
RouteArgument = use('@Konsserto/Component/Router/RouteArgument')
# RouteDefinition
# @author <NAME> <<EMAIL>>
class RouteDefinition
# Class constructor
# @param {Object} registeredRoute The registered route in the config/route file
# @param {String} prefix The prefix of the route
constructor: (@registeredRoute, prefix) ->
@defaultArgs = {}
@arguments = {}
controller = @registeredRoute.controller.split ':'
@name = @registeredRoute.name
@bundleName = controller[0]
@controllerName = controller[1]
@methodName = controller[2]
@async = @registeredRoute.async ? false
@prefix = prefix
@rawPattern = @registeredRoute.pattern
@httpMethod = @registeredRoute.method ? 'all'
@rawArguments = @registeredRoute.arguments ? {}
@needPrefix = true
@needPrefix = false if @registeredRoute.prefix? && !@registeredRoute.prefix
@extractArgumentsFromRawPattern()
@pattern = @addPrefixOnRoute(prefix)
# Add an argument to the route
# @param {String} name The name of the parameter
# @retturn {RouteArgument} The route argument object
addArgument: (name) ->
requirements = @rawArguments[name] ? {}
routeArgument = new RouteArgument(name, requirements)
@arguments[name] = routeArgument
if routeArgument.hasDefault()
@defaultArgs[name] = routeArgument
return routeArgument
# Extract the arguments from the raw pattern
extractArgumentsFromRawPattern: () ->
@pattern = @rawPattern
args = @rawPattern.match(/({([a-zA-Z0-9-_]+)}+)/g)
if args?
for arg in args
name = arg.replace(/{/g, '').replace(/}/g, '')
routeArgument = @addArgument(name)
@pattern = @pattern.replace(arg, routeArgument.getMatch())
# Add the prefix on the routes
# @param {String} prefix The prefix to add
# @return {String} The prefix + the pattern
addPrefixOnRoute: (prefix) ->
if prefix? && @needPrefix
return prefix + @pattern
return @pattern
# @param {String} pattern The pattern of the route
setPattern: (@pattern) ->
return this
# @param {String} bundleName The bundle name associated with the route
setBundleName: (@bundleName) ->
return this
# @param {String} controllerName The controller name associated with the route
setControllerName: (@controllerName) ->
return this
# @param {String} methodName The method name associated with the route
setMethodName: (@methodName) ->
return this
# @param {Boolean} async The route asynchronous state
setAsync: (@async) ->
return this
# @param {String} httpMethod The http method associated with the route
setHttpMethod: (@httpMethod) ->
return this
# @param {String} name The name of the route
setName: (@name) ->
return this
# @return {Boolean} Is the route asynchroneous ?
isAsynchronous: () ->
return @async
# @return {String} The pattern of the route
getPattern: () ->
return @pattern
# @return {String} The raw pattern of the route
getRawPattern: () ->
return @rawPattern
# @return {String} The bundle name associated with the route
getBundleName: () ->
return @bundleName
# @return {String} The controller name associated with the route
getControllerName: () ->
return @controllerName
# @return {String} The method name associated with the route
getMethodName: () ->
return @methodName
# @return {String} The http method associated with the route
getHttpMethod: () ->
return @httpMethod
# @return {String} The name of the route
getName: () ->
return @name
# @return [String] The default arguments of the route
getDefaultArgs: () ->
return @defaultArgs
# @return [RouteArgument] The route arguments of the route
getArgs: () ->
return @arguments
# @param {String} name The name of the argument
# @return {String} The default value for an argument
getDefaultValueForArg: (name) ->
if @defaultArgs[name]?
return @defaultArgs[name].getDefaultValue()
return null
# @param {String} name The name of the argument
# @return {Boolean} Does the route contain a default arguments called 'name' ?
hasDefaultArg: (name) ->
if @defaultArgs[name]?
return true
return false
# Inflate the arguments
# @param [String] args The arguments
# @return {String} The path of the argument matching
inflateArguments: (args) ->
path = @prefix + @rawPattern
for name,arg of @arguments
fullname = '{' + name + '}'
if args? && args[name]?
path = path.replace(fullname, args[name])
else if @hasDefaultArg(name)
path = path.replace(fullname, @getDefaultValueForArg(name))
else
path = path.replace(fullname, '')
return path
module.exports = RouteDefinition | true | ###
* This file is part of the Konsserto package.
*
* (c) PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
###
RouteArgument = use('@Konsserto/Component/Router/RouteArgument')
# RouteDefinition
# @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
class RouteDefinition
# Class constructor
# @param {Object} registeredRoute The registered route in the config/route file
# @param {String} prefix The prefix of the route
constructor: (@registeredRoute, prefix) ->
@defaultArgs = {}
@arguments = {}
controller = @registeredRoute.controller.split ':'
@name = @registeredRoute.name
@bundleName = controller[0]
@controllerName = controller[1]
@methodName = controller[2]
@async = @registeredRoute.async ? false
@prefix = prefix
@rawPattern = @registeredRoute.pattern
@httpMethod = @registeredRoute.method ? 'all'
@rawArguments = @registeredRoute.arguments ? {}
@needPrefix = true
@needPrefix = false if @registeredRoute.prefix? && !@registeredRoute.prefix
@extractArgumentsFromRawPattern()
@pattern = @addPrefixOnRoute(prefix)
# Add an argument to the route
# @param {String} name The name of the parameter
# @retturn {RouteArgument} The route argument object
addArgument: (name) ->
requirements = @rawArguments[name] ? {}
routeArgument = new RouteArgument(name, requirements)
@arguments[name] = routeArgument
if routeArgument.hasDefault()
@defaultArgs[name] = routeArgument
return routeArgument
# Extract the arguments from the raw pattern
extractArgumentsFromRawPattern: () ->
@pattern = @rawPattern
args = @rawPattern.match(/({([a-zA-Z0-9-_]+)}+)/g)
if args?
for arg in args
name = arg.replace(/{/g, '').replace(/}/g, '')
routeArgument = @addArgument(name)
@pattern = @pattern.replace(arg, routeArgument.getMatch())
# Add the prefix on the routes
# @param {String} prefix The prefix to add
# @return {String} The prefix + the pattern
addPrefixOnRoute: (prefix) ->
if prefix? && @needPrefix
return prefix + @pattern
return @pattern
# @param {String} pattern The pattern of the route
setPattern: (@pattern) ->
return this
# @param {String} bundleName The bundle name associated with the route
setBundleName: (@bundleName) ->
return this
# @param {String} controllerName The controller name associated with the route
setControllerName: (@controllerName) ->
return this
# @param {String} methodName The method name associated with the route
setMethodName: (@methodName) ->
return this
# @param {Boolean} async The route asynchronous state
setAsync: (@async) ->
return this
# @param {String} httpMethod The http method associated with the route
setHttpMethod: (@httpMethod) ->
return this
# @param {String} name The name of the route
setName: (@name) ->
return this
# @return {Boolean} Is the route asynchroneous ?
isAsynchronous: () ->
return @async
# @return {String} The pattern of the route
getPattern: () ->
return @pattern
# @return {String} The raw pattern of the route
getRawPattern: () ->
return @rawPattern
# @return {String} The bundle name associated with the route
getBundleName: () ->
return @bundleName
# @return {String} The controller name associated with the route
getControllerName: () ->
return @controllerName
# @return {String} The method name associated with the route
getMethodName: () ->
return @methodName
# @return {String} The http method associated with the route
getHttpMethod: () ->
return @httpMethod
# @return {String} The name of the route
getName: () ->
return @name
# @return [String] The default arguments of the route
getDefaultArgs: () ->
return @defaultArgs
# @return [RouteArgument] The route arguments of the route
getArgs: () ->
return @arguments
# @param {String} name The name of the argument
# @return {String} The default value for an argument
getDefaultValueForArg: (name) ->
if @defaultArgs[name]?
return @defaultArgs[name].getDefaultValue()
return null
# @param {String} name The name of the argument
# @return {Boolean} Does the route contain a default arguments called 'name' ?
hasDefaultArg: (name) ->
if @defaultArgs[name]?
return true
return false
# Inflate the arguments
# @param [String] args The arguments
# @return {String} The path of the argument matching
inflateArguments: (args) ->
path = @prefix + @rawPattern
for name,arg of @arguments
fullname = '{' + name + '}'
if args? && args[name]?
path = path.replace(fullname, args[name])
else if @hasDefaultArg(name)
path = path.replace(fullname, @getDefaultValueForArg(name))
else
path = path.replace(fullname, '')
return path
module.exports = RouteDefinition |
[
{
"context": "t aliases - lists all etcd aliases\n#\n# Author:\n# Chris Riddle\n\n_ = require \"underscore\"\n\nmodule.exports = (robo",
"end": 449,
"score": 0.9998595118522644,
"start": 437,
"tag": "NAME",
"value": "Chris Riddle"
}
] | src/etcd.coffee | christriddle/hubot-etcd | 2 | # Description:
# A Hubot script for querying an Etcd cluster
#
# Dependencies:
# None
#
# Commands:
# hubot etcd <alias> cluster health - show cluster health
# hubot etcd <alias> get <key> - gets the value of a key
# hubot etcd add alias <alias> <etcd_host> <etcd_port> - add an alias to a Etcd cluster
# hubot etcd remove alias <alias> - removes an alias
# hubot etcd list aliases - lists all etcd aliases
#
# Author:
# Chris Riddle
_ = require "underscore"
module.exports = (robot) ->
urlTemplate = "http://%s:%s/"
aliases = {}
getUrl = (alias, rest) ->
data = aliases[alias]
if !data?
return
host = data.host
port = data.port
return "http://" + host + ":" + port + rest
robot.brain.on 'loaded', () ->
aliases = robot.brain.data.etcdAliases || {}
robot.respond /etcd (\S+) cluster health$/i, (msg) ->
alias = msg.match[1]
url = getUrl alias "/stats/self" # ?
if !url?
msg.send "No such alias: " + alias
return
msg.http(url).get() (err, res, body) ->
if err?
msg.send "Error getting cluster health: " + err
return
bodyJson = JSON.parse(body)
if bodyJson.errorCode?
# todo
return
msg.send "Cluster health comming soon to a robot near you"
robot.respond /etcd (\S+) get (\S+)$/i, (msg) ->
key = msg.match[2]
alias = msg.match[1]
keyPath = "/v2/keys/" + key
url = getUrl alias, keyPath
if !url?
msg.send "No such alias: " + alias
return
msg.http(url).get() (err, res, body) ->
if err?
msg.send "Error getting key [" + key + "]: " + err
return
msg.send "/code #{body}"
robot.respond /etcd add alias (\S+) (\S+) (\S+)$/i, (msg) ->
alias = msg.match[1]
host = msg.match[2]
port = msg.match[3]
aliases[alias] = { host: host, port: port }
robot.brain.data.etcdAliases = aliases
msg.send "Alias added"
robot.respond /etcd remove alias (\S+)$/i, (msg) ->
alias = msg.match[1]
delete aliases[alias]
robot.brain.data.etcdAliases = aliases
msg.send "Alias removed"
robot.respond /etcd (list|show) aliases/i, (msg) ->
pretty = JSON.stringify aliases
msg.send "/code #{pretty}"
| 165263 | # Description:
# A Hubot script for querying an Etcd cluster
#
# Dependencies:
# None
#
# Commands:
# hubot etcd <alias> cluster health - show cluster health
# hubot etcd <alias> get <key> - gets the value of a key
# hubot etcd add alias <alias> <etcd_host> <etcd_port> - add an alias to a Etcd cluster
# hubot etcd remove alias <alias> - removes an alias
# hubot etcd list aliases - lists all etcd aliases
#
# Author:
# <NAME>
_ = require "underscore"
module.exports = (robot) ->
urlTemplate = "http://%s:%s/"
aliases = {}
getUrl = (alias, rest) ->
data = aliases[alias]
if !data?
return
host = data.host
port = data.port
return "http://" + host + ":" + port + rest
robot.brain.on 'loaded', () ->
aliases = robot.brain.data.etcdAliases || {}
robot.respond /etcd (\S+) cluster health$/i, (msg) ->
alias = msg.match[1]
url = getUrl alias "/stats/self" # ?
if !url?
msg.send "No such alias: " + alias
return
msg.http(url).get() (err, res, body) ->
if err?
msg.send "Error getting cluster health: " + err
return
bodyJson = JSON.parse(body)
if bodyJson.errorCode?
# todo
return
msg.send "Cluster health comming soon to a robot near you"
robot.respond /etcd (\S+) get (\S+)$/i, (msg) ->
key = msg.match[2]
alias = msg.match[1]
keyPath = "/v2/keys/" + key
url = getUrl alias, keyPath
if !url?
msg.send "No such alias: " + alias
return
msg.http(url).get() (err, res, body) ->
if err?
msg.send "Error getting key [" + key + "]: " + err
return
msg.send "/code #{body}"
robot.respond /etcd add alias (\S+) (\S+) (\S+)$/i, (msg) ->
alias = msg.match[1]
host = msg.match[2]
port = msg.match[3]
aliases[alias] = { host: host, port: port }
robot.brain.data.etcdAliases = aliases
msg.send "Alias added"
robot.respond /etcd remove alias (\S+)$/i, (msg) ->
alias = msg.match[1]
delete aliases[alias]
robot.brain.data.etcdAliases = aliases
msg.send "Alias removed"
robot.respond /etcd (list|show) aliases/i, (msg) ->
pretty = JSON.stringify aliases
msg.send "/code #{pretty}"
| true | # Description:
# A Hubot script for querying an Etcd cluster
#
# Dependencies:
# None
#
# Commands:
# hubot etcd <alias> cluster health - show cluster health
# hubot etcd <alias> get <key> - gets the value of a key
# hubot etcd add alias <alias> <etcd_host> <etcd_port> - add an alias to a Etcd cluster
# hubot etcd remove alias <alias> - removes an alias
# hubot etcd list aliases - lists all etcd aliases
#
# Author:
# PI:NAME:<NAME>END_PI
_ = require "underscore"
module.exports = (robot) ->
urlTemplate = "http://%s:%s/"
aliases = {}
getUrl = (alias, rest) ->
data = aliases[alias]
if !data?
return
host = data.host
port = data.port
return "http://" + host + ":" + port + rest
robot.brain.on 'loaded', () ->
aliases = robot.brain.data.etcdAliases || {}
robot.respond /etcd (\S+) cluster health$/i, (msg) ->
alias = msg.match[1]
url = getUrl alias "/stats/self" # ?
if !url?
msg.send "No such alias: " + alias
return
msg.http(url).get() (err, res, body) ->
if err?
msg.send "Error getting cluster health: " + err
return
bodyJson = JSON.parse(body)
if bodyJson.errorCode?
# todo
return
msg.send "Cluster health comming soon to a robot near you"
robot.respond /etcd (\S+) get (\S+)$/i, (msg) ->
key = msg.match[2]
alias = msg.match[1]
keyPath = "/v2/keys/" + key
url = getUrl alias, keyPath
if !url?
msg.send "No such alias: " + alias
return
msg.http(url).get() (err, res, body) ->
if err?
msg.send "Error getting key [" + key + "]: " + err
return
msg.send "/code #{body}"
robot.respond /etcd add alias (\S+) (\S+) (\S+)$/i, (msg) ->
alias = msg.match[1]
host = msg.match[2]
port = msg.match[3]
aliases[alias] = { host: host, port: port }
robot.brain.data.etcdAliases = aliases
msg.send "Alias added"
robot.respond /etcd remove alias (\S+)$/i, (msg) ->
alias = msg.match[1]
delete aliases[alias]
robot.brain.data.etcdAliases = aliases
msg.send "Alias removed"
robot.respond /etcd (list|show) aliases/i, (msg) ->
pretty = JSON.stringify aliases
msg.send "/code #{pretty}"
|
[
{
"context": "###\n * @author \t\tAbdelhakim RAFIK\n * @version \tv1.0.1\n * @license \tMIT License\n * @",
"end": 33,
"score": 0.9998888373374939,
"start": 17,
"tag": "NAME",
"value": "Abdelhakim RAFIK"
},
{
"context": "nse \tMIT License\n * @copyright \tCopyright (c) 2021 Abdelhaki... | src/app/controllers/pharmacyController.coffee | AbdelhakimRafik/Pharmalogy-API | 0 | ###
* @author Abdelhakim RAFIK
* @version v1.0.1
* @license MIT License
* @copyright Copyright (c) 2021 Abdelhakim RAFIK
* @date Mar 2021
###
Validator = require 'validatorjs'
Pharmacy = require '../models/pharmacy'
{ sequelize } = require '../../database'
module.exports.create = (req, res) ->
# pharmacy data validation rules
validationRules =
name: 'required|string|min:5'
addresse: 'string'
email: 'string|email'
webSite: 'string|'
phone: 'string|size:10'
city: 'required|string'
country: 'required|string'
longitude: 'required|string'
latitude: 'required|string'
# validate request data
validation = new Validator req.body, validationRules
# when data not validated
unless do validation.passes
res.status(400).json
message: "Data errors"
errors: do validation.errors.all
errorCount: validation.errorCount
# data are validated
else
# check if pharmacy name already registred
pharmacy = await Pharmacy.findOne where: name: req.body.name
# pharmacy not found with same name
unless pharmacy
# add pharmacy to database
pharmacy = await Pharmacy.create req.body
# check if pharmacy created
if pharmacy
res.status(200).json
message: "Pharmacy created successfully"
id: pharmacy.id
# pharmacy with same name found
else
res.status(401).json
message: "Pharmacy name already exists"
return
module.exports.nameValidation = (req, res) ->
# pharmacy name validation rule
validationRule =
name: 'required|string|min:5'
# validate request data
validation = new Validator req.body, validationRule
# when name not validated
unless do validation.passes
res.status(400).json
message: "Data errors"
errors: do validation.errors.all
errorCount: validation.errorCount
# name format are validated
else
# check if pharmacy name already registred
pharmacy = await Pharmacy.findOne where: name: req.body.name
# pharmacy not found with same name
unless pharmacy
res.status(200).json
message: "Pharmacy name available"
# pharmacy with same name found
else
res.status(401).json
message: "Pharmacy name already exists"
return
module.exports.getLocations = (req, res) ->
# data validation rules
validationRules =
city: 'string|required_without:location'
location:
latitude: 'numeric|required_with:longitude'
longitude: 'numeric|required_with:latitude'
range: 'numeric'
# validate request data
validation = new Validator req.body, validationRules
# when data error validation occured
unless do validation.passes
res.status(400).json
message: "Data errors"
errors: do validation.errors.all
errorCount: validation.errorCount
else
console.log 'Validated'
# get data from database by city name
if req.body.city
pharmacies = await Pharmacy.findAll where: city: req.body.city
# get data from database by location range
else if req.body.location
req.body.range ?= 5 # set default value if undefined
pharmacies = await sequelize.query "SELECT *, ( 6371 * acos( cos( radians(#{req.body.location.latitude}) ) * cos( radians( latitude ) ) * cos( radians( longitude ) - radians(#{req.body.location.longitude}) ) + sin( radians(#{req.body.location.latitude}) ) * sin( radians( latitude ) ) ) ) AS distance FROM pharmacies HAVING distance < #{req.body.location.range}"
pharmacies = pharmacies[0]
if pharmacies
res.status(200).json pharmacies
else
res.status(400).json
message: 'An error occured while fetching data'
return | 168620 | ###
* @author <NAME>
* @version v1.0.1
* @license MIT License
* @copyright Copyright (c) 2021 <NAME>
* @date Mar 2021
###
Validator = require 'validatorjs'
Pharmacy = require '../models/pharmacy'
{ sequelize } = require '../../database'
module.exports.create = (req, res) ->
# pharmacy data validation rules
validationRules =
name: 'required|string|min:5'
addresse: 'string'
email: 'string|email'
webSite: 'string|'
phone: 'string|size:10'
city: 'required|string'
country: 'required|string'
longitude: 'required|string'
latitude: 'required|string'
# validate request data
validation = new Validator req.body, validationRules
# when data not validated
unless do validation.passes
res.status(400).json
message: "Data errors"
errors: do validation.errors.all
errorCount: validation.errorCount
# data are validated
else
# check if pharmacy name already registred
pharmacy = await Pharmacy.findOne where: name: req.body.name
# pharmacy not found with same name
unless pharmacy
# add pharmacy to database
pharmacy = await Pharmacy.create req.body
# check if pharmacy created
if pharmacy
res.status(200).json
message: "Pharmacy created successfully"
id: pharmacy.id
# pharmacy with same name found
else
res.status(401).json
message: "Pharmacy name already exists"
return
module.exports.nameValidation = (req, res) ->
# pharmacy name validation rule
validationRule =
name: 'required|string|min:5'
# validate request data
validation = new Validator req.body, validationRule
# when name not validated
unless do validation.passes
res.status(400).json
message: "Data errors"
errors: do validation.errors.all
errorCount: validation.errorCount
# name format are validated
else
# check if pharmacy name already registred
pharmacy = await Pharmacy.findOne where: name: req.body.name
# pharmacy not found with same name
unless pharmacy
res.status(200).json
message: "Pharmacy name available"
# pharmacy with same name found
else
res.status(401).json
message: "Pharmacy name already exists"
return
module.exports.getLocations = (req, res) ->
# data validation rules
validationRules =
city: 'string|required_without:location'
location:
latitude: 'numeric|required_with:longitude'
longitude: 'numeric|required_with:latitude'
range: 'numeric'
# validate request data
validation = new Validator req.body, validationRules
# when data error validation occured
unless do validation.passes
res.status(400).json
message: "Data errors"
errors: do validation.errors.all
errorCount: validation.errorCount
else
console.log 'Validated'
# get data from database by city name
if req.body.city
pharmacies = await Pharmacy.findAll where: city: req.body.city
# get data from database by location range
else if req.body.location
req.body.range ?= 5 # set default value if undefined
pharmacies = await sequelize.query "SELECT *, ( 6371 * acos( cos( radians(#{req.body.location.latitude}) ) * cos( radians( latitude ) ) * cos( radians( longitude ) - radians(#{req.body.location.longitude}) ) + sin( radians(#{req.body.location.latitude}) ) * sin( radians( latitude ) ) ) ) AS distance FROM pharmacies HAVING distance < #{req.body.location.range}"
pharmacies = pharmacies[0]
if pharmacies
res.status(200).json pharmacies
else
res.status(400).json
message: 'An error occured while fetching data'
return | true | ###
* @author PI:NAME:<NAME>END_PI
* @version v1.0.1
* @license MIT License
* @copyright Copyright (c) 2021 PI:NAME:<NAME>END_PI
* @date Mar 2021
###
Validator = require 'validatorjs'
Pharmacy = require '../models/pharmacy'
{ sequelize } = require '../../database'
module.exports.create = (req, res) ->
# pharmacy data validation rules
validationRules =
name: 'required|string|min:5'
addresse: 'string'
email: 'string|email'
webSite: 'string|'
phone: 'string|size:10'
city: 'required|string'
country: 'required|string'
longitude: 'required|string'
latitude: 'required|string'
# validate request data
validation = new Validator req.body, validationRules
# when data not validated
unless do validation.passes
res.status(400).json
message: "Data errors"
errors: do validation.errors.all
errorCount: validation.errorCount
# data are validated
else
# check if pharmacy name already registred
pharmacy = await Pharmacy.findOne where: name: req.body.name
# pharmacy not found with same name
unless pharmacy
# add pharmacy to database
pharmacy = await Pharmacy.create req.body
# check if pharmacy created
if pharmacy
res.status(200).json
message: "Pharmacy created successfully"
id: pharmacy.id
# pharmacy with same name found
else
res.status(401).json
message: "Pharmacy name already exists"
return
module.exports.nameValidation = (req, res) ->
# pharmacy name validation rule
validationRule =
name: 'required|string|min:5'
# validate request data
validation = new Validator req.body, validationRule
# when name not validated
unless do validation.passes
res.status(400).json
message: "Data errors"
errors: do validation.errors.all
errorCount: validation.errorCount
# name format are validated
else
# check if pharmacy name already registred
pharmacy = await Pharmacy.findOne where: name: req.body.name
# pharmacy not found with same name
unless pharmacy
res.status(200).json
message: "Pharmacy name available"
# pharmacy with same name found
else
res.status(401).json
message: "Pharmacy name already exists"
return
module.exports.getLocations = (req, res) ->
# data validation rules
validationRules =
city: 'string|required_without:location'
location:
latitude: 'numeric|required_with:longitude'
longitude: 'numeric|required_with:latitude'
range: 'numeric'
# validate request data
validation = new Validator req.body, validationRules
# when data error validation occured
unless do validation.passes
res.status(400).json
message: "Data errors"
errors: do validation.errors.all
errorCount: validation.errorCount
else
console.log 'Validated'
# get data from database by city name
if req.body.city
pharmacies = await Pharmacy.findAll where: city: req.body.city
# get data from database by location range
else if req.body.location
req.body.range ?= 5 # set default value if undefined
pharmacies = await sequelize.query "SELECT *, ( 6371 * acos( cos( radians(#{req.body.location.latitude}) ) * cos( radians( latitude ) ) * cos( radians( longitude ) - radians(#{req.body.location.longitude}) ) + sin( radians(#{req.body.location.latitude}) ) * sin( radians( latitude ) ) ) ) AS distance FROM pharmacies HAVING distance < #{req.body.location.range}"
pharmacies = pharmacies[0]
if pharmacies
res.status(200).json pharmacies
else
res.status(400).json
message: 'An error occured while fetching data'
return |
[
{
"context": "ript Client Detection\"\n # (C) viazenetti GmbH (Christian Ludwig)\n\n unknown = '-'\n #browser\n nVer = navig",
"end": 1160,
"score": 0.9998834133148193,
"start": 1144,
"tag": "NAME",
"value": "Christian Ludwig"
}
] | src/SystemInfo.coffee | intensifier/Fizzygum | 110 | # this file is excluded from the fizzygum homepage build
# Holds information about browser and machine
# Note that some of these could
# change during user session.
class SystemInfo
userAgent: nil
screenWidth: nil
screenHeight: nil
screenColorDepth: nil
screenPixelRatio: nil
screenCeilPixelRatio: nil
appCodeName: nil
appName: nil
appVersion: nil
cookieEnabled: nil
platform: nil
systemLanguage: nil
browser: nil
browserVersion: nil
mobile: nil
os: nil
osVersion: nil
cookies: nil
constructor: ->
@userAgent = navigator.userAgent
@screenWidth = window.screen.width
@screenHeight = window.screen.height
@screenColorDepth = window.screen.colorDepth
@screenPixelRatio = window.devicePixelRatio
@screenCeilPixelRatio = Math.ceil window.devicePixelRatio
@appCodeName = navigator.appCodeName
@appName = navigator.appName
@appVersion = navigator.appVersion
@cookieEnabled = navigator.cookieEnabled
@platform = navigator.platform
@systemLanguage = navigator.systemLanguage
# code here is from "JavaScript Client Detection"
# (C) viazenetti GmbH (Christian Ludwig)
unknown = '-'
#browser
nVer = navigator.appVersion
nAgt = navigator.userAgent
browser = navigator.appName
version = '' + parseFloat navigator.appVersion
majorVersion = parseInt navigator.appVersion, 10
nameOffset = undefined
verOffset = undefined
ix = undefined
# Opera
if (verOffset = nAgt.indexOf 'Opera') != -1
browser = 'Opera'
version = nAgt.substring verOffset + 6
if (verOffset = nAgt.indexOf 'Version') != -1
version = nAgt.substring verOffset + 8
else if (verOffset = nAgt.indexOf 'MSIE') != -1
browser = 'Microsoft Internet Explorer'
version = nAgt.substring verOffset + 5
else if (verOffset = nAgt.indexOf 'Chrome') != -1
browser = 'Chrome'
version = nAgt.substring verOffset + 7
else if (verOffset = nAgt.indexOf 'Safari') != -1
browser = 'Safari'
version = nAgt.substring verOffset + 7
if (verOffset = nAgt.indexOf 'Version') != -1
version = nAgt.substring verOffset + 8
else if (verOffset = nAgt.indexOf 'Firefox') != -1
browser = 'Firefox'
version = nAgt.substring verOffset + 8
else if nAgt.indexOf('Trident/') != -1
browser = 'Microsoft Internet Explorer'
version = nAgt.substring(nAgt.indexOf('rv:') + 3)
else if (nameOffset = nAgt.lastIndexOf(' ') + 1) < (verOffset = nAgt.lastIndexOf('/'))
browser = nAgt.substring nameOffset, verOffset
version = nAgt.substring verOffset + 1
if browser.toLowerCase() == browser.toUpperCase()
browser = navigator.appName
# trim the version string
if (ix = version.indexOf ';') != -1
version = version.substring 0, ix
if (ix = version.indexOf ' ') != -1
version = version.substring 0, ix
if (ix = version.indexOf ')') != -1
version = version.substring 0, ix
majorVersion = parseInt '' + version, 10
if isNaN majorVersion
version = '' + parseFloat navigator.appVersion
majorVersion = parseInt navigator.appVersion, 10
# mobile version
mobile = /Mobile|mini|Fennec|Android|iP(ad|od|hone)/.test(nVer)
# cookie
cookieEnabled = if navigator.cookieEnabled then true else false
if typeof navigator.cookieEnabled == 'undefined' and !cookieEnabled
document.cookie = 'testcookie'
cookieEnabled = document.cookie.includes('testcookie')
# system
os = unknown
clientStrings = [
{
s: 'Windows 3.11'
r: /Win16/
}
{
s: 'Windows 95'
r: /(Windows 95|Win95|Windows_95)/
}
{
s: 'Windows ME'
r: /(Win 9x 4.90|Windows ME)/
}
{
s: 'Windows 98'
r: /(Windows 98|Win98)/
}
{
s: 'Windows CE'
r: /Windows CE/
}
{
s: 'Windows 2000'
r: /(Windows NT 5.0|Windows 2000)/
}
{
s: 'Windows XP'
r: /(Windows NT 5.1|Windows XP)/
}
{
s: 'Windows Server 2003'
r: /Windows NT 5.2/
}
{
s: 'Windows Vista'
r: /Windows NT 6.0/
}
{
s: 'Windows 7'
r: /(Windows 7|Windows NT 6.1)/
}
{
s: 'Windows 8.1'
r: /(Windows 8.1|Windows NT 6.3)/
}
{
s: 'Windows 8'
r: /(Windows 8|Windows NT 6.2)/
}
{
s: 'Windows NT 4.0'
r: /(Windows NT 4.0|WinNT4.0|WinNT|Windows NT)/
}
{
s: 'Windows ME'
r: /Windows ME/
}
{
s: 'Android'
r: /Android/
}
{
s: 'Open BSD'
r: /OpenBSD/
}
{
s: 'Sun OS'
r: /SunOS/
}
{
s: 'Linux'
r: /(Linux|X11)/
}
{
s: 'iOS'
r: /(iPhone|iPad|iPod)/
}
{
s: 'Mac OS X'
r: /Mac OS X/
}
{
s: 'Mac OS'
r: /(MacPPC|MacIntel|Mac_PowerPC|Macintosh)/
}
{
s: 'QNX'
r: /QNX/
}
{
s: 'UNIX'
r: /UNIX/
}
{
s: 'BeOS'
r: /BeOS/
}
{
s: 'OS/2'
r: /OS\/2/
}
{
s: 'Search Bot'
r: /(nuhk|Googlebot|Yammybot|Openbot|Slurp|MSNBot|Ask Jeeves\/Teoma|ia_archiver)/
}
]
for id of clientStrings
cs = clientStrings[id]
if cs.r.test nAgt
os = cs.s
break
osVersion = unknown
if /Windows/.test(os)
osVersion = /Windows (.*)/.exec(os)[1]
os = 'Windows'
switch os
when 'Mac OS X'
osVersion = /Mac OS X (10[\._\d]+)/.exec(nAgt)[1]
when 'Android'
osVersion = /Android ([\._\d]+)/.exec(nAgt)[1]
when 'iOS'
osVersion = /OS (\d+)_(\d+)_?(\d+)?/.exec(nVer)
osVersion = osVersion[1] + '.' + osVersion[2] + '.' + (osVersion[3] | 0)
@browser = browser
@browserVersion = version
@mobile = mobile
@os = os
@osVersion = osVersion
@cookies = cookieEnabled
| 105204 | # this file is excluded from the fizzygum homepage build
# Holds information about browser and machine
# Note that some of these could
# change during user session.
class SystemInfo
userAgent: nil
screenWidth: nil
screenHeight: nil
screenColorDepth: nil
screenPixelRatio: nil
screenCeilPixelRatio: nil
appCodeName: nil
appName: nil
appVersion: nil
cookieEnabled: nil
platform: nil
systemLanguage: nil
browser: nil
browserVersion: nil
mobile: nil
os: nil
osVersion: nil
cookies: nil
constructor: ->
@userAgent = navigator.userAgent
@screenWidth = window.screen.width
@screenHeight = window.screen.height
@screenColorDepth = window.screen.colorDepth
@screenPixelRatio = window.devicePixelRatio
@screenCeilPixelRatio = Math.ceil window.devicePixelRatio
@appCodeName = navigator.appCodeName
@appName = navigator.appName
@appVersion = navigator.appVersion
@cookieEnabled = navigator.cookieEnabled
@platform = navigator.platform
@systemLanguage = navigator.systemLanguage
# code here is from "JavaScript Client Detection"
# (C) viazenetti GmbH (<NAME>)
unknown = '-'
#browser
nVer = navigator.appVersion
nAgt = navigator.userAgent
browser = navigator.appName
version = '' + parseFloat navigator.appVersion
majorVersion = parseInt navigator.appVersion, 10
nameOffset = undefined
verOffset = undefined
ix = undefined
# Opera
if (verOffset = nAgt.indexOf 'Opera') != -1
browser = 'Opera'
version = nAgt.substring verOffset + 6
if (verOffset = nAgt.indexOf 'Version') != -1
version = nAgt.substring verOffset + 8
else if (verOffset = nAgt.indexOf 'MSIE') != -1
browser = 'Microsoft Internet Explorer'
version = nAgt.substring verOffset + 5
else if (verOffset = nAgt.indexOf 'Chrome') != -1
browser = 'Chrome'
version = nAgt.substring verOffset + 7
else if (verOffset = nAgt.indexOf 'Safari') != -1
browser = 'Safari'
version = nAgt.substring verOffset + 7
if (verOffset = nAgt.indexOf 'Version') != -1
version = nAgt.substring verOffset + 8
else if (verOffset = nAgt.indexOf 'Firefox') != -1
browser = 'Firefox'
version = nAgt.substring verOffset + 8
else if nAgt.indexOf('Trident/') != -1
browser = 'Microsoft Internet Explorer'
version = nAgt.substring(nAgt.indexOf('rv:') + 3)
else if (nameOffset = nAgt.lastIndexOf(' ') + 1) < (verOffset = nAgt.lastIndexOf('/'))
browser = nAgt.substring nameOffset, verOffset
version = nAgt.substring verOffset + 1
if browser.toLowerCase() == browser.toUpperCase()
browser = navigator.appName
# trim the version string
if (ix = version.indexOf ';') != -1
version = version.substring 0, ix
if (ix = version.indexOf ' ') != -1
version = version.substring 0, ix
if (ix = version.indexOf ')') != -1
version = version.substring 0, ix
majorVersion = parseInt '' + version, 10
if isNaN majorVersion
version = '' + parseFloat navigator.appVersion
majorVersion = parseInt navigator.appVersion, 10
# mobile version
mobile = /Mobile|mini|Fennec|Android|iP(ad|od|hone)/.test(nVer)
# cookie
cookieEnabled = if navigator.cookieEnabled then true else false
if typeof navigator.cookieEnabled == 'undefined' and !cookieEnabled
document.cookie = 'testcookie'
cookieEnabled = document.cookie.includes('testcookie')
# system
os = unknown
clientStrings = [
{
s: 'Windows 3.11'
r: /Win16/
}
{
s: 'Windows 95'
r: /(Windows 95|Win95|Windows_95)/
}
{
s: 'Windows ME'
r: /(Win 9x 4.90|Windows ME)/
}
{
s: 'Windows 98'
r: /(Windows 98|Win98)/
}
{
s: 'Windows CE'
r: /Windows CE/
}
{
s: 'Windows 2000'
r: /(Windows NT 5.0|Windows 2000)/
}
{
s: 'Windows XP'
r: /(Windows NT 5.1|Windows XP)/
}
{
s: 'Windows Server 2003'
r: /Windows NT 5.2/
}
{
s: 'Windows Vista'
r: /Windows NT 6.0/
}
{
s: 'Windows 7'
r: /(Windows 7|Windows NT 6.1)/
}
{
s: 'Windows 8.1'
r: /(Windows 8.1|Windows NT 6.3)/
}
{
s: 'Windows 8'
r: /(Windows 8|Windows NT 6.2)/
}
{
s: 'Windows NT 4.0'
r: /(Windows NT 4.0|WinNT4.0|WinNT|Windows NT)/
}
{
s: 'Windows ME'
r: /Windows ME/
}
{
s: 'Android'
r: /Android/
}
{
s: 'Open BSD'
r: /OpenBSD/
}
{
s: 'Sun OS'
r: /SunOS/
}
{
s: 'Linux'
r: /(Linux|X11)/
}
{
s: 'iOS'
r: /(iPhone|iPad|iPod)/
}
{
s: 'Mac OS X'
r: /Mac OS X/
}
{
s: 'Mac OS'
r: /(MacPPC|MacIntel|Mac_PowerPC|Macintosh)/
}
{
s: 'QNX'
r: /QNX/
}
{
s: 'UNIX'
r: /UNIX/
}
{
s: 'BeOS'
r: /BeOS/
}
{
s: 'OS/2'
r: /OS\/2/
}
{
s: 'Search Bot'
r: /(nuhk|Googlebot|Yammybot|Openbot|Slurp|MSNBot|Ask Jeeves\/Teoma|ia_archiver)/
}
]
for id of clientStrings
cs = clientStrings[id]
if cs.r.test nAgt
os = cs.s
break
osVersion = unknown
if /Windows/.test(os)
osVersion = /Windows (.*)/.exec(os)[1]
os = 'Windows'
switch os
when 'Mac OS X'
osVersion = /Mac OS X (10[\._\d]+)/.exec(nAgt)[1]
when 'Android'
osVersion = /Android ([\._\d]+)/.exec(nAgt)[1]
when 'iOS'
osVersion = /OS (\d+)_(\d+)_?(\d+)?/.exec(nVer)
osVersion = osVersion[1] + '.' + osVersion[2] + '.' + (osVersion[3] | 0)
@browser = browser
@browserVersion = version
@mobile = mobile
@os = os
@osVersion = osVersion
@cookies = cookieEnabled
| true | # this file is excluded from the fizzygum homepage build
# Holds information about browser and machine
# Note that some of these could
# change during user session.
class SystemInfo
userAgent: nil
screenWidth: nil
screenHeight: nil
screenColorDepth: nil
screenPixelRatio: nil
screenCeilPixelRatio: nil
appCodeName: nil
appName: nil
appVersion: nil
cookieEnabled: nil
platform: nil
systemLanguage: nil
browser: nil
browserVersion: nil
mobile: nil
os: nil
osVersion: nil
cookies: nil
constructor: ->
@userAgent = navigator.userAgent
@screenWidth = window.screen.width
@screenHeight = window.screen.height
@screenColorDepth = window.screen.colorDepth
@screenPixelRatio = window.devicePixelRatio
@screenCeilPixelRatio = Math.ceil window.devicePixelRatio
@appCodeName = navigator.appCodeName
@appName = navigator.appName
@appVersion = navigator.appVersion
@cookieEnabled = navigator.cookieEnabled
@platform = navigator.platform
@systemLanguage = navigator.systemLanguage
# code here is from "JavaScript Client Detection"
# (C) viazenetti GmbH (PI:NAME:<NAME>END_PI)
unknown = '-'
#browser
nVer = navigator.appVersion
nAgt = navigator.userAgent
browser = navigator.appName
version = '' + parseFloat navigator.appVersion
majorVersion = parseInt navigator.appVersion, 10
nameOffset = undefined
verOffset = undefined
ix = undefined
# Opera
if (verOffset = nAgt.indexOf 'Opera') != -1
browser = 'Opera'
version = nAgt.substring verOffset + 6
if (verOffset = nAgt.indexOf 'Version') != -1
version = nAgt.substring verOffset + 8
else if (verOffset = nAgt.indexOf 'MSIE') != -1
browser = 'Microsoft Internet Explorer'
version = nAgt.substring verOffset + 5
else if (verOffset = nAgt.indexOf 'Chrome') != -1
browser = 'Chrome'
version = nAgt.substring verOffset + 7
else if (verOffset = nAgt.indexOf 'Safari') != -1
browser = 'Safari'
version = nAgt.substring verOffset + 7
if (verOffset = nAgt.indexOf 'Version') != -1
version = nAgt.substring verOffset + 8
else if (verOffset = nAgt.indexOf 'Firefox') != -1
browser = 'Firefox'
version = nAgt.substring verOffset + 8
else if nAgt.indexOf('Trident/') != -1
browser = 'Microsoft Internet Explorer'
version = nAgt.substring(nAgt.indexOf('rv:') + 3)
else if (nameOffset = nAgt.lastIndexOf(' ') + 1) < (verOffset = nAgt.lastIndexOf('/'))
browser = nAgt.substring nameOffset, verOffset
version = nAgt.substring verOffset + 1
if browser.toLowerCase() == browser.toUpperCase()
browser = navigator.appName
# trim the version string
if (ix = version.indexOf ';') != -1
version = version.substring 0, ix
if (ix = version.indexOf ' ') != -1
version = version.substring 0, ix
if (ix = version.indexOf ')') != -1
version = version.substring 0, ix
majorVersion = parseInt '' + version, 10
if isNaN majorVersion
version = '' + parseFloat navigator.appVersion
majorVersion = parseInt navigator.appVersion, 10
# mobile version
mobile = /Mobile|mini|Fennec|Android|iP(ad|od|hone)/.test(nVer)
# cookie
cookieEnabled = if navigator.cookieEnabled then true else false
if typeof navigator.cookieEnabled == 'undefined' and !cookieEnabled
document.cookie = 'testcookie'
cookieEnabled = document.cookie.includes('testcookie')
# system
os = unknown
clientStrings = [
{
s: 'Windows 3.11'
r: /Win16/
}
{
s: 'Windows 95'
r: /(Windows 95|Win95|Windows_95)/
}
{
s: 'Windows ME'
r: /(Win 9x 4.90|Windows ME)/
}
{
s: 'Windows 98'
r: /(Windows 98|Win98)/
}
{
s: 'Windows CE'
r: /Windows CE/
}
{
s: 'Windows 2000'
r: /(Windows NT 5.0|Windows 2000)/
}
{
s: 'Windows XP'
r: /(Windows NT 5.1|Windows XP)/
}
{
s: 'Windows Server 2003'
r: /Windows NT 5.2/
}
{
s: 'Windows Vista'
r: /Windows NT 6.0/
}
{
s: 'Windows 7'
r: /(Windows 7|Windows NT 6.1)/
}
{
s: 'Windows 8.1'
r: /(Windows 8.1|Windows NT 6.3)/
}
{
s: 'Windows 8'
r: /(Windows 8|Windows NT 6.2)/
}
{
s: 'Windows NT 4.0'
r: /(Windows NT 4.0|WinNT4.0|WinNT|Windows NT)/
}
{
s: 'Windows ME'
r: /Windows ME/
}
{
s: 'Android'
r: /Android/
}
{
s: 'Open BSD'
r: /OpenBSD/
}
{
s: 'Sun OS'
r: /SunOS/
}
{
s: 'Linux'
r: /(Linux|X11)/
}
{
s: 'iOS'
r: /(iPhone|iPad|iPod)/
}
{
s: 'Mac OS X'
r: /Mac OS X/
}
{
s: 'Mac OS'
r: /(MacPPC|MacIntel|Mac_PowerPC|Macintosh)/
}
{
s: 'QNX'
r: /QNX/
}
{
s: 'UNIX'
r: /UNIX/
}
{
s: 'BeOS'
r: /BeOS/
}
{
s: 'OS/2'
r: /OS\/2/
}
{
s: 'Search Bot'
r: /(nuhk|Googlebot|Yammybot|Openbot|Slurp|MSNBot|Ask Jeeves\/Teoma|ia_archiver)/
}
]
for id of clientStrings
cs = clientStrings[id]
if cs.r.test nAgt
os = cs.s
break
osVersion = unknown
if /Windows/.test(os)
osVersion = /Windows (.*)/.exec(os)[1]
os = 'Windows'
switch os
when 'Mac OS X'
osVersion = /Mac OS X (10[\._\d]+)/.exec(nAgt)[1]
when 'Android'
osVersion = /Android ([\._\d]+)/.exec(nAgt)[1]
when 'iOS'
osVersion = /OS (\d+)_(\d+)_?(\d+)?/.exec(nVer)
osVersion = osVersion[1] + '.' + osVersion[2] + '.' + (osVersion[3] | 0)
@browser = browser
@browserVersion = version
@mobile = mobile
@os = os
@osVersion = osVersion
@cookies = cookieEnabled
|
[
{
"context": " room.set(data)\n )\n\nroom = new Room(\n self: \"Joe\",\n name: \"My Room\",\n people: []\n)\n#console.log ",
"end": 433,
"score": 0.9933170676231384,
"start": 430,
"tag": "NAME",
"value": "Joe"
},
{
"context": "ta)\n )\n\nroom = new Room(\n self: \"Joe\",\n ... | assets/script.coffee | cespare/scrumcard | 0 | #_ = require './underscore'
#Backbone = require './backbone'
class Room extends Backbone.Model
defaults:
people: []
class RoomCollection extends Backbone.Collection
model: Room
class User extends Backbone.Model
defaults:
vote: -1
change_vote: (vote) ->
this.save({"vote": vote})
poll_room = (roomname) ->
$.get("api/rooms/#{roomname}",
(data) ->
room.set(data)
)
room = new Room(
self: "Joe",
name: "My Room",
people: []
)
#console.log room.get("user")
#console.log room.get("name")
#console.log room.get("people")
| 14660 | #_ = require './underscore'
#Backbone = require './backbone'
class Room extends Backbone.Model
defaults:
people: []
class RoomCollection extends Backbone.Collection
model: Room
class User extends Backbone.Model
defaults:
vote: -1
change_vote: (vote) ->
this.save({"vote": vote})
poll_room = (roomname) ->
$.get("api/rooms/#{roomname}",
(data) ->
room.set(data)
)
room = new Room(
self: "<NAME>",
name: "<NAME>",
people: []
)
#console.log room.get("user")
#console.log room.get("name")
#console.log room.get("people")
| true | #_ = require './underscore'
#Backbone = require './backbone'
class Room extends Backbone.Model
defaults:
people: []
class RoomCollection extends Backbone.Collection
model: Room
class User extends Backbone.Model
defaults:
vote: -1
change_vote: (vote) ->
this.save({"vote": vote})
poll_room = (roomname) ->
$.get("api/rooms/#{roomname}",
(data) ->
room.set(data)
)
room = new Room(
self: "PI:NAME:<NAME>END_PI",
name: "PI:NAME:<NAME>END_PI",
people: []
)
#console.log room.get("user")
#console.log room.get("name")
#console.log room.get("people")
|
[
{
"context": ".full-menu .header.list-item': [\n\t\t{\n\t\t\t'label': 'Bemy'\n\t\t\t'submenu': [\n\t\t\t\t{\n\t\t\t\t\t'label': 'Auto'\n\t\t\t\t\t",
"end": 111,
"score": 0.7511788606643677,
"start": 107,
"tag": "NAME",
"value": "Bemy"
}
] | menus/bemy.cson | frux/atom-bemy | 2 | 'context-menu':
'.tree-view.full-menu .entry, .tree-view.full-menu .header.list-item': [
{
'label': 'Bemy'
'submenu': [
{
'label': 'Auto'
'command': 'bemy:auto'
},
{
'label': 'Create'
'command': 'bemy:create'
}
]
}
]
'menu': [
{}
]
| 22825 | 'context-menu':
'.tree-view.full-menu .entry, .tree-view.full-menu .header.list-item': [
{
'label': '<NAME>'
'submenu': [
{
'label': 'Auto'
'command': 'bemy:auto'
},
{
'label': 'Create'
'command': 'bemy:create'
}
]
}
]
'menu': [
{}
]
| true | 'context-menu':
'.tree-view.full-menu .entry, .tree-view.full-menu .header.list-item': [
{
'label': 'PI:NAME:<NAME>END_PI'
'submenu': [
{
'label': 'Auto'
'command': 'bemy:auto'
},
{
'label': 'Create'
'command': 'bemy:create'
}
]
}
]
'menu': [
{}
]
|
[
{
"context": " by grab and drag scroll.\n# \n# Copyright (c) 2009 Toshimitsu Takahashi\n# Modified 2009 by Andrew Badr\n# \n# Released un",
"end": 145,
"score": 0.9998530149459839,
"start": 125,
"tag": "NAME",
"value": "Toshimitsu Takahashi"
},
{
"context": " (c) 2009 Toshimitsu T... | jotleaf/static/js/libs/jquery.scrollview.coffee | reverie/jotleaf.com | 1 | # ScrollView - jQuery plugin 0.1
#
# This plugin supplies contents view by grab and drag scroll.
#
# Copyright (c) 2009 Toshimitsu Takahashi
# Modified 2009 by Andrew Badr
#
# Released under the MIT license.
class DraggableSurface
constructor: (@container, @scrollBy) ->
@container = $(@container)
@active = true
@_isGrabbing = false
@_xp = @_yp = @_grabbedNode = null
@bindEvents()
dragClass: 'scrollview-dragging'
startgrab: (target) =>
@_isGrabbing = true
@_grabbedNode = target
@container.addClass(@dragClass)
@container.trigger('start-drag')
stopgrab: =>
@_isGrabbing = false
@container.removeClass(@dragClass)
@container.trigger('stop-drag')
@_grabbedNode = null
bindEvents: =>
@container.mousedown((e) =>
# Tracks how many pixels we 'just' moved, so we can check
# on a click event. Value should be zero if either the last
# mousedown didn't initiate a drag, or if it did but the user
# didn't move it [much].
@_pixelsMoved = 0
# Abort early if not active, so that we don't preventDefault
# on an event someone else wants
# Added right click detection to hijack those from chrome's
# finicky handling
if not (@active and (e.which ==1 or e.which==3))
return
# This really is a grab -- preventdefault so the browser doesn't
# search for text-selections to make while the mouse is down.
# (Big performance issue on Chrome, see:
# http://code.google.com/p/chromium/issues/detail?id=103148 )
e.preventDefault()
# only left clicks cause dragging
if e.which == 1
# Start grabbing
@startgrab(e.target)
@_xp = e.pageX
@_yp = e.pageY
)
@container.mousemove((e) =>
if not @_isGrabbing
return true
xDiff = @_xp - e.pageX
yDiff = @_yp - e.pageY
@scrollBy(xDiff, yDiff)
@_xp = e.pageX
@_yp = e.pageY
@_pixelsMoved += Math.abs(xDiff) + Math.abs(yDiff)
)
@container.on('mouseup mouseleave', @stopgrab)
$(window).on('blur', @stopgrab)
@container.click((e) =>
if @_pixelsMoved > 5
# If we drag the surface, but happened to click a link, don't trigger
# the link's default click handler. This depends on there being no
# more-specific click event handlers. One way to achieve this is by
# using jq 'live' events.
e.preventDefault()
e.stopPropagation()
)
enable: =>
@active = true
disable: =>
@active = false
@stopgrab()
destroy: =>
@unbindEvents()
unbindEvents: =>
@container.off('mousedown mousemove click mouseup mouseleave')
$(window).off('blur', @stopgrab) | 208518 | # ScrollView - jQuery plugin 0.1
#
# This plugin supplies contents view by grab and drag scroll.
#
# Copyright (c) 2009 <NAME>
# Modified 2009 by <NAME>
#
# Released under the MIT license.
class DraggableSurface
constructor: (@container, @scrollBy) ->
@container = $(@container)
@active = true
@_isGrabbing = false
@_xp = @_yp = @_grabbedNode = null
@bindEvents()
dragClass: 'scrollview-dragging'
startgrab: (target) =>
@_isGrabbing = true
@_grabbedNode = target
@container.addClass(@dragClass)
@container.trigger('start-drag')
stopgrab: =>
@_isGrabbing = false
@container.removeClass(@dragClass)
@container.trigger('stop-drag')
@_grabbedNode = null
bindEvents: =>
@container.mousedown((e) =>
# Tracks how many pixels we 'just' moved, so we can check
# on a click event. Value should be zero if either the last
# mousedown didn't initiate a drag, or if it did but the user
# didn't move it [much].
@_pixelsMoved = 0
# Abort early if not active, so that we don't preventDefault
# on an event someone else wants
# Added right click detection to hijack those from chrome's
# finicky handling
if not (@active and (e.which ==1 or e.which==3))
return
# This really is a grab -- preventdefault so the browser doesn't
# search for text-selections to make while the mouse is down.
# (Big performance issue on Chrome, see:
# http://code.google.com/p/chromium/issues/detail?id=103148 )
e.preventDefault()
# only left clicks cause dragging
if e.which == 1
# Start grabbing
@startgrab(e.target)
@_xp = e.pageX
@_yp = e.pageY
)
@container.mousemove((e) =>
if not @_isGrabbing
return true
xDiff = @_xp - e.pageX
yDiff = @_yp - e.pageY
@scrollBy(xDiff, yDiff)
@_xp = e.pageX
@_yp = e.pageY
@_pixelsMoved += Math.abs(xDiff) + Math.abs(yDiff)
)
@container.on('mouseup mouseleave', @stopgrab)
$(window).on('blur', @stopgrab)
@container.click((e) =>
if @_pixelsMoved > 5
# If we drag the surface, but happened to click a link, don't trigger
# the link's default click handler. This depends on there being no
# more-specific click event handlers. One way to achieve this is by
# using jq 'live' events.
e.preventDefault()
e.stopPropagation()
)
enable: =>
@active = true
disable: =>
@active = false
@stopgrab()
destroy: =>
@unbindEvents()
unbindEvents: =>
@container.off('mousedown mousemove click mouseup mouseleave')
$(window).off('blur', @stopgrab) | true | # ScrollView - jQuery plugin 0.1
#
# This plugin supplies contents view by grab and drag scroll.
#
# Copyright (c) 2009 PI:NAME:<NAME>END_PI
# Modified 2009 by PI:NAME:<NAME>END_PI
#
# Released under the MIT license.
class DraggableSurface
constructor: (@container, @scrollBy) ->
@container = $(@container)
@active = true
@_isGrabbing = false
@_xp = @_yp = @_grabbedNode = null
@bindEvents()
dragClass: 'scrollview-dragging'
startgrab: (target) =>
@_isGrabbing = true
@_grabbedNode = target
@container.addClass(@dragClass)
@container.trigger('start-drag')
stopgrab: =>
@_isGrabbing = false
@container.removeClass(@dragClass)
@container.trigger('stop-drag')
@_grabbedNode = null
bindEvents: =>
@container.mousedown((e) =>
# Tracks how many pixels we 'just' moved, so we can check
# on a click event. Value should be zero if either the last
# mousedown didn't initiate a drag, or if it did but the user
# didn't move it [much].
@_pixelsMoved = 0
# Abort early if not active, so that we don't preventDefault
# on an event someone else wants
# Added right click detection to hijack those from chrome's
# finicky handling
if not (@active and (e.which ==1 or e.which==3))
return
# This really is a grab -- preventdefault so the browser doesn't
# search for text-selections to make while the mouse is down.
# (Big performance issue on Chrome, see:
# http://code.google.com/p/chromium/issues/detail?id=103148 )
e.preventDefault()
# only left clicks cause dragging
if e.which == 1
# Start grabbing
@startgrab(e.target)
@_xp = e.pageX
@_yp = e.pageY
)
@container.mousemove((e) =>
if not @_isGrabbing
return true
xDiff = @_xp - e.pageX
yDiff = @_yp - e.pageY
@scrollBy(xDiff, yDiff)
@_xp = e.pageX
@_yp = e.pageY
@_pixelsMoved += Math.abs(xDiff) + Math.abs(yDiff)
)
@container.on('mouseup mouseleave', @stopgrab)
$(window).on('blur', @stopgrab)
@container.click((e) =>
if @_pixelsMoved > 5
# If we drag the surface, but happened to click a link, don't trigger
# the link's default click handler. This depends on there being no
# more-specific click event handlers. One way to achieve this is by
# using jq 'live' events.
e.preventDefault()
e.stopPropagation()
)
enable: =>
@active = true
disable: =>
@active = false
@stopgrab()
destroy: =>
@unbindEvents()
unbindEvents: =>
@container.off('mousedown mousemove click mouseup mouseleave')
$(window).off('blur', @stopgrab) |
[
{
"context": "# JSONRequest created by Jordan Dobson on 17 Nov 2016 - @jordandobson - jordandobson@gma",
"end": 38,
"score": 0.9998911023139954,
"start": 25,
"tag": "NAME",
"value": "Jordan Dobson"
},
{
"context": "NRequest created by Jordan Dobson on 17 Nov 2016 - @jordandobson - jo... | JSONRequest.coffee | Seattle-Framer/modules-Seattle | 12 | # JSONRequest created by Jordan Dobson on 17 Nov 2016 - @jordandobson - jordandobson@gmail.com
class exports.JSONRequest extends Framer.BaseClass
# https://davidwalsh.name/xmlhttprequest
DATA = "data"
EVENT = "event"
@LOAD = LOAD = "load"
@ABORT = ABORT = "abort"
@ERROR = ERROR = "error"
@PROGRESS = PROGRESS = "progress"
Events.JSONLoaded = "JSONRequest.#{LOAD }"
Events.JSONAbort = "JSONRequest.#{ABORT }"
Events.JSONError = "JSONRequest.#{ERROR }"
Events.JSONProgress = "JSONRequest.#{PROGRESS}"
Events.JSONEvent = "JSONRequest.#{EVENT }"
MSG_LOAD_ERROR = "Data didn't load"
MSG_LOAD_ABORT = "Loading Aborted"
@define DATA, get: -> @_getPropertyValue DATA
constructor: (options={}) ->
@request = @url = @response = @event = null
super options
@url = options.url
@request = new XMLHttpRequest()
setupEvents @
# Public Methods #####################################################
get: (url) ->
@request.open 'GET', url or @url, true
@request.send null
# Private Functions ##################################################
setupEvents = (self) ->
self.request.addEventListener LOAD, ((e) -> handleResponse self, e), false
self.request.addEventListener PROGRESS, ((e) -> handleResponse self, e), false
self.request.addEventListener ERROR, ((e) -> handleResponse self, e), false
self.request.addEventListener ABORT, ((e) -> handleResponse self, e), false
handleResponse = (self, event) ->
switch event.type
when LOAD then handleData self, event
when ERROR then handleException self, MSG_LOAD_ERROR, ERROR
when ABORT then handleException self, MSG_LOAD_ABORT, ABORT
when PROGRESS then handleProgress self
handleData = (self, event) ->
response = event.target.response
jsonParse = JSON.parse response
self.response = response
self._setPropertyValue DATA, jsonParse
self.emit Events.JSONLoaded, jsonParse
self.emit Events.JSONEvent, LOAD, jsonParse
handleProgress = (self) ->
self.emit Events.JSONProgress
self.emit Events.JSONEvent, PROGRESS
handleException = (self, message, type) ->
self.emit Events.JSONAbort if type is ABORT
self.emit Events.JSONError if type is ERROR
self.emit Events.JSONEvent, ERROR if type is ERROR
self.emit Events.JSONEvent, ABORT if type is ABORT
requestExceptionError ERROR, "JSONRequest from URL: #{self.url}" if type is ERROR
requestExceptionError ABORT, "JSONRequest aborted from URL: #{self.url}" if type is ABORT
# Fix Add Error Display
requestExceptionError = (type, message) ->
Framer.Extras.ErrorDisplay.enable()
if type is ERROR
console.error message
throw Error message
else
console.warn message if type is ABORT
| 179491 | # JSONRequest created by <NAME> on 17 Nov 2016 - @jordandobson - <EMAIL>
class exports.JSONRequest extends Framer.BaseClass
# https://davidwalsh.name/xmlhttprequest
DATA = "data"
EVENT = "event"
@LOAD = LOAD = "load"
@ABORT = ABORT = "abort"
@ERROR = ERROR = "error"
@PROGRESS = PROGRESS = "progress"
Events.JSONLoaded = "JSONRequest.#{LOAD }"
Events.JSONAbort = "JSONRequest.#{ABORT }"
Events.JSONError = "JSONRequest.#{ERROR }"
Events.JSONProgress = "JSONRequest.#{PROGRESS}"
Events.JSONEvent = "JSONRequest.#{EVENT }"
MSG_LOAD_ERROR = "Data didn't load"
MSG_LOAD_ABORT = "Loading Aborted"
@define DATA, get: -> @_getPropertyValue DATA
constructor: (options={}) ->
@request = @url = @response = @event = null
super options
@url = options.url
@request = new XMLHttpRequest()
setupEvents @
# Public Methods #####################################################
get: (url) ->
@request.open 'GET', url or @url, true
@request.send null
# Private Functions ##################################################
setupEvents = (self) ->
self.request.addEventListener LOAD, ((e) -> handleResponse self, e), false
self.request.addEventListener PROGRESS, ((e) -> handleResponse self, e), false
self.request.addEventListener ERROR, ((e) -> handleResponse self, e), false
self.request.addEventListener ABORT, ((e) -> handleResponse self, e), false
handleResponse = (self, event) ->
switch event.type
when LOAD then handleData self, event
when ERROR then handleException self, MSG_LOAD_ERROR, ERROR
when ABORT then handleException self, MSG_LOAD_ABORT, ABORT
when PROGRESS then handleProgress self
handleData = (self, event) ->
response = event.target.response
jsonParse = JSON.parse response
self.response = response
self._setPropertyValue DATA, jsonParse
self.emit Events.JSONLoaded, jsonParse
self.emit Events.JSONEvent, LOAD, jsonParse
handleProgress = (self) ->
self.emit Events.JSONProgress
self.emit Events.JSONEvent, PROGRESS
handleException = (self, message, type) ->
self.emit Events.JSONAbort if type is ABORT
self.emit Events.JSONError if type is ERROR
self.emit Events.JSONEvent, ERROR if type is ERROR
self.emit Events.JSONEvent, ABORT if type is ABORT
requestExceptionError ERROR, "JSONRequest from URL: #{self.url}" if type is ERROR
requestExceptionError ABORT, "JSONRequest aborted from URL: #{self.url}" if type is ABORT
# Fix Add Error Display
requestExceptionError = (type, message) ->
Framer.Extras.ErrorDisplay.enable()
if type is ERROR
console.error message
throw Error message
else
console.warn message if type is ABORT
| true | # JSONRequest created by PI:NAME:<NAME>END_PI on 17 Nov 2016 - @jordandobson - PI:EMAIL:<EMAIL>END_PI
class exports.JSONRequest extends Framer.BaseClass
# https://davidwalsh.name/xmlhttprequest
DATA = "data"
EVENT = "event"
@LOAD = LOAD = "load"
@ABORT = ABORT = "abort"
@ERROR = ERROR = "error"
@PROGRESS = PROGRESS = "progress"
Events.JSONLoaded = "JSONRequest.#{LOAD }"
Events.JSONAbort = "JSONRequest.#{ABORT }"
Events.JSONError = "JSONRequest.#{ERROR }"
Events.JSONProgress = "JSONRequest.#{PROGRESS}"
Events.JSONEvent = "JSONRequest.#{EVENT }"
MSG_LOAD_ERROR = "Data didn't load"
MSG_LOAD_ABORT = "Loading Aborted"
@define DATA, get: -> @_getPropertyValue DATA
constructor: (options={}) ->
@request = @url = @response = @event = null
super options
@url = options.url
@request = new XMLHttpRequest()
setupEvents @
# Public Methods #####################################################
get: (url) ->
@request.open 'GET', url or @url, true
@request.send null
# Private Functions ##################################################
setupEvents = (self) ->
self.request.addEventListener LOAD, ((e) -> handleResponse self, e), false
self.request.addEventListener PROGRESS, ((e) -> handleResponse self, e), false
self.request.addEventListener ERROR, ((e) -> handleResponse self, e), false
self.request.addEventListener ABORT, ((e) -> handleResponse self, e), false
handleResponse = (self, event) ->
switch event.type
when LOAD then handleData self, event
when ERROR then handleException self, MSG_LOAD_ERROR, ERROR
when ABORT then handleException self, MSG_LOAD_ABORT, ABORT
when PROGRESS then handleProgress self
handleData = (self, event) ->
response = event.target.response
jsonParse = JSON.parse response
self.response = response
self._setPropertyValue DATA, jsonParse
self.emit Events.JSONLoaded, jsonParse
self.emit Events.JSONEvent, LOAD, jsonParse
handleProgress = (self) ->
self.emit Events.JSONProgress
self.emit Events.JSONEvent, PROGRESS
handleException = (self, message, type) ->
self.emit Events.JSONAbort if type is ABORT
self.emit Events.JSONError if type is ERROR
self.emit Events.JSONEvent, ERROR if type is ERROR
self.emit Events.JSONEvent, ABORT if type is ABORT
requestExceptionError ERROR, "JSONRequest from URL: #{self.url}" if type is ERROR
requestExceptionError ABORT, "JSONRequest aborted from URL: #{self.url}" if type is ABORT
# Fix Add Error Display
requestExceptionError = (type, message) ->
Framer.Extras.ErrorDisplay.enable()
if type is ERROR
console.error message
throw Error message
else
console.warn message if type is ABORT
|
[
{
"context": " 'M-SEARCH * HTTP/1.1\\r\\n\\\n HOST: 239.255.255.250:1900\\r\\n\\\n MAN: \"ssdp:discover\"\\r",
"end": 2900,
"score": 0.9997106194496155,
"start": 2885,
"tag": "IP_ADDRESS",
"value": "239.255.255.250"
},
{
"context": "ast group\n search... | lib/roku-develop-ssdp.coffee | entrez/roku-develop | 15 | http = require 'http'
dgram = require 'dgram'
# RegEx to extract ip addr/serial number from M-SEARCH and NOTIFY responses
reIpAddr = /\r\nLocation\s*:\s*(?:.*?:\/\/)?([^:\/\r\n]+)/i
reSerialNumber = /\r\nUSN:\s*uuid:roku:ecp:\s*([A-Z0-9]+)/i
# Use a regular expression to extract a field from some data,
# returning an empty string if the field is not found
extract = (re, data) ->
m = re.exec data
if Array.isArray(m) and m.length is 2 then m[1] else ''
# Extract device details from a device's ECP response
# Not terribly efficient, but it doesn't need to be
parseDeviceDetails = (ipAddr, serialNumber, data) ->
sn = serialNumber or extract(/<serialNumber>(.*?)<\/serialNumber>/i, data)
{
ipAddr: ipAddr
serialNumber: sn
friendlyName: extract(/<friendlyName>(.*?)<\/friendlyName>/i, data)
modelName: extract(/<modelName>(.*?)<\/modelName>/i, data)
modelNumber: extract(/<modelNumber>(.*?)<\/modelNumber>/i, data)
}
# Send an ECP request to the device to get its details
# Invoke the callback to pass the device details back to the caller
deviceDiscovered = (ipAddr, serialNumber, discoveryCallback, autoDiscover) ->
bufferList = []
req = http.request({host: ipAddr, port: 8060, family: 4}, (res) =>
res.on('data', (chunk) =>
bufferList.push chunk
)
res.on('end', () =>
response = Buffer.concat(bufferList).toString()
details = parseDeviceDetails ipAddr, serialNumber, response
if details.serialNumber
discoveryCallback details, autoDiscover
)
)
# A 'socket' event is emitted after a socket is assigned to the request
# Handle this event to set a timeout on the socket connection
# This is instead of setting the timeout when http.request() is called,
# which would only be emitted after the socket is assigned and is connected,
# and would not detect a timeout while trying to establish the connection
req.on('socket', (socket) =>
socket.setTimeout 10000
socket.on('timeout', () =>
console.log 'deviceDiscovered socket timeout'
# A timeout does not abort the connection; it has to be done manually
# This will cause a createHangUpError error to be emitted on the request
req.abort()
)
)
# Even if there is an error on the ECP request, invoke the
# discoveryCallback with the known ip address and serial number
req.on('error', (error) =>
details = parseDeviceDetails ipAddr, serialNumber, ''
if details.serialNumber
discoveryCallback details, autoDiscover
console.warn 'ECP request to %s failed: %O', ipAddr, error
)
# The ECP request has an empty body
req.write('')
# Send the ECP request
req.end()
# Send an SSDP M-SEARCH discovery request
ssdpSearchRequest = (discoveryCallback) ->
ssdpRequest = new Buffer(
'M-SEARCH * HTTP/1.1\r\n\
HOST: 239.255.255.250:1900\r\n\
MAN: "ssdp:discover"\r\n\
ST: roku:ecp\r\n\
MX: 3\r\n\
\r\n'\
)
searchSocket = dgram.createSocket 'udp4'
searchSocket.on('message', (msg, rinfo) =>
ssdpResponse = msg.toString()
serialNumber = extract reSerialNumber, ssdpResponse
ipAddr = extract reIpAddr, ssdpResponse
#console.log 'M-SEARCH Response:', ssdpResponse
# Only add devices that have an ip address and serial number
# This will trigger an ECP request to get the device details
if ipAddr and serialNumber
deviceDiscovered ipAddr, serialNumber, discoveryCallback, true
)
# Send the M-SEARCH request to the SSDP multicast group
searchSocket.send ssdpRequest, 1900, '239.255.255.250'
# Listen for SSDP discovery NOTIFY responses
# These should be received whenever a device connects to the network
ssdpNotify = (discoveryCallback) ->
notifySocket = dgram.createSocket {type: 'udp4', reuseAddr: true}
notifySocket.on('message', (msg, rinfo) =>
ssdpResponse = msg.toString()
#console.log 'NOTIFY response', ssdpResponse
serialNumber = extract reSerialNumber, ssdpResponse
ipAddr = extract reIpAddr, ssdpResponse
# Only add devices that have an ip address AND Roku serial number,
# to avoid sending ECP requests to non-Roku devices.
if ipAddr and serialNumber
deviceDiscovered ipAddr, serialNumber, discoveryCallback, true
)
# Handle errors on the NOTIFY socket
# Note that in some cases an exception may be thrown,
# hence the try-catch statement in the bind callback
notifySocket.on('error', (err) =>
console.warn 'NOTIFY -- error: %O', err
)
# SSDP NOTIFY responses are directed to port 1900
notifySocket.bind(1900, () =>
try
# Prevent receipt of local SSDP M-SEARCH requests
notifySocket.setMulticastLoopback false
# Join the SSDP multicast group so we can receive SSDP NOTIFY responses
notifySocket.addMembership '239.255.255.250'
catch e
console.warn 'NOTIFY -- bind exception: %O', e
)
# If the network connection drops, then no further NOTIFY responses
# will be received on the bound port
# Since there is no indication of a network connection failure,
# after a predetermined timeout, close then re-establish the connection
setTimeout( () =>
try
notifySocket.close( () => ssdpNotify discoveryCallback )
catch e
console.warn 'NOTIFY -- Exception when trying to close socket: %O', e
, 5 * 60 * 1000 )
# The SSDP protocol, which uses UDP datagrams, is inherently flaky
# M-SEARCH responses are not guaranteed to be received.
# To make allowances for this, send out multiple M-SEARCH requests
ssdpSearch = (discoveryCallback) ->
setTimeout ssdpSearchRequest, 0, discoveryCallback
setTimeout ssdpSearchRequest, 15000, discoveryCallback
setTimeout ssdpSearchRequest, 30000, discoveryCallback
#setInterval ssdpSearchRequest, 120000, discoveryCallback
class RokuSSDP
# Initiate SSDP discovery
@discover = (discoveryCallback) ->
ssdpSearch discoveryCallback
ssdpNotify discoveryCallback
# Attempt to acquire device details from a user-entered, non-discovered
# device, for which the serial number is unknown
@ecp = (ipAddr, discoveryCallback) ->
deviceDiscovered ipAddr, '', discoveryCallback, false
module.exports = RokuSSDP
| 8561 | http = require 'http'
dgram = require 'dgram'
# RegEx to extract ip addr/serial number from M-SEARCH and NOTIFY responses
reIpAddr = /\r\nLocation\s*:\s*(?:.*?:\/\/)?([^:\/\r\n]+)/i
reSerialNumber = /\r\nUSN:\s*uuid:roku:ecp:\s*([A-Z0-9]+)/i
# Use a regular expression to extract a field from some data,
# returning an empty string if the field is not found
extract = (re, data) ->
m = re.exec data
if Array.isArray(m) and m.length is 2 then m[1] else ''
# Extract device details from a device's ECP response
# Not terribly efficient, but it doesn't need to be
parseDeviceDetails = (ipAddr, serialNumber, data) ->
sn = serialNumber or extract(/<serialNumber>(.*?)<\/serialNumber>/i, data)
{
ipAddr: ipAddr
serialNumber: sn
friendlyName: extract(/<friendlyName>(.*?)<\/friendlyName>/i, data)
modelName: extract(/<modelName>(.*?)<\/modelName>/i, data)
modelNumber: extract(/<modelNumber>(.*?)<\/modelNumber>/i, data)
}
# Send an ECP request to the device to get its details
# Invoke the callback to pass the device details back to the caller
deviceDiscovered = (ipAddr, serialNumber, discoveryCallback, autoDiscover) ->
bufferList = []
req = http.request({host: ipAddr, port: 8060, family: 4}, (res) =>
res.on('data', (chunk) =>
bufferList.push chunk
)
res.on('end', () =>
response = Buffer.concat(bufferList).toString()
details = parseDeviceDetails ipAddr, serialNumber, response
if details.serialNumber
discoveryCallback details, autoDiscover
)
)
# A 'socket' event is emitted after a socket is assigned to the request
# Handle this event to set a timeout on the socket connection
# This is instead of setting the timeout when http.request() is called,
# which would only be emitted after the socket is assigned and is connected,
# and would not detect a timeout while trying to establish the connection
req.on('socket', (socket) =>
socket.setTimeout 10000
socket.on('timeout', () =>
console.log 'deviceDiscovered socket timeout'
# A timeout does not abort the connection; it has to be done manually
# This will cause a createHangUpError error to be emitted on the request
req.abort()
)
)
# Even if there is an error on the ECP request, invoke the
# discoveryCallback with the known ip address and serial number
req.on('error', (error) =>
details = parseDeviceDetails ipAddr, serialNumber, ''
if details.serialNumber
discoveryCallback details, autoDiscover
console.warn 'ECP request to %s failed: %O', ipAddr, error
)
# The ECP request has an empty body
req.write('')
# Send the ECP request
req.end()
# Send an SSDP M-SEARCH discovery request
ssdpSearchRequest = (discoveryCallback) ->
ssdpRequest = new Buffer(
'M-SEARCH * HTTP/1.1\r\n\
HOST: 192.168.127.12:1900\r\n\
MAN: "ssdp:discover"\r\n\
ST: roku:ecp\r\n\
MX: 3\r\n\
\r\n'\
)
searchSocket = dgram.createSocket 'udp4'
searchSocket.on('message', (msg, rinfo) =>
ssdpResponse = msg.toString()
serialNumber = extract reSerialNumber, ssdpResponse
ipAddr = extract reIpAddr, ssdpResponse
#console.log 'M-SEARCH Response:', ssdpResponse
# Only add devices that have an ip address and serial number
# This will trigger an ECP request to get the device details
if ipAddr and serialNumber
deviceDiscovered ipAddr, serialNumber, discoveryCallback, true
)
# Send the M-SEARCH request to the SSDP multicast group
searchSocket.send ssdpRequest, 1900, '192.168.127.12'
# Listen for SSDP discovery NOTIFY responses
# These should be received whenever a device connects to the network
ssdpNotify = (discoveryCallback) ->
notifySocket = dgram.createSocket {type: 'udp4', reuseAddr: true}
notifySocket.on('message', (msg, rinfo) =>
ssdpResponse = msg.toString()
#console.log 'NOTIFY response', ssdpResponse
serialNumber = extract reSerialNumber, ssdpResponse
ipAddr = extract reIpAddr, ssdpResponse
# Only add devices that have an ip address AND Roku serial number,
# to avoid sending ECP requests to non-Roku devices.
if ipAddr and serialNumber
deviceDiscovered ipAddr, serialNumber, discoveryCallback, true
)
# Handle errors on the NOTIFY socket
# Note that in some cases an exception may be thrown,
# hence the try-catch statement in the bind callback
notifySocket.on('error', (err) =>
console.warn 'NOTIFY -- error: %O', err
)
# SSDP NOTIFY responses are directed to port 1900
notifySocket.bind(1900, () =>
try
# Prevent receipt of local SSDP M-SEARCH requests
notifySocket.setMulticastLoopback false
# Join the SSDP multicast group so we can receive SSDP NOTIFY responses
notifySocket.addMembership '192.168.127.12'
catch e
console.warn 'NOTIFY -- bind exception: %O', e
)
# If the network connection drops, then no further NOTIFY responses
# will be received on the bound port
# Since there is no indication of a network connection failure,
# after a predetermined timeout, close then re-establish the connection
setTimeout( () =>
try
notifySocket.close( () => ssdpNotify discoveryCallback )
catch e
console.warn 'NOTIFY -- Exception when trying to close socket: %O', e
, 5 * 60 * 1000 )
# The SSDP protocol, which uses UDP datagrams, is inherently flaky
# M-SEARCH responses are not guaranteed to be received.
# To make allowances for this, send out multiple M-SEARCH requests
ssdpSearch = (discoveryCallback) ->
setTimeout ssdpSearchRequest, 0, discoveryCallback
setTimeout ssdpSearchRequest, 15000, discoveryCallback
setTimeout ssdpSearchRequest, 30000, discoveryCallback
#setInterval ssdpSearchRequest, 120000, discoveryCallback
class RokuSSDP
# Initiate SSDP discovery
@discover = (discoveryCallback) ->
ssdpSearch discoveryCallback
ssdpNotify discoveryCallback
# Attempt to acquire device details from a user-entered, non-discovered
# device, for which the serial number is unknown
@ecp = (ipAddr, discoveryCallback) ->
deviceDiscovered ipAddr, '', discoveryCallback, false
module.exports = RokuSSDP
| true | http = require 'http'
dgram = require 'dgram'
# RegEx to extract ip addr/serial number from M-SEARCH and NOTIFY responses
reIpAddr = /\r\nLocation\s*:\s*(?:.*?:\/\/)?([^:\/\r\n]+)/i
reSerialNumber = /\r\nUSN:\s*uuid:roku:ecp:\s*([A-Z0-9]+)/i
# Use a regular expression to extract a field from some data,
# returning an empty string if the field is not found
extract = (re, data) ->
m = re.exec data
if Array.isArray(m) and m.length is 2 then m[1] else ''
# Extract device details from a device's ECP response
# Not terribly efficient, but it doesn't need to be
parseDeviceDetails = (ipAddr, serialNumber, data) ->
sn = serialNumber or extract(/<serialNumber>(.*?)<\/serialNumber>/i, data)
{
ipAddr: ipAddr
serialNumber: sn
friendlyName: extract(/<friendlyName>(.*?)<\/friendlyName>/i, data)
modelName: extract(/<modelName>(.*?)<\/modelName>/i, data)
modelNumber: extract(/<modelNumber>(.*?)<\/modelNumber>/i, data)
}
# Send an ECP request to the device to get its details
# Invoke the callback to pass the device details back to the caller
deviceDiscovered = (ipAddr, serialNumber, discoveryCallback, autoDiscover) ->
bufferList = []
req = http.request({host: ipAddr, port: 8060, family: 4}, (res) =>
res.on('data', (chunk) =>
bufferList.push chunk
)
res.on('end', () =>
response = Buffer.concat(bufferList).toString()
details = parseDeviceDetails ipAddr, serialNumber, response
if details.serialNumber
discoveryCallback details, autoDiscover
)
)
# A 'socket' event is emitted after a socket is assigned to the request
# Handle this event to set a timeout on the socket connection
# This is instead of setting the timeout when http.request() is called,
# which would only be emitted after the socket is assigned and is connected,
# and would not detect a timeout while trying to establish the connection
req.on('socket', (socket) =>
socket.setTimeout 10000
socket.on('timeout', () =>
console.log 'deviceDiscovered socket timeout'
# A timeout does not abort the connection; it has to be done manually
# This will cause a createHangUpError error to be emitted on the request
req.abort()
)
)
# Even if there is an error on the ECP request, invoke the
# discoveryCallback with the known ip address and serial number
req.on('error', (error) =>
details = parseDeviceDetails ipAddr, serialNumber, ''
if details.serialNumber
discoveryCallback details, autoDiscover
console.warn 'ECP request to %s failed: %O', ipAddr, error
)
# The ECP request has an empty body
req.write('')
# Send the ECP request
req.end()
# Send an SSDP M-SEARCH discovery request
ssdpSearchRequest = (discoveryCallback) ->
ssdpRequest = new Buffer(
'M-SEARCH * HTTP/1.1\r\n\
HOST: PI:IP_ADDRESS:192.168.127.12END_PI:1900\r\n\
MAN: "ssdp:discover"\r\n\
ST: roku:ecp\r\n\
MX: 3\r\n\
\r\n'\
)
searchSocket = dgram.createSocket 'udp4'
searchSocket.on('message', (msg, rinfo) =>
ssdpResponse = msg.toString()
serialNumber = extract reSerialNumber, ssdpResponse
ipAddr = extract reIpAddr, ssdpResponse
#console.log 'M-SEARCH Response:', ssdpResponse
# Only add devices that have an ip address and serial number
# This will trigger an ECP request to get the device details
if ipAddr and serialNumber
deviceDiscovered ipAddr, serialNumber, discoveryCallback, true
)
# Send the M-SEARCH request to the SSDP multicast group
searchSocket.send ssdpRequest, 1900, 'PI:IP_ADDRESS:192.168.127.12END_PI'
# Listen for SSDP discovery NOTIFY responses
# These should be received whenever a device connects to the network
ssdpNotify = (discoveryCallback) ->
notifySocket = dgram.createSocket {type: 'udp4', reuseAddr: true}
notifySocket.on('message', (msg, rinfo) =>
ssdpResponse = msg.toString()
#console.log 'NOTIFY response', ssdpResponse
serialNumber = extract reSerialNumber, ssdpResponse
ipAddr = extract reIpAddr, ssdpResponse
# Only add devices that have an ip address AND Roku serial number,
# to avoid sending ECP requests to non-Roku devices.
if ipAddr and serialNumber
deviceDiscovered ipAddr, serialNumber, discoveryCallback, true
)
# Handle errors on the NOTIFY socket
# Note that in some cases an exception may be thrown,
# hence the try-catch statement in the bind callback
notifySocket.on('error', (err) =>
console.warn 'NOTIFY -- error: %O', err
)
# SSDP NOTIFY responses are directed to port 1900
notifySocket.bind(1900, () =>
try
# Prevent receipt of local SSDP M-SEARCH requests
notifySocket.setMulticastLoopback false
# Join the SSDP multicast group so we can receive SSDP NOTIFY responses
notifySocket.addMembership 'PI:IP_ADDRESS:192.168.127.12END_PI'
catch e
console.warn 'NOTIFY -- bind exception: %O', e
)
# If the network connection drops, then no further NOTIFY responses
# will be received on the bound port
# Since there is no indication of a network connection failure,
# after a predetermined timeout, close then re-establish the connection
setTimeout( () =>
try
notifySocket.close( () => ssdpNotify discoveryCallback )
catch e
console.warn 'NOTIFY -- Exception when trying to close socket: %O', e
, 5 * 60 * 1000 )
# The SSDP protocol, which uses UDP datagrams, is inherently flaky
# M-SEARCH responses are not guaranteed to be received.
# To make allowances for this, send out multiple M-SEARCH requests
ssdpSearch = (discoveryCallback) ->
setTimeout ssdpSearchRequest, 0, discoveryCallback
setTimeout ssdpSearchRequest, 15000, discoveryCallback
setTimeout ssdpSearchRequest, 30000, discoveryCallback
#setInterval ssdpSearchRequest, 120000, discoveryCallback
class RokuSSDP
# Initiate SSDP discovery
@discover = (discoveryCallback) ->
ssdpSearch discoveryCallback
ssdpNotify discoveryCallback
# Attempt to acquire device details from a user-entered, non-discovered
# device, for which the serial number is unknown
@ecp = (ipAddr, discoveryCallback) ->
deviceDiscovered ipAddr, '', discoveryCallback, false
module.exports = RokuSSDP
|
[
{
"context": "rsion 1.0.0\n@file Alert.js\n@author Welington Sampaio (http://welington.zaez.net/)\n@contact http://",
"end": 146,
"score": 0.9999037981033325,
"start": 129,
"tag": "NAME",
"value": "Welington Sampaio"
},
{
"context": "n.zaez.net/site/contato\n\n@co... | vendor/assets/javascripts/lol_framework/Alert.coffee | welingtonsampaio/lol-framework | 1 | ###
@summary Lol Framework
@description Framework of RIAs applications
@version 1.0.0
@file Alert.js
@author Welington Sampaio (http://welington.zaez.net/)
@contact http://welington.zaez.net/site/contato
@copyright Copyright 2012 Welington Sampaio, all rights reserved.
This source file is free software, under the license MIT, available at:
http://lolframework.zaez.net/license
This source file is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the license files for details.
For details please refer to: http://welington.zaez.net
###
###
Create a new instance of Alert.
@classDescription This class creates a new Alert.
@param {Object} Receives configuration to create the Alert, @see Lol.alert.defaults
@return {Alert} Returns a new Alert.
@type {Object}
@example
*-* Create manual alert *-*
var lol_alert = new Lol.Alert({
debug : false,
autoRemove : true,
type : 'success',
message : 'Success to create a new object LolAlert',
delayRemove: 7000,
objects : {
containerID: '#alerts',
classes: {
container: 'alerts',
success : 'alert-success',
error : 'alert-error',
warning : 'alert-warning',
info : 'alert-info'
}
}
});
###
class Lol.Alert extends Lol.Core
# declaration of variables
debugPrefix : 'Lol_Alert'
namespace : '.alert'
# the methods
constructor: (args={})->
return false unless @verifyJQuery()
@settings = jQuery.extend true, {}, Lol.alert.defaults, args
@generateId()
return false unless @setContainer()
@createAlert()
@setInterval()
createClose: ->
close = jQuery '<button type="button" class="close" data-dismiss="alert">×</button>'
@alert.append close
createAlert: ->
@debug 'Create an object Alert'
@alert = jQuery '<div></div>'
@alert.addClass "alert #{@settings.objects.classes[@settings.type]}"
@alert.append @settings.message
@createClose()
@alert.appendTo @container
destroy: ->
@debug 'Initializing the destroy method'
@alert.fadeOut ->
jQuery(@).alert 'close'
clearInterval(@interval) if @settings.autoRemove
super
setContainer: ->
@debug 'Setting a container object'
if not jQuery( @settings.objects.containerID )
throw "Required container Alert: #{@settings.objects.containerID}"
return false
@container = jQuery @settings.objects.containerID
setInterval: ->
@debug 'Setting interval?',@settings.autoRemove, 'With delay:',@settings.delayRemove
_this = @
@interval = setInterval(->
_this.destroy()
, @settings.delayRemove) if @settings.autoRemove
Lol.alert =
defaults:
debug : false
autoRemove : true
type : 'success' # Options success | error | warning | info
message : null
delayRemove: 7000
objects :
containerID: '#alerts'
classes:
container: 'alerts'
success : 'alert-success'
error : 'alert-error'
warning : 'alert-warning'
info : 'alert-info'
| 197181 | ###
@summary Lol Framework
@description Framework of RIAs applications
@version 1.0.0
@file Alert.js
@author <NAME> (http://welington.zaez.net/)
@contact http://welington.zaez.net/site/contato
@copyright Copyright 2012 <NAME>, all rights reserved.
This source file is free software, under the license MIT, available at:
http://lolframework.zaez.net/license
This source file is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the license files for details.
For details please refer to: http://welington.zaez.net
###
###
Create a new instance of Alert.
@classDescription This class creates a new Alert.
@param {Object} Receives configuration to create the Alert, @see Lol.alert.defaults
@return {Alert} Returns a new Alert.
@type {Object}
@example
*-* Create manual alert *-*
var lol_alert = new Lol.Alert({
debug : false,
autoRemove : true,
type : 'success',
message : 'Success to create a new object LolAlert',
delayRemove: 7000,
objects : {
containerID: '#alerts',
classes: {
container: 'alerts',
success : 'alert-success',
error : 'alert-error',
warning : 'alert-warning',
info : 'alert-info'
}
}
});
###
class Lol.Alert extends Lol.Core
# declaration of variables
debugPrefix : 'Lol_Alert'
namespace : '.alert'
# the methods
constructor: (args={})->
return false unless @verifyJQuery()
@settings = jQuery.extend true, {}, Lol.alert.defaults, args
@generateId()
return false unless @setContainer()
@createAlert()
@setInterval()
createClose: ->
close = jQuery '<button type="button" class="close" data-dismiss="alert">×</button>'
@alert.append close
createAlert: ->
@debug 'Create an object Alert'
@alert = jQuery '<div></div>'
@alert.addClass "alert #{@settings.objects.classes[@settings.type]}"
@alert.append @settings.message
@createClose()
@alert.appendTo @container
destroy: ->
@debug 'Initializing the destroy method'
@alert.fadeOut ->
jQuery(@).alert 'close'
clearInterval(@interval) if @settings.autoRemove
super
setContainer: ->
@debug 'Setting a container object'
if not jQuery( @settings.objects.containerID )
throw "Required container Alert: #{@settings.objects.containerID}"
return false
@container = jQuery @settings.objects.containerID
setInterval: ->
@debug 'Setting interval?',@settings.autoRemove, 'With delay:',@settings.delayRemove
_this = @
@interval = setInterval(->
_this.destroy()
, @settings.delayRemove) if @settings.autoRemove
Lol.alert =
defaults:
debug : false
autoRemove : true
type : 'success' # Options success | error | warning | info
message : null
delayRemove: 7000
objects :
containerID: '#alerts'
classes:
container: 'alerts'
success : 'alert-success'
error : 'alert-error'
warning : 'alert-warning'
info : 'alert-info'
| true | ###
@summary Lol Framework
@description Framework of RIAs applications
@version 1.0.0
@file Alert.js
@author PI:NAME:<NAME>END_PI (http://welington.zaez.net/)
@contact http://welington.zaez.net/site/contato
@copyright Copyright 2012 PI:NAME:<NAME>END_PI, all rights reserved.
This source file is free software, under the license MIT, available at:
http://lolframework.zaez.net/license
This source file is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the license files for details.
For details please refer to: http://welington.zaez.net
###
###
Create a new instance of Alert.
@classDescription This class creates a new Alert.
@param {Object} Receives configuration to create the Alert, @see Lol.alert.defaults
@return {Alert} Returns a new Alert.
@type {Object}
@example
*-* Create manual alert *-*
var lol_alert = new Lol.Alert({
debug : false,
autoRemove : true,
type : 'success',
message : 'Success to create a new object LolAlert',
delayRemove: 7000,
objects : {
containerID: '#alerts',
classes: {
container: 'alerts',
success : 'alert-success',
error : 'alert-error',
warning : 'alert-warning',
info : 'alert-info'
}
}
});
###
class Lol.Alert extends Lol.Core
# declaration of variables
debugPrefix : 'Lol_Alert'
namespace : '.alert'
# the methods
constructor: (args={})->
return false unless @verifyJQuery()
@settings = jQuery.extend true, {}, Lol.alert.defaults, args
@generateId()
return false unless @setContainer()
@createAlert()
@setInterval()
createClose: ->
close = jQuery '<button type="button" class="close" data-dismiss="alert">×</button>'
@alert.append close
createAlert: ->
@debug 'Create an object Alert'
@alert = jQuery '<div></div>'
@alert.addClass "alert #{@settings.objects.classes[@settings.type]}"
@alert.append @settings.message
@createClose()
@alert.appendTo @container
destroy: ->
@debug 'Initializing the destroy method'
@alert.fadeOut ->
jQuery(@).alert 'close'
clearInterval(@interval) if @settings.autoRemove
super
setContainer: ->
@debug 'Setting a container object'
if not jQuery( @settings.objects.containerID )
throw "Required container Alert: #{@settings.objects.containerID}"
return false
@container = jQuery @settings.objects.containerID
setInterval: ->
@debug 'Setting interval?',@settings.autoRemove, 'With delay:',@settings.delayRemove
_this = @
@interval = setInterval(->
_this.destroy()
, @settings.delayRemove) if @settings.autoRemove
Lol.alert =
defaults:
debug : false
autoRemove : true
type : 'success' # Options success | error | warning | info
message : null
delayRemove: 7000
objects :
containerID: '#alerts'
classes:
container: 'alerts'
success : 'alert-success'
error : 'alert-error'
warning : 'alert-warning'
info : 'alert-info'
|
[
{
"context": "\"+deploy_user\n host : deploy_user+\"@danielepelagatti.com\"\n\n\n gruntConfig.htmlmin = \n all :\n ",
"end": 9390,
"score": 0.9891127347946167,
"start": 9369,
"tag": "EMAIL",
"value": "@danielepelagatti.com"
}
] | Gruntfile.coffee | daniele-pelagatti/danielepelagatti.com | 0 | module.exports = (grunt)->
# Project configuration.
# debugger
env = grunt.option("env") || "prod";
deploy_user = grunt.option("user");
grunt.task.loadTasks("tasks")
require("load-grunt-tasks")(grunt);
rewriteModule = require("http-rewrite-middleware");
optional_files = [
"src/js/optional/three.js"
"src/js/optional/spin.js"
"src/js/optional/*.js"
"src/js/optional/shaders.js"
"src/js/optional/main.js"
]
essential_files = [
"src/js/essential/jquery.js"
"src/js/essential/jquery.leanModal.js"
"src/js/essential/essential.js"
]
gruntConfig =
pkg: grunt.file.readJSON("package.json")
gruntConfig.percolator =
compile:
source : gruntConfig.pkg.coffee_folder
output : gruntConfig.pkg.compiled_js
main : gruntConfig.pkg.percolator_main
compile : true
gruntConfig.compass =
compile:
options:
sassDir : gruntConfig.pkg.compass_folder
cssDir : gruntConfig.pkg.compass_output_folder
outputStyle : "expanded"
gruntConfig.glsl_threejs =
compile:
files : {}
gruntConfig.glsl_threejs.compile.files[gruntConfig.pkg.glsl_output_file] = [gruntConfig.pkg.glsl_folder+"/*.vert",gruntConfig.pkg.glsl_folder+"/*.frag"]
gruntConfig.watch =
options:
livereload : 35729
interrupt : true
coffee:
files: [gruntConfig.pkg.watch_folder+"/**/*.coffee"]
tasks: if env == "prod" then ["percolator","uglify:optional","notify:js"] else ["percolator","concat:optional","notify:js"]
glsl_threejs:
files: [gruntConfig.pkg.watch_folder+"/**/*.{frag,vert}"]
tasks: if env == "prod" then ["glsl_threejs","uglify:optional","notify:js"] else ["glsl_threejs","concat:optional","notify:js"]
compass:
files: [gruntConfig.pkg.watch_folder+"/**/*.{scss,sass}"]
tasks: if env == "prod" then ["compass","cssmin","notify:css"] else ["compass","concat:css","notify:css"]
jsonmin:
files: [gruntConfig.pkg.watch_folder+"/maya/data/*.json"]
tasks: if env == "prod" then ["jsonmin","notify:json"] else ["copy:json","notify:json"]
jade:
files: [gruntConfig.pkg.watch_folder+"/**/*.{jade,md}"]
tasks: if env == "prod" then ["compile_markdown_files","htmlmin","notify:markdown"] else ["compile_markdown_files","notify:markdown"]
uglify_essential :
files: [gruntConfig.pkg.watch_folder+"/js/essential/*.js"]
tasks: if env == "prod" then ["uglify:essential","notify:js"] else ["concat:essential","notify:js"]
imagemin :
files: [gruntConfig.pkg.watch_folder+"/images/**/*.{jpg,png,gif}"]
tasks: ["imagemin:site","notify:images"]
imagemin2 :
files: [gruntConfig.pkg.watch_folder+"/maya/images/**/*.{jpg,png,gif}"]
tasks: ["imagemin:maya","notify:images"]
copy :
files: [gruntConfig.pkg.watch_folder+"/include/**"]
tasks: ["copy","notify:includes"]
gruntConfig.concurrent =
options:
logConcurrentOutput : true
default: ["watch", "connect","notify:server"]
gruntConfig.connect =
default:
options:
port : 8000
hostname : "*"
keepalive : true
livereload : 35729
base : gruntConfig.pkg.www_folder
# middleware: (connect, options)->
# middlewares = [];
# # RewriteRules support
# middlewares.push(rewriteModule.getMiddleware([
# # rewrite everything not contained in these folders to index.html
# {from: "^/(?!css|js|img|maya|en|it).*$", to: "/index.html"}
# ]));
# if !Array.isArray(options.base)
# options.base = [options.base];
# directory = options.directory || options.base[options.base.length - 1];
# options.base.forEach((base)->
# # Serve static files.
# middlewares.push(connect.static(base));
# );
# # Make directory browse-able.
# middlewares.push(connect.directory(directory));
# return middlewares;
gruntConfig.uglify =
options :
banner : "/*! <%= pkg.name %> <%= pkg.version %> <%= grunt.template.today(\"yyyy-mm-dd\") %> */\n"
drop_console : true
optional :
files : {}
essential:
files: {}
gruntConfig.uglify.optional.files[gruntConfig.pkg.minified_optional_js_file] = optional_files
gruntConfig.uglify.optional.files[gruntConfig.pkg.minified_essential_js_file] = essential_files
gruntConfig.cssmin =
all :
options:
banner : "/*! <%= pkg.name %> <%= pkg.version %> <%= grunt.template.today(\"yyyy-mm-dd\") %> */\n"
keepSpecialComments : false
files :{}
gruntConfig.cssmin.all.files[gruntConfig.pkg.minified_main_css_file] = [gruntConfig.pkg.compass_output_folder+"/**/*.css"];
gruntConfig.modernizr =
dist:
devFile : "modernizr.dev.js"
outputFile : gruntConfig.pkg.www_folder+"/js/modernizr.js"
uglify : if env == "prod" then true else false
matchCommunityTests : true
files :
src: ["src/**/*.{css,js}"]
gruntConfig.copy =
include:
files : [
{
expand : true
cwd : "src/include/"
src : ["**"]
dest : gruntConfig.pkg.www_folder+"/"
dot : true
}
]
json:
files : [
{
expand : true
cwd : "src/maya/data/"
src : ["*.json"]
dest : gruntConfig.pkg.www_folder+"/maya/data"
dot : true
}
]
gruntConfig.concat =
optional:
options:
separator : ";"
src : optional_files
dest : gruntConfig.pkg.minified_optional_js_file
essential:
options:
separator : ";"
src : essential_files
dest : gruntConfig.pkg.minified_essential_js_file
css:
src : gruntConfig.pkg.compass_output_folder+"/**/*.css"
dest : gruntConfig.pkg.minified_main_css_file
gruntConfig.jsonmin =
maya:
options:
stripWhitespace:true
stripComments:true
files: {}
gruntConfig.jsonmin.maya.files[gruntConfig.pkg.www_folder+"/maya/data/scene.json"] = "src/maya/data/scene.json"
gruntConfig.jsonmin.maya.files[gruntConfig.pkg.www_folder+"/maya/data/scene_canvas.json"] = "src/maya/data/scene_canvas.json"
gruntConfig.clean =
all: [gruntConfig.pkg.www_folder]
gruntConfig.compile_markdown_files =
all:
options:
markdown_folder : gruntConfig.pkg.markdown_folder
jade_folder : gruntConfig.pkg.jade_folder
www_folder : gruntConfig.pkg.www_folder
default_document : gruntConfig.pkg.default_document
config_json : gruntConfig.pkg.config_json
environment : env
gruntConfig.imagemin =
site:
options:
optimizationLevel : 7
pngquant : true
interlaced : true
progressive : true
parallelProcesses : 1
files: [
expand : true
cwd : "src/images/"
src : ["*.{png,jpg,gif}"]
dest : gruntConfig.pkg.www_folder+"/img"
]
maya:
options:
optimizationLevel : 7
pngquant : true
interlaced : true
progressive : true
parallelProcesses : 1
files: [
expand : true
cwd : "src/maya/images/"
src : ["**/*.{png,jpg,gif}"]
dest : gruntConfig.pkg.www_folder+"/maya/images"
]
gruntConfig.rsync =
options:
args : ["--verbose"]
recursive : true
# dryRun : true
syncDestIgnoreExcl : true
exclude : ["casa","cgi-bin","old","error_log","php.ini","tmp"]
dist:
options:
src : gruntConfig.pkg.www_folder
dest : "/home2/"+deploy_user
host : deploy_user+"@danielepelagatti.com"
gruntConfig.htmlmin =
all :
options:
removeComments: true
collapseWhitespace: true
files: [
expand : true
cwd : gruntConfig.pkg.www_folder
src : ["**/*.{html,shtml}"]
dest : gruntConfig.pkg.www_folder
]
gruntConfig.notify =
server:
options:
message: "Server Ready"
rsync:
options:
message: "Rsync Done"
images:
options:
message: "Images compiled"
markdown:
options:
message: "Markdown files compiled"
json:
options:
message: "Json files compiled"
css:
options:
message: "CSS files compiled"
js:
options:
message: "JS files compiled"
includes:
options:
message: "Includes files copied"
build:
options:
message: "Build Done"
grunt.initConfig(gruntConfig)
# Default task(s).
grunt.registerTask("deploy", ["rsync"]);
if env == "prod"
grunt.registerTask("build", ["clean","imagemin","copy:include","percolator","compass","glsl_threejs","compile_markdown_files","uglify","cssmin","jsonmin","modernizr","htmlmin","notify:build"]);
# deploy only production
else
grunt.registerTask("build", ["clean","imagemin","copy","percolator","compass","glsl_threejs","compile_markdown_files","concat","modernizr","notify:build"]);
grunt.registerTask("minify", ["copy:include","percolator","compass","glsl_threejs","compile_markdown_files","uglify","cssmin","jsonmin","modernizr","notify:build"]);
grunt.registerTask("default", ["build","concurrent"]);
| 112826 | module.exports = (grunt)->
# Project configuration.
# debugger
env = grunt.option("env") || "prod";
deploy_user = grunt.option("user");
grunt.task.loadTasks("tasks")
require("load-grunt-tasks")(grunt);
rewriteModule = require("http-rewrite-middleware");
optional_files = [
"src/js/optional/three.js"
"src/js/optional/spin.js"
"src/js/optional/*.js"
"src/js/optional/shaders.js"
"src/js/optional/main.js"
]
essential_files = [
"src/js/essential/jquery.js"
"src/js/essential/jquery.leanModal.js"
"src/js/essential/essential.js"
]
gruntConfig =
pkg: grunt.file.readJSON("package.json")
gruntConfig.percolator =
compile:
source : gruntConfig.pkg.coffee_folder
output : gruntConfig.pkg.compiled_js
main : gruntConfig.pkg.percolator_main
compile : true
gruntConfig.compass =
compile:
options:
sassDir : gruntConfig.pkg.compass_folder
cssDir : gruntConfig.pkg.compass_output_folder
outputStyle : "expanded"
gruntConfig.glsl_threejs =
compile:
files : {}
gruntConfig.glsl_threejs.compile.files[gruntConfig.pkg.glsl_output_file] = [gruntConfig.pkg.glsl_folder+"/*.vert",gruntConfig.pkg.glsl_folder+"/*.frag"]
gruntConfig.watch =
options:
livereload : 35729
interrupt : true
coffee:
files: [gruntConfig.pkg.watch_folder+"/**/*.coffee"]
tasks: if env == "prod" then ["percolator","uglify:optional","notify:js"] else ["percolator","concat:optional","notify:js"]
glsl_threejs:
files: [gruntConfig.pkg.watch_folder+"/**/*.{frag,vert}"]
tasks: if env == "prod" then ["glsl_threejs","uglify:optional","notify:js"] else ["glsl_threejs","concat:optional","notify:js"]
compass:
files: [gruntConfig.pkg.watch_folder+"/**/*.{scss,sass}"]
tasks: if env == "prod" then ["compass","cssmin","notify:css"] else ["compass","concat:css","notify:css"]
jsonmin:
files: [gruntConfig.pkg.watch_folder+"/maya/data/*.json"]
tasks: if env == "prod" then ["jsonmin","notify:json"] else ["copy:json","notify:json"]
jade:
files: [gruntConfig.pkg.watch_folder+"/**/*.{jade,md}"]
tasks: if env == "prod" then ["compile_markdown_files","htmlmin","notify:markdown"] else ["compile_markdown_files","notify:markdown"]
uglify_essential :
files: [gruntConfig.pkg.watch_folder+"/js/essential/*.js"]
tasks: if env == "prod" then ["uglify:essential","notify:js"] else ["concat:essential","notify:js"]
imagemin :
files: [gruntConfig.pkg.watch_folder+"/images/**/*.{jpg,png,gif}"]
tasks: ["imagemin:site","notify:images"]
imagemin2 :
files: [gruntConfig.pkg.watch_folder+"/maya/images/**/*.{jpg,png,gif}"]
tasks: ["imagemin:maya","notify:images"]
copy :
files: [gruntConfig.pkg.watch_folder+"/include/**"]
tasks: ["copy","notify:includes"]
gruntConfig.concurrent =
options:
logConcurrentOutput : true
default: ["watch", "connect","notify:server"]
gruntConfig.connect =
default:
options:
port : 8000
hostname : "*"
keepalive : true
livereload : 35729
base : gruntConfig.pkg.www_folder
# middleware: (connect, options)->
# middlewares = [];
# # RewriteRules support
# middlewares.push(rewriteModule.getMiddleware([
# # rewrite everything not contained in these folders to index.html
# {from: "^/(?!css|js|img|maya|en|it).*$", to: "/index.html"}
# ]));
# if !Array.isArray(options.base)
# options.base = [options.base];
# directory = options.directory || options.base[options.base.length - 1];
# options.base.forEach((base)->
# # Serve static files.
# middlewares.push(connect.static(base));
# );
# # Make directory browse-able.
# middlewares.push(connect.directory(directory));
# return middlewares;
gruntConfig.uglify =
options :
banner : "/*! <%= pkg.name %> <%= pkg.version %> <%= grunt.template.today(\"yyyy-mm-dd\") %> */\n"
drop_console : true
optional :
files : {}
essential:
files: {}
gruntConfig.uglify.optional.files[gruntConfig.pkg.minified_optional_js_file] = optional_files
gruntConfig.uglify.optional.files[gruntConfig.pkg.minified_essential_js_file] = essential_files
gruntConfig.cssmin =
all :
options:
banner : "/*! <%= pkg.name %> <%= pkg.version %> <%= grunt.template.today(\"yyyy-mm-dd\") %> */\n"
keepSpecialComments : false
files :{}
gruntConfig.cssmin.all.files[gruntConfig.pkg.minified_main_css_file] = [gruntConfig.pkg.compass_output_folder+"/**/*.css"];
gruntConfig.modernizr =
dist:
devFile : "modernizr.dev.js"
outputFile : gruntConfig.pkg.www_folder+"/js/modernizr.js"
uglify : if env == "prod" then true else false
matchCommunityTests : true
files :
src: ["src/**/*.{css,js}"]
gruntConfig.copy =
include:
files : [
{
expand : true
cwd : "src/include/"
src : ["**"]
dest : gruntConfig.pkg.www_folder+"/"
dot : true
}
]
json:
files : [
{
expand : true
cwd : "src/maya/data/"
src : ["*.json"]
dest : gruntConfig.pkg.www_folder+"/maya/data"
dot : true
}
]
gruntConfig.concat =
optional:
options:
separator : ";"
src : optional_files
dest : gruntConfig.pkg.minified_optional_js_file
essential:
options:
separator : ";"
src : essential_files
dest : gruntConfig.pkg.minified_essential_js_file
css:
src : gruntConfig.pkg.compass_output_folder+"/**/*.css"
dest : gruntConfig.pkg.minified_main_css_file
gruntConfig.jsonmin =
maya:
options:
stripWhitespace:true
stripComments:true
files: {}
gruntConfig.jsonmin.maya.files[gruntConfig.pkg.www_folder+"/maya/data/scene.json"] = "src/maya/data/scene.json"
gruntConfig.jsonmin.maya.files[gruntConfig.pkg.www_folder+"/maya/data/scene_canvas.json"] = "src/maya/data/scene_canvas.json"
gruntConfig.clean =
all: [gruntConfig.pkg.www_folder]
gruntConfig.compile_markdown_files =
all:
options:
markdown_folder : gruntConfig.pkg.markdown_folder
jade_folder : gruntConfig.pkg.jade_folder
www_folder : gruntConfig.pkg.www_folder
default_document : gruntConfig.pkg.default_document
config_json : gruntConfig.pkg.config_json
environment : env
gruntConfig.imagemin =
site:
options:
optimizationLevel : 7
pngquant : true
interlaced : true
progressive : true
parallelProcesses : 1
files: [
expand : true
cwd : "src/images/"
src : ["*.{png,jpg,gif}"]
dest : gruntConfig.pkg.www_folder+"/img"
]
maya:
options:
optimizationLevel : 7
pngquant : true
interlaced : true
progressive : true
parallelProcesses : 1
files: [
expand : true
cwd : "src/maya/images/"
src : ["**/*.{png,jpg,gif}"]
dest : gruntConfig.pkg.www_folder+"/maya/images"
]
gruntConfig.rsync =
options:
args : ["--verbose"]
recursive : true
# dryRun : true
syncDestIgnoreExcl : true
exclude : ["casa","cgi-bin","old","error_log","php.ini","tmp"]
dist:
options:
src : gruntConfig.pkg.www_folder
dest : "/home2/"+deploy_user
host : deploy_user+"<EMAIL>"
gruntConfig.htmlmin =
all :
options:
removeComments: true
collapseWhitespace: true
files: [
expand : true
cwd : gruntConfig.pkg.www_folder
src : ["**/*.{html,shtml}"]
dest : gruntConfig.pkg.www_folder
]
gruntConfig.notify =
server:
options:
message: "Server Ready"
rsync:
options:
message: "Rsync Done"
images:
options:
message: "Images compiled"
markdown:
options:
message: "Markdown files compiled"
json:
options:
message: "Json files compiled"
css:
options:
message: "CSS files compiled"
js:
options:
message: "JS files compiled"
includes:
options:
message: "Includes files copied"
build:
options:
message: "Build Done"
grunt.initConfig(gruntConfig)
# Default task(s).
grunt.registerTask("deploy", ["rsync"]);
if env == "prod"
grunt.registerTask("build", ["clean","imagemin","copy:include","percolator","compass","glsl_threejs","compile_markdown_files","uglify","cssmin","jsonmin","modernizr","htmlmin","notify:build"]);
# deploy only production
else
grunt.registerTask("build", ["clean","imagemin","copy","percolator","compass","glsl_threejs","compile_markdown_files","concat","modernizr","notify:build"]);
grunt.registerTask("minify", ["copy:include","percolator","compass","glsl_threejs","compile_markdown_files","uglify","cssmin","jsonmin","modernizr","notify:build"]);
grunt.registerTask("default", ["build","concurrent"]);
| true | module.exports = (grunt)->
# Project configuration.
# debugger
env = grunt.option("env") || "prod";
deploy_user = grunt.option("user");
grunt.task.loadTasks("tasks")
require("load-grunt-tasks")(grunt);
rewriteModule = require("http-rewrite-middleware");
optional_files = [
"src/js/optional/three.js"
"src/js/optional/spin.js"
"src/js/optional/*.js"
"src/js/optional/shaders.js"
"src/js/optional/main.js"
]
essential_files = [
"src/js/essential/jquery.js"
"src/js/essential/jquery.leanModal.js"
"src/js/essential/essential.js"
]
gruntConfig =
pkg: grunt.file.readJSON("package.json")
gruntConfig.percolator =
compile:
source : gruntConfig.pkg.coffee_folder
output : gruntConfig.pkg.compiled_js
main : gruntConfig.pkg.percolator_main
compile : true
gruntConfig.compass =
compile:
options:
sassDir : gruntConfig.pkg.compass_folder
cssDir : gruntConfig.pkg.compass_output_folder
outputStyle : "expanded"
gruntConfig.glsl_threejs =
compile:
files : {}
gruntConfig.glsl_threejs.compile.files[gruntConfig.pkg.glsl_output_file] = [gruntConfig.pkg.glsl_folder+"/*.vert",gruntConfig.pkg.glsl_folder+"/*.frag"]
gruntConfig.watch =
options:
livereload : 35729
interrupt : true
coffee:
files: [gruntConfig.pkg.watch_folder+"/**/*.coffee"]
tasks: if env == "prod" then ["percolator","uglify:optional","notify:js"] else ["percolator","concat:optional","notify:js"]
glsl_threejs:
files: [gruntConfig.pkg.watch_folder+"/**/*.{frag,vert}"]
tasks: if env == "prod" then ["glsl_threejs","uglify:optional","notify:js"] else ["glsl_threejs","concat:optional","notify:js"]
compass:
files: [gruntConfig.pkg.watch_folder+"/**/*.{scss,sass}"]
tasks: if env == "prod" then ["compass","cssmin","notify:css"] else ["compass","concat:css","notify:css"]
jsonmin:
files: [gruntConfig.pkg.watch_folder+"/maya/data/*.json"]
tasks: if env == "prod" then ["jsonmin","notify:json"] else ["copy:json","notify:json"]
jade:
files: [gruntConfig.pkg.watch_folder+"/**/*.{jade,md}"]
tasks: if env == "prod" then ["compile_markdown_files","htmlmin","notify:markdown"] else ["compile_markdown_files","notify:markdown"]
uglify_essential :
files: [gruntConfig.pkg.watch_folder+"/js/essential/*.js"]
tasks: if env == "prod" then ["uglify:essential","notify:js"] else ["concat:essential","notify:js"]
imagemin :
files: [gruntConfig.pkg.watch_folder+"/images/**/*.{jpg,png,gif}"]
tasks: ["imagemin:site","notify:images"]
imagemin2 :
files: [gruntConfig.pkg.watch_folder+"/maya/images/**/*.{jpg,png,gif}"]
tasks: ["imagemin:maya","notify:images"]
copy :
files: [gruntConfig.pkg.watch_folder+"/include/**"]
tasks: ["copy","notify:includes"]
gruntConfig.concurrent =
options:
logConcurrentOutput : true
default: ["watch", "connect","notify:server"]
gruntConfig.connect =
default:
options:
port : 8000
hostname : "*"
keepalive : true
livereload : 35729
base : gruntConfig.pkg.www_folder
# middleware: (connect, options)->
# middlewares = [];
# # RewriteRules support
# middlewares.push(rewriteModule.getMiddleware([
# # rewrite everything not contained in these folders to index.html
# {from: "^/(?!css|js|img|maya|en|it).*$", to: "/index.html"}
# ]));
# if !Array.isArray(options.base)
# options.base = [options.base];
# directory = options.directory || options.base[options.base.length - 1];
# options.base.forEach((base)->
# # Serve static files.
# middlewares.push(connect.static(base));
# );
# # Make directory browse-able.
# middlewares.push(connect.directory(directory));
# return middlewares;
gruntConfig.uglify =
options :
banner : "/*! <%= pkg.name %> <%= pkg.version %> <%= grunt.template.today(\"yyyy-mm-dd\") %> */\n"
drop_console : true
optional :
files : {}
essential:
files: {}
gruntConfig.uglify.optional.files[gruntConfig.pkg.minified_optional_js_file] = optional_files
gruntConfig.uglify.optional.files[gruntConfig.pkg.minified_essential_js_file] = essential_files
gruntConfig.cssmin =
all :
options:
banner : "/*! <%= pkg.name %> <%= pkg.version %> <%= grunt.template.today(\"yyyy-mm-dd\") %> */\n"
keepSpecialComments : false
files :{}
gruntConfig.cssmin.all.files[gruntConfig.pkg.minified_main_css_file] = [gruntConfig.pkg.compass_output_folder+"/**/*.css"];
gruntConfig.modernizr =
dist:
devFile : "modernizr.dev.js"
outputFile : gruntConfig.pkg.www_folder+"/js/modernizr.js"
uglify : if env == "prod" then true else false
matchCommunityTests : true
files :
src: ["src/**/*.{css,js}"]
gruntConfig.copy =
include:
files : [
{
expand : true
cwd : "src/include/"
src : ["**"]
dest : gruntConfig.pkg.www_folder+"/"
dot : true
}
]
json:
files : [
{
expand : true
cwd : "src/maya/data/"
src : ["*.json"]
dest : gruntConfig.pkg.www_folder+"/maya/data"
dot : true
}
]
gruntConfig.concat =
optional:
options:
separator : ";"
src : optional_files
dest : gruntConfig.pkg.minified_optional_js_file
essential:
options:
separator : ";"
src : essential_files
dest : gruntConfig.pkg.minified_essential_js_file
css:
src : gruntConfig.pkg.compass_output_folder+"/**/*.css"
dest : gruntConfig.pkg.minified_main_css_file
gruntConfig.jsonmin =
maya:
options:
stripWhitespace:true
stripComments:true
files: {}
gruntConfig.jsonmin.maya.files[gruntConfig.pkg.www_folder+"/maya/data/scene.json"] = "src/maya/data/scene.json"
gruntConfig.jsonmin.maya.files[gruntConfig.pkg.www_folder+"/maya/data/scene_canvas.json"] = "src/maya/data/scene_canvas.json"
gruntConfig.clean =
all: [gruntConfig.pkg.www_folder]
gruntConfig.compile_markdown_files =
all:
options:
markdown_folder : gruntConfig.pkg.markdown_folder
jade_folder : gruntConfig.pkg.jade_folder
www_folder : gruntConfig.pkg.www_folder
default_document : gruntConfig.pkg.default_document
config_json : gruntConfig.pkg.config_json
environment : env
gruntConfig.imagemin =
site:
options:
optimizationLevel : 7
pngquant : true
interlaced : true
progressive : true
parallelProcesses : 1
files: [
expand : true
cwd : "src/images/"
src : ["*.{png,jpg,gif}"]
dest : gruntConfig.pkg.www_folder+"/img"
]
maya:
options:
optimizationLevel : 7
pngquant : true
interlaced : true
progressive : true
parallelProcesses : 1
files: [
expand : true
cwd : "src/maya/images/"
src : ["**/*.{png,jpg,gif}"]
dest : gruntConfig.pkg.www_folder+"/maya/images"
]
gruntConfig.rsync =
options:
args : ["--verbose"]
recursive : true
# dryRun : true
syncDestIgnoreExcl : true
exclude : ["casa","cgi-bin","old","error_log","php.ini","tmp"]
dist:
options:
src : gruntConfig.pkg.www_folder
dest : "/home2/"+deploy_user
host : deploy_user+"PI:EMAIL:<EMAIL>END_PI"
gruntConfig.htmlmin =
all :
options:
removeComments: true
collapseWhitespace: true
files: [
expand : true
cwd : gruntConfig.pkg.www_folder
src : ["**/*.{html,shtml}"]
dest : gruntConfig.pkg.www_folder
]
gruntConfig.notify =
server:
options:
message: "Server Ready"
rsync:
options:
message: "Rsync Done"
images:
options:
message: "Images compiled"
markdown:
options:
message: "Markdown files compiled"
json:
options:
message: "Json files compiled"
css:
options:
message: "CSS files compiled"
js:
options:
message: "JS files compiled"
includes:
options:
message: "Includes files copied"
build:
options:
message: "Build Done"
grunt.initConfig(gruntConfig)
# Default task(s).
grunt.registerTask("deploy", ["rsync"]);
if env == "prod"
grunt.registerTask("build", ["clean","imagemin","copy:include","percolator","compass","glsl_threejs","compile_markdown_files","uglify","cssmin","jsonmin","modernizr","htmlmin","notify:build"]);
# deploy only production
else
grunt.registerTask("build", ["clean","imagemin","copy","percolator","compass","glsl_threejs","compile_markdown_files","concat","modernizr","notify:build"]);
grunt.registerTask("minify", ["copy:include","percolator","compass","glsl_threejs","compile_markdown_files","uglify","cssmin","jsonmin","modernizr","notify:build"]);
grunt.registerTask("default", ["build","concurrent"]);
|
[
{
"context": "###*\n* BaseScene class\n*\n* @author David Jegat <david.jegat@gmail.com>\n###\nclass BaseScene\n ###",
"end": 46,
"score": 0.9998867511749268,
"start": 35,
"tag": "NAME",
"value": "David Jegat"
},
{
"context": "###*\n* BaseScene class\n*\n* @author David Jegat <david.... | src/Scene/BaseScene.coffee | Djeg/MicroRacing | 1 | ###*
* BaseScene class
*
* @author David Jegat <david.jegat@gmail.com>
###
class BaseScene
###*
* @constructor
###
constructor: () ->
@game = null
@input = null
@mouse = null
@name = @constructor.name.replace 'Scene', ''
@graphics = new GraphicCollection
@actors = new ActorCollection
###*
* Set up the game for the scene
*
* @param {Game} game
* @throws String, if the game is not a Game
###
setUpGame: (game) ->
if not game instanceof Game
throw "Invalid game instance"
@game = game
@input = game.input
@mouse = game.mouse
###*
* Set up the graĥics
###
setUpGraphics: () ->
for name, graphic of @graphics.items
graphic.setUp @game
###*
* Set up actors
###
setUpActors: () ->
for name, actor of @actors.items
actor.setUp @game
###*
* Test if the scene is loaded
*
* @return {boolean}
###
isLoad: () ->
for name, graphic of @graphics.items
if not graphic.isLoad()
return false
true
###*
* Display a loading screne
*
* @throws String
###
loading: () ->
throw "Pending"
###*
* Initialize graphics and actors or one scene
*
* @throws String
###
init: () ->
throw "Scene #{@constructor.name} init need to be defined"
###*
* Initialize graphics
###
initGraphics: () ->
for name, graphic of @graphics.items
graphic.init()
###*
* Display the scene on the canvas screen
*
* @throws String
###
display: () ->
for actor in @actors.getOrderedItems()
actor.acting()
| 182255 | ###*
* BaseScene class
*
* @author <NAME> <<EMAIL>>
###
class BaseScene
###*
* @constructor
###
constructor: () ->
@game = null
@input = null
@mouse = null
@name = @constructor.name.replace 'Scene', ''
@graphics = new GraphicCollection
@actors = new ActorCollection
###*
* Set up the game for the scene
*
* @param {Game} game
* @throws String, if the game is not a Game
###
setUpGame: (game) ->
if not game instanceof Game
throw "Invalid game instance"
@game = game
@input = game.input
@mouse = game.mouse
###*
* Set up the graĥics
###
setUpGraphics: () ->
for name, graphic of @graphics.items
graphic.setUp @game
###*
* Set up actors
###
setUpActors: () ->
for name, actor of @actors.items
actor.setUp @game
###*
* Test if the scene is loaded
*
* @return {boolean}
###
isLoad: () ->
for name, graphic of @graphics.items
if not graphic.isLoad()
return false
true
###*
* Display a loading screne
*
* @throws String
###
loading: () ->
throw "Pending"
###*
* Initialize graphics and actors or one scene
*
* @throws String
###
init: () ->
throw "Scene #{@constructor.name} init need to be defined"
###*
* Initialize graphics
###
initGraphics: () ->
for name, graphic of @graphics.items
graphic.init()
###*
* Display the scene on the canvas screen
*
* @throws String
###
display: () ->
for actor in @actors.getOrderedItems()
actor.acting()
| true | ###*
* BaseScene class
*
* @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
###
class BaseScene
###*
* @constructor
###
constructor: () ->
@game = null
@input = null
@mouse = null
@name = @constructor.name.replace 'Scene', ''
@graphics = new GraphicCollection
@actors = new ActorCollection
###*
* Set up the game for the scene
*
* @param {Game} game
* @throws String, if the game is not a Game
###
setUpGame: (game) ->
if not game instanceof Game
throw "Invalid game instance"
@game = game
@input = game.input
@mouse = game.mouse
###*
* Set up the graĥics
###
setUpGraphics: () ->
for name, graphic of @graphics.items
graphic.setUp @game
###*
* Set up actors
###
setUpActors: () ->
for name, actor of @actors.items
actor.setUp @game
###*
* Test if the scene is loaded
*
* @return {boolean}
###
isLoad: () ->
for name, graphic of @graphics.items
if not graphic.isLoad()
return false
true
###*
* Display a loading screne
*
* @throws String
###
loading: () ->
throw "Pending"
###*
* Initialize graphics and actors or one scene
*
* @throws String
###
init: () ->
throw "Scene #{@constructor.name} init need to be defined"
###*
* Initialize graphics
###
initGraphics: () ->
for name, graphic of @graphics.items
graphic.init()
###*
* Display the scene on the canvas screen
*
* @throws String
###
display: () ->
for actor in @actors.getOrderedItems()
actor.acting()
|
[
{
"context": "@sheet.authorize({\n client_email: 'bad@email.com',\n private_key: 'bad key'\n ",
"end": 525,
"score": 0.9999127388000488,
"start": 512,
"tag": "EMAIL",
"value": "bad@email.com"
},
{
"context": "l: 'bad@email.com',\n p... | tests/models/test_sheet.coffee | fangamer/ibizan | 0 |
moment = require 'moment'
chai = require 'chai'
assert = chai.assert
expect = chai.expect
Spreadsheet = require '../../src/models/sheet'
MockSheet = require '../mocks/mock_sheet.js'
Punch = require '../../src/models/punch'
describe 'Sheet', ->
beforeEach ->
sheet_id = 'bad id'
@sheet = new Spreadsheet(sheet_id)
@sheet.sheet = MockSheet
describe '#constructor', ->
describe '#authorize(auth)', ->
it 'should authorize', ->
return @sheet.authorize({
client_email: 'bad@email.com',
private_key: 'bad key'
})
.then(() -> assert.isOk(true))
.catch((err) -> assert.fail('success', err))
describe '#loadOptions()', ->
it 'should load options successfully', ->
return @sheet.loadOptions()
.then(() -> assert.isOk(true))
.catch((err) -> assert.fail('success', err))
describe '#enterPunch(punch, user)', ->
it 'should fail without a punch', ->
that = @
return @sheet.loadOptions()
.then((opts) ->
user = opts.users[0]
return that.sheet.enterPunch null, user
)
.then(() ->
assert.fail 'Invalid parameters passed: Punch or user is undefined.', null
)
.catch((err) ->
assert.isNotNull err
)
it 'should fail without a user', ->
that = @
return @sheet.loadOptions()
.then((opts) ->
inPunch = Punch.parse user, 'in', 'in'
return that.sheet.enterPunch inPunch, null
)
.then(() ->
assert.fail 'Invalid parameters passed: Punch or user is undefined.', null
)
.catch((err) ->
assert.isNotNull err
)
it 'should enter in punch for user', ->
that = @
return @sheet.loadOptions()
.then((opts) ->
user = opts.users[0]
inPunch = Punch.parse user, 'in', 'in'
return that.sheet.enterPunch inPunch, user
)
.then(() ->
assert.isOk true
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
it 'should attempt out punch for user, but fail due to lack of notes', ->
that = @
user = null
return @sheet.loadOptions()
.then((opts) ->
user = opts.users[0]
if last = user.lastPunch 'in'
outPunch = Punch.parse user, 'out', 'out'
promise = that.sheet.enterPunch outPunch, user
if promise
return promise
else
inPunch = Punch.parse user, 'in', 'in'
return that.sheet.enterPunch(inPunch, user)
.then(that.sheet.enterPunch(outPunch, user).bind(that))
)
.then(() ->
assert.fail('success')
)
.catch((err) ->
assert.isOk true
)
it 'should enter out punch for user', ->
that = @
user = null
return @sheet.loadOptions()
.then((opts) ->
user = opts.users[0]
if last = user.lastPunch 'in'
outPunch = Punch.parse user, 'out did some things', 'out'
promise = that.sheet.enterPunch outPunch, user
if promise
return promise
else
inPunch = Punch.parse user, 'in', 'in'
return that.sheet.enterPunch(inPunch, user)
.then(that.sheet.enterPunch(outPunch, user).bind(that))
)
.then(() ->
assert.isOk true
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
it 'should enter special punch for user', ->
that = @
return @sheet.loadOptions()
.then((opts) ->
user = opts.users[0]
inPunch = Punch.parse user, 'vacation 6/8-6/12', 'vacation'
return that.sheet.enterPunch inPunch, user
)
.then(() ->
assert.isOk true
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
it 'should enter block punch for user', ->
that = @
return @sheet.loadOptions()
.then((opts) ->
user = opts.users[0]
blockPunch = Punch.parse user, '4.5 hours'
return that.sheet.enterPunch blockPunch, user
)
.then(() ->
assert.isOk true
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
describe '#generateReport()', ->
it "should generate payroll reports between the start and
end times for each of the users passed", ->
that = @
userCount = 0
end = moment()
start = end.subtract(2, 'weeks')
return @sheet.loadOptions()
.then((opts) ->
userCount = opts.users.length
return that.sheet.generateReport(opts.users, start, end)
)
.then((numberDone) ->
expect(numberDone).to.be.equal userCount
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
describe '#_loadWorksheets()', ->
it 'should load worksheets as properties', ->
that = @
return @sheet._loadWorksheets()
.then(() ->
expect(that.sheet).to.have.deep.property 'rawData'
expect(that.sheet).to.have.deep.property 'payroll'
expect(that.sheet).to.have.deep.property 'variables'
expect(that.sheet).to.have.deep.property 'projects'
expect(that.sheet).to.have.deep.property 'employees'
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
describe '#_loadVariables(opts)', ->
it 'should load variables successfully', ->
return @sheet._loadWorksheets()
.then(@sheet._loadVariables.bind(@sheet))
.then((opts) ->
expect(opts).to.have.deep.property 'vacation', 104
expect(opts).to.have.deep.property 'sick', 40
expect(opts).to.have.deep.property 'holidays'
expect(opts).to.have.deep.property 'clockChannel', 'timeclock'
expect(opts).to.have.deep.property 'exemptChannels'
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
describe '#_loadProjects(opts)', ->
it 'should load variables successfully', ->
return @sheet._loadWorksheets()
.then(@sheet._loadProjects.bind(@sheet))
.then((opts) ->
expect(opts).to.have.deep.property 'projects'
expect(opts).to.have.deep.property 'projects[0]'
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
describe '#_loadEmployees(opts)', ->
it 'should load variables successfully', ->
return @sheet._loadWorksheets()
.then(@sheet._loadEmployees.bind(@sheet))
.then((opts) ->
expect(opts).to.have.deep.property 'users'
expect(opts).to.have.deep.property 'users[0]'
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
describe '#_loadPunches(opts)', ->
it 'should load variables successfully', ->
return @sheet._loadWorksheets()
.then(@sheet._loadEmployees.bind(@sheet))
.then(@sheet._loadPunches.bind(@sheet))
.then((opts) ->
expect(opts).to.have.deep.property 'users'
expect(opts).to.have.deep.property 'users[0]'
expect(opts).to.have.deep.property 'users[0].punches'
expect(opts).to.have.deep.property 'users[0].punches[0]'
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
| 70609 |
moment = require 'moment'
chai = require 'chai'
assert = chai.assert
expect = chai.expect
Spreadsheet = require '../../src/models/sheet'
MockSheet = require '../mocks/mock_sheet.js'
Punch = require '../../src/models/punch'
describe 'Sheet', ->
beforeEach ->
sheet_id = 'bad id'
@sheet = new Spreadsheet(sheet_id)
@sheet.sheet = MockSheet
describe '#constructor', ->
describe '#authorize(auth)', ->
it 'should authorize', ->
return @sheet.authorize({
client_email: '<EMAIL>',
private_key: '<KEY>'
})
.then(() -> assert.isOk(true))
.catch((err) -> assert.fail('success', err))
describe '#loadOptions()', ->
it 'should load options successfully', ->
return @sheet.loadOptions()
.then(() -> assert.isOk(true))
.catch((err) -> assert.fail('success', err))
describe '#enterPunch(punch, user)', ->
it 'should fail without a punch', ->
that = @
return @sheet.loadOptions()
.then((opts) ->
user = opts.users[0]
return that.sheet.enterPunch null, user
)
.then(() ->
assert.fail 'Invalid parameters passed: Punch or user is undefined.', null
)
.catch((err) ->
assert.isNotNull err
)
it 'should fail without a user', ->
that = @
return @sheet.loadOptions()
.then((opts) ->
inPunch = Punch.parse user, 'in', 'in'
return that.sheet.enterPunch inPunch, null
)
.then(() ->
assert.fail 'Invalid parameters passed: Punch or user is undefined.', null
)
.catch((err) ->
assert.isNotNull err
)
it 'should enter in punch for user', ->
that = @
return @sheet.loadOptions()
.then((opts) ->
user = opts.users[0]
inPunch = Punch.parse user, 'in', 'in'
return that.sheet.enterPunch inPunch, user
)
.then(() ->
assert.isOk true
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
it 'should attempt out punch for user, but fail due to lack of notes', ->
that = @
user = null
return @sheet.loadOptions()
.then((opts) ->
user = opts.users[0]
if last = user.lastPunch 'in'
outPunch = Punch.parse user, 'out', 'out'
promise = that.sheet.enterPunch outPunch, user
if promise
return promise
else
inPunch = Punch.parse user, 'in', 'in'
return that.sheet.enterPunch(inPunch, user)
.then(that.sheet.enterPunch(outPunch, user).bind(that))
)
.then(() ->
assert.fail('success')
)
.catch((err) ->
assert.isOk true
)
it 'should enter out punch for user', ->
that = @
user = null
return @sheet.loadOptions()
.then((opts) ->
user = opts.users[0]
if last = user.lastPunch 'in'
outPunch = Punch.parse user, 'out did some things', 'out'
promise = that.sheet.enterPunch outPunch, user
if promise
return promise
else
inPunch = Punch.parse user, 'in', 'in'
return that.sheet.enterPunch(inPunch, user)
.then(that.sheet.enterPunch(outPunch, user).bind(that))
)
.then(() ->
assert.isOk true
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
it 'should enter special punch for user', ->
that = @
return @sheet.loadOptions()
.then((opts) ->
user = opts.users[0]
inPunch = Punch.parse user, 'vacation 6/8-6/12', 'vacation'
return that.sheet.enterPunch inPunch, user
)
.then(() ->
assert.isOk true
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
it 'should enter block punch for user', ->
that = @
return @sheet.loadOptions()
.then((opts) ->
user = opts.users[0]
blockPunch = Punch.parse user, '4.5 hours'
return that.sheet.enterPunch blockPunch, user
)
.then(() ->
assert.isOk true
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
describe '#generateReport()', ->
it "should generate payroll reports between the start and
end times for each of the users passed", ->
that = @
userCount = 0
end = moment()
start = end.subtract(2, 'weeks')
return @sheet.loadOptions()
.then((opts) ->
userCount = opts.users.length
return that.sheet.generateReport(opts.users, start, end)
)
.then((numberDone) ->
expect(numberDone).to.be.equal userCount
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
describe '#_loadWorksheets()', ->
it 'should load worksheets as properties', ->
that = @
return @sheet._loadWorksheets()
.then(() ->
expect(that.sheet).to.have.deep.property 'rawData'
expect(that.sheet).to.have.deep.property 'payroll'
expect(that.sheet).to.have.deep.property 'variables'
expect(that.sheet).to.have.deep.property 'projects'
expect(that.sheet).to.have.deep.property 'employees'
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
describe '#_loadVariables(opts)', ->
it 'should load variables successfully', ->
return @sheet._loadWorksheets()
.then(@sheet._loadVariables.bind(@sheet))
.then((opts) ->
expect(opts).to.have.deep.property 'vacation', 104
expect(opts).to.have.deep.property 'sick', 40
expect(opts).to.have.deep.property 'holidays'
expect(opts).to.have.deep.property 'clockChannel', 'timeclock'
expect(opts).to.have.deep.property 'exemptChannels'
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
describe '#_loadProjects(opts)', ->
it 'should load variables successfully', ->
return @sheet._loadWorksheets()
.then(@sheet._loadProjects.bind(@sheet))
.then((opts) ->
expect(opts).to.have.deep.property 'projects'
expect(opts).to.have.deep.property 'projects[0]'
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
describe '#_loadEmployees(opts)', ->
it 'should load variables successfully', ->
return @sheet._loadWorksheets()
.then(@sheet._loadEmployees.bind(@sheet))
.then((opts) ->
expect(opts).to.have.deep.property 'users'
expect(opts).to.have.deep.property 'users[0]'
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
describe '#_loadPunches(opts)', ->
it 'should load variables successfully', ->
return @sheet._loadWorksheets()
.then(@sheet._loadEmployees.bind(@sheet))
.then(@sheet._loadPunches.bind(@sheet))
.then((opts) ->
expect(opts).to.have.deep.property 'users'
expect(opts).to.have.deep.property 'users[0]'
expect(opts).to.have.deep.property 'users[0].punches'
expect(opts).to.have.deep.property 'users[0].punches[0]'
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
| true |
moment = require 'moment'
chai = require 'chai'
assert = chai.assert
expect = chai.expect
Spreadsheet = require '../../src/models/sheet'
MockSheet = require '../mocks/mock_sheet.js'
Punch = require '../../src/models/punch'
describe 'Sheet', ->
beforeEach ->
sheet_id = 'bad id'
@sheet = new Spreadsheet(sheet_id)
@sheet.sheet = MockSheet
describe '#constructor', ->
describe '#authorize(auth)', ->
it 'should authorize', ->
return @sheet.authorize({
client_email: 'PI:EMAIL:<EMAIL>END_PI',
private_key: 'PI:KEY:<KEY>END_PI'
})
.then(() -> assert.isOk(true))
.catch((err) -> assert.fail('success', err))
describe '#loadOptions()', ->
it 'should load options successfully', ->
return @sheet.loadOptions()
.then(() -> assert.isOk(true))
.catch((err) -> assert.fail('success', err))
describe '#enterPunch(punch, user)', ->
it 'should fail without a punch', ->
that = @
return @sheet.loadOptions()
.then((opts) ->
user = opts.users[0]
return that.sheet.enterPunch null, user
)
.then(() ->
assert.fail 'Invalid parameters passed: Punch or user is undefined.', null
)
.catch((err) ->
assert.isNotNull err
)
it 'should fail without a user', ->
that = @
return @sheet.loadOptions()
.then((opts) ->
inPunch = Punch.parse user, 'in', 'in'
return that.sheet.enterPunch inPunch, null
)
.then(() ->
assert.fail 'Invalid parameters passed: Punch or user is undefined.', null
)
.catch((err) ->
assert.isNotNull err
)
it 'should enter in punch for user', ->
that = @
return @sheet.loadOptions()
.then((opts) ->
user = opts.users[0]
inPunch = Punch.parse user, 'in', 'in'
return that.sheet.enterPunch inPunch, user
)
.then(() ->
assert.isOk true
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
it 'should attempt out punch for user, but fail due to lack of notes', ->
that = @
user = null
return @sheet.loadOptions()
.then((opts) ->
user = opts.users[0]
if last = user.lastPunch 'in'
outPunch = Punch.parse user, 'out', 'out'
promise = that.sheet.enterPunch outPunch, user
if promise
return promise
else
inPunch = Punch.parse user, 'in', 'in'
return that.sheet.enterPunch(inPunch, user)
.then(that.sheet.enterPunch(outPunch, user).bind(that))
)
.then(() ->
assert.fail('success')
)
.catch((err) ->
assert.isOk true
)
it 'should enter out punch for user', ->
that = @
user = null
return @sheet.loadOptions()
.then((opts) ->
user = opts.users[0]
if last = user.lastPunch 'in'
outPunch = Punch.parse user, 'out did some things', 'out'
promise = that.sheet.enterPunch outPunch, user
if promise
return promise
else
inPunch = Punch.parse user, 'in', 'in'
return that.sheet.enterPunch(inPunch, user)
.then(that.sheet.enterPunch(outPunch, user).bind(that))
)
.then(() ->
assert.isOk true
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
it 'should enter special punch for user', ->
that = @
return @sheet.loadOptions()
.then((opts) ->
user = opts.users[0]
inPunch = Punch.parse user, 'vacation 6/8-6/12', 'vacation'
return that.sheet.enterPunch inPunch, user
)
.then(() ->
assert.isOk true
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
it 'should enter block punch for user', ->
that = @
return @sheet.loadOptions()
.then((opts) ->
user = opts.users[0]
blockPunch = Punch.parse user, '4.5 hours'
return that.sheet.enterPunch blockPunch, user
)
.then(() ->
assert.isOk true
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
describe '#generateReport()', ->
it "should generate payroll reports between the start and
end times for each of the users passed", ->
that = @
userCount = 0
end = moment()
start = end.subtract(2, 'weeks')
return @sheet.loadOptions()
.then((opts) ->
userCount = opts.users.length
return that.sheet.generateReport(opts.users, start, end)
)
.then((numberDone) ->
expect(numberDone).to.be.equal userCount
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
describe '#_loadWorksheets()', ->
it 'should load worksheets as properties', ->
that = @
return @sheet._loadWorksheets()
.then(() ->
expect(that.sheet).to.have.deep.property 'rawData'
expect(that.sheet).to.have.deep.property 'payroll'
expect(that.sheet).to.have.deep.property 'variables'
expect(that.sheet).to.have.deep.property 'projects'
expect(that.sheet).to.have.deep.property 'employees'
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
describe '#_loadVariables(opts)', ->
it 'should load variables successfully', ->
return @sheet._loadWorksheets()
.then(@sheet._loadVariables.bind(@sheet))
.then((opts) ->
expect(opts).to.have.deep.property 'vacation', 104
expect(opts).to.have.deep.property 'sick', 40
expect(opts).to.have.deep.property 'holidays'
expect(opts).to.have.deep.property 'clockChannel', 'timeclock'
expect(opts).to.have.deep.property 'exemptChannels'
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
describe '#_loadProjects(opts)', ->
it 'should load variables successfully', ->
return @sheet._loadWorksheets()
.then(@sheet._loadProjects.bind(@sheet))
.then((opts) ->
expect(opts).to.have.deep.property 'projects'
expect(opts).to.have.deep.property 'projects[0]'
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
describe '#_loadEmployees(opts)', ->
it 'should load variables successfully', ->
return @sheet._loadWorksheets()
.then(@sheet._loadEmployees.bind(@sheet))
.then((opts) ->
expect(opts).to.have.deep.property 'users'
expect(opts).to.have.deep.property 'users[0]'
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
describe '#_loadPunches(opts)', ->
it 'should load variables successfully', ->
return @sheet._loadWorksheets()
.then(@sheet._loadEmployees.bind(@sheet))
.then(@sheet._loadPunches.bind(@sheet))
.then((opts) ->
expect(opts).to.have.deep.property 'users'
expect(opts).to.have.deep.property 'users[0]'
expect(opts).to.have.deep.property 'users[0].punches'
expect(opts).to.have.deep.property 'users[0].punches[0]'
)
.catch((err) ->
console.log err
assert.fail('success', err)
)
|
[
{
"context": "()\n alice.ensureMainSynchro euid\n\n # Connect Alice to Bob, but stop automatic propagation of changes",
"end": 645,
"score": 0.9420238733291626,
"start": 640,
"tag": "NAME",
"value": "Alice"
},
{
"context": "ice.ensureMainSynchro euid\n\n # Connect Alice to B... | packages/nog-sync/nog-sync-remote-tests.coffee | nogproject/nog | 0 | { createTestPeers } = require './nog-sync-peer-tests.coffee'
{ createContentFaker } = require './nog-sync-store-tests.coffee'
describe 'nog-sync', -> describe 'remote, real ops', ->
peers = null
euid = null
alice = null
bob = null
bobSyncStore = null
bobSyncOwner = null
bobContentStore = null
bobContentFaker = null
remote = null
before ->
peers = createTestPeers()
alice = peers.AliceMain
bob = peers.BobMain
bobSyncStore = bob.syncStore
bobSyncOwner = peers.bobOwner
bobContentStore = peers.bobOpts.contentStore
alice.ensureSyncUsers()
alice.ensureMainSynchro euid
# Connect Alice to Bob, but stop automatic propagation of changes from Bob,
# so that the tests can explicitly call fetch.
#
# By default create fake content at Bob and check that fetch transfers to
# Alice.
alice.connectRemotes()
remote = alice.remotes[peers.rndBob]
remote.observer.stop()
bob.ensureSyncUsers()
bob.ensureMainSynchro euid
bobContentFaker = createContentFaker()
bobContentFaker.insertFakeUsers { users: bobContentStore.users }
after ->
alice.disconnectRemotes()
peers.cleanup()
# `waitForSubUpdate({ master })` spins until the subscription to the remote
# synchro's master has updated to the expected ref.
waitForSubUpdate = (opts) ->
{ master } = opts
getMaster = ->
remote.remoteSynchros.findOne({ name: 'all' }).refs['branches/master']
n = 100
until getMaster() == master and n > 0
n--
Meteor._sleepForMs 5
unless getMaster() == master
throw new Error('Did not receive the expected update.')
bobGetSynchroMaster = ->
synchro = bobSyncStore.synchros.findOne({ name: 'all' })
return synchro.refs['branches/master']
aliceGetRemoteRef = ->
synchro = alice.syncStore.synchros.findOne({ name: 'all' })
return synchro.refs["remotes/#{peers.rndBob}/branches/master"]
aliceExpectContentEntries = (entries) ->
{ commits, trees, objects, blobs } = peers.aliceOpts.contentStore
for ent in entries
switch ent.type
when 'commit'
expect(commits.findOne(ent.sha)).to.exist
when 'tree'
expect(trees.findOne(ent.sha)).to.exist
when 'object'
expect(objects.findOne(ent.sha)).to.exist
when 'blob'
expect(blobs.findOne(ent.sha)).to.exist
bobSnapshot = ->
bobSyncStore.snapshot euid, { ownerName: bobSyncOwner, synchroName: 'all' }
it 'fetches new real content', ->
bobContentFaker.createFakeContent { euid, store: bobContentStore }
bobSnapshot()
master = bobGetSynchroMaster()
waitForSubUpdate { master }
remote.fetch()
expect(aliceGetRemoteRef()).to.eql master
{ spec } = bobContentFaker
aliceExpectContentEntries([
{ type: 'commit', sha: spec.commit._id }
{ type: 'tree', sha: spec.tree._id }
{ type: 'object', sha: spec.object._id }
])
it 'fetches updated real content', ->
expected = []
commit = ->
bobContentFaker.commitFakeContent()
{ spec } = bobContentFaker
expected.push { type: 'commit', sha: spec.commit._id }
expected.push { type: 'tree', sha: spec.tree._id }
expected.push { type: 'object', sha: spec.object._id }
commit()
commit()
commit()
bobSnapshot()
master = bobGetSynchroMaster()
waitForSubUpdate { master }
remote.fetch()
expect(aliceGetRemoteRef()).to.eql master
aliceExpectContentEntries expected
it 'adds placeholder blobs', ->
bobContentFaker.commitBlob()
bobSnapshot()
master = bobGetSynchroMaster()
waitForSubUpdate { master }
remote.fetch()
expect(aliceGetRemoteRef()).to.eql master
{ spec } = bobContentFaker
aliceExpectContentEntries([
{ type: 'blob', sha: spec.blob._id }
])
describe 'content transfer sha check', ->
origCall = null
beforeEach ->
# Change content before each to ensure fresh transfer in each test.
bobContentFaker.createFakeContent { euid, store: bobContentStore }
bobSnapshot()
master = bobGetSynchroMaster()
waitForSubUpdate { master }
origCall = remote.call
remote.call = _.clone remote.call
afterEach ->
remote.call = origCall
it 'detects content commit transfer sha mismatch', ->
origGet = remote.call.getContentCommit
remote.call.getContentCommit = (args...) ->
content = origGet args...
content.subject = Random.id()
return content
fn = -> remote.fetch()
expect(fn).to.throw '[ERR_CONTENT_CHECKSUM]'
it 'detects content tree transfer sha mismatch', ->
origGet = remote.call.getContentTree
remote.call.getContentTree = (args...) ->
content = origGet args...
content.name = Random.id()
return content
fn = -> remote.fetch()
expect(fn).to.throw '[ERR_CONTENT_CHECKSUM]'
it 'detects content object transfer sha mismatch', ->
origGet = remote.call.getContentObject
remote.call.getContentObject = (args...) ->
content = origGet args...
content.name = Random.id()
return content
origGetEntries = remote.call.getContentEntries
remote.call.getContentEntries = (args...) ->
res = origGetEntries args...
for obj in res.objects
obj.name = Random.id()
return res
fn = -> remote.fetch()
expect(fn).to.throw '[ERR_CONTENT_CHECKSUM]'
| 63584 | { createTestPeers } = require './nog-sync-peer-tests.coffee'
{ createContentFaker } = require './nog-sync-store-tests.coffee'
describe 'nog-sync', -> describe 'remote, real ops', ->
peers = null
euid = null
alice = null
bob = null
bobSyncStore = null
bobSyncOwner = null
bobContentStore = null
bobContentFaker = null
remote = null
before ->
peers = createTestPeers()
alice = peers.AliceMain
bob = peers.BobMain
bobSyncStore = bob.syncStore
bobSyncOwner = peers.bobOwner
bobContentStore = peers.bobOpts.contentStore
alice.ensureSyncUsers()
alice.ensureMainSynchro euid
# Connect <NAME> to <NAME>, but stop automatic propagation of changes from Bob,
# so that the tests can explicitly call fetch.
#
# By default create fake content at <NAME> and check that fetch transfers to
# <NAME>.
alice.connectRemotes()
remote = alice.remotes[peers.rndBob]
remote.observer.stop()
bob.ensureSyncUsers()
bob.ensureMainSynchro euid
bobContentFaker = createContentFaker()
bobContentFaker.insertFakeUsers { users: bobContentStore.users }
after ->
alice.disconnectRemotes()
peers.cleanup()
# `waitForSubUpdate({ master })` spins until the subscription to the remote
# synchro's master has updated to the expected ref.
waitForSubUpdate = (opts) ->
{ master } = opts
getMaster = ->
remote.remoteSynchros.findOne({ name: 'all' }).refs['branches/master']
n = 100
until getMaster() == master and n > 0
n--
Meteor._sleepForMs 5
unless getMaster() == master
throw new Error('Did not receive the expected update.')
bobGetSynchroMaster = ->
synchro = bobSyncStore.synchros.findOne({ name: 'all' })
return synchro.refs['branches/master']
aliceGetRemoteRef = ->
synchro = alice.syncStore.synchros.findOne({ name: 'all' })
return synchro.refs["remotes/#{peers.rndBob}/branches/master"]
aliceExpectContentEntries = (entries) ->
{ commits, trees, objects, blobs } = peers.aliceOpts.contentStore
for ent in entries
switch ent.type
when 'commit'
expect(commits.findOne(ent.sha)).to.exist
when 'tree'
expect(trees.findOne(ent.sha)).to.exist
when 'object'
expect(objects.findOne(ent.sha)).to.exist
when 'blob'
expect(blobs.findOne(ent.sha)).to.exist
bobSnapshot = ->
bobSyncStore.snapshot euid, { ownerName: bobSyncOwner, synchroName: 'all' }
it 'fetches new real content', ->
bobContentFaker.createFakeContent { euid, store: bobContentStore }
bobSnapshot()
master = bobGetSynchroMaster()
waitForSubUpdate { master }
remote.fetch()
expect(aliceGetRemoteRef()).to.eql master
{ spec } = bobContentFaker
aliceExpectContentEntries([
{ type: 'commit', sha: spec.commit._id }
{ type: 'tree', sha: spec.tree._id }
{ type: 'object', sha: spec.object._id }
])
it 'fetches updated real content', ->
expected = []
commit = ->
bobContentFaker.commitFakeContent()
{ spec } = bobContentFaker
expected.push { type: 'commit', sha: spec.commit._id }
expected.push { type: 'tree', sha: spec.tree._id }
expected.push { type: 'object', sha: spec.object._id }
commit()
commit()
commit()
bobSnapshot()
master = bobGetSynchroMaster()
waitForSubUpdate { master }
remote.fetch()
expect(aliceGetRemoteRef()).to.eql master
aliceExpectContentEntries expected
it 'adds placeholder blobs', ->
bobContentFaker.commitBlob()
bobSnapshot()
master = bobGetSynchroMaster()
waitForSubUpdate { master }
remote.fetch()
expect(aliceGetRemoteRef()).to.eql master
{ spec } = bobContentFaker
aliceExpectContentEntries([
{ type: 'blob', sha: spec.blob._id }
])
describe 'content transfer sha check', ->
origCall = null
beforeEach ->
# Change content before each to ensure fresh transfer in each test.
bobContentFaker.createFakeContent { euid, store: bobContentStore }
bobSnapshot()
master = bobGetSynchroMaster()
waitForSubUpdate { master }
origCall = remote.call
remote.call = _.clone remote.call
afterEach ->
remote.call = origCall
it 'detects content commit transfer sha mismatch', ->
origGet = remote.call.getContentCommit
remote.call.getContentCommit = (args...) ->
content = origGet args...
content.subject = Random.id()
return content
fn = -> remote.fetch()
expect(fn).to.throw '[ERR_CONTENT_CHECKSUM]'
it 'detects content tree transfer sha mismatch', ->
origGet = remote.call.getContentTree
remote.call.getContentTree = (args...) ->
content = origGet args...
content.name = Random.id()
return content
fn = -> remote.fetch()
expect(fn).to.throw '[ERR_CONTENT_CHECKSUM]'
it 'detects content object transfer sha mismatch', ->
origGet = remote.call.getContentObject
remote.call.getContentObject = (args...) ->
content = origGet args...
content.name = Random.id()
return content
origGetEntries = remote.call.getContentEntries
remote.call.getContentEntries = (args...) ->
res = origGetEntries args...
for obj in res.objects
obj.name = Random.id()
return res
fn = -> remote.fetch()
expect(fn).to.throw '[ERR_CONTENT_CHECKSUM]'
| true | { createTestPeers } = require './nog-sync-peer-tests.coffee'
{ createContentFaker } = require './nog-sync-store-tests.coffee'
describe 'nog-sync', -> describe 'remote, real ops', ->
peers = null
euid = null
alice = null
bob = null
bobSyncStore = null
bobSyncOwner = null
bobContentStore = null
bobContentFaker = null
remote = null
before ->
peers = createTestPeers()
alice = peers.AliceMain
bob = peers.BobMain
bobSyncStore = bob.syncStore
bobSyncOwner = peers.bobOwner
bobContentStore = peers.bobOpts.contentStore
alice.ensureSyncUsers()
alice.ensureMainSynchro euid
# Connect PI:NAME:<NAME>END_PI to PI:NAME:<NAME>END_PI, but stop automatic propagation of changes from Bob,
# so that the tests can explicitly call fetch.
#
# By default create fake content at PI:NAME:<NAME>END_PI and check that fetch transfers to
# PI:NAME:<NAME>END_PI.
alice.connectRemotes()
remote = alice.remotes[peers.rndBob]
remote.observer.stop()
bob.ensureSyncUsers()
bob.ensureMainSynchro euid
bobContentFaker = createContentFaker()
bobContentFaker.insertFakeUsers { users: bobContentStore.users }
after ->
alice.disconnectRemotes()
peers.cleanup()
# `waitForSubUpdate({ master })` spins until the subscription to the remote
# synchro's master has updated to the expected ref.
waitForSubUpdate = (opts) ->
{ master } = opts
getMaster = ->
remote.remoteSynchros.findOne({ name: 'all' }).refs['branches/master']
n = 100
until getMaster() == master and n > 0
n--
Meteor._sleepForMs 5
unless getMaster() == master
throw new Error('Did not receive the expected update.')
bobGetSynchroMaster = ->
synchro = bobSyncStore.synchros.findOne({ name: 'all' })
return synchro.refs['branches/master']
aliceGetRemoteRef = ->
synchro = alice.syncStore.synchros.findOne({ name: 'all' })
return synchro.refs["remotes/#{peers.rndBob}/branches/master"]
aliceExpectContentEntries = (entries) ->
{ commits, trees, objects, blobs } = peers.aliceOpts.contentStore
for ent in entries
switch ent.type
when 'commit'
expect(commits.findOne(ent.sha)).to.exist
when 'tree'
expect(trees.findOne(ent.sha)).to.exist
when 'object'
expect(objects.findOne(ent.sha)).to.exist
when 'blob'
expect(blobs.findOne(ent.sha)).to.exist
bobSnapshot = ->
bobSyncStore.snapshot euid, { ownerName: bobSyncOwner, synchroName: 'all' }
it 'fetches new real content', ->
bobContentFaker.createFakeContent { euid, store: bobContentStore }
bobSnapshot()
master = bobGetSynchroMaster()
waitForSubUpdate { master }
remote.fetch()
expect(aliceGetRemoteRef()).to.eql master
{ spec } = bobContentFaker
aliceExpectContentEntries([
{ type: 'commit', sha: spec.commit._id }
{ type: 'tree', sha: spec.tree._id }
{ type: 'object', sha: spec.object._id }
])
it 'fetches updated real content', ->
expected = []
commit = ->
bobContentFaker.commitFakeContent()
{ spec } = bobContentFaker
expected.push { type: 'commit', sha: spec.commit._id }
expected.push { type: 'tree', sha: spec.tree._id }
expected.push { type: 'object', sha: spec.object._id }
commit()
commit()
commit()
bobSnapshot()
master = bobGetSynchroMaster()
waitForSubUpdate { master }
remote.fetch()
expect(aliceGetRemoteRef()).to.eql master
aliceExpectContentEntries expected
it 'adds placeholder blobs', ->
bobContentFaker.commitBlob()
bobSnapshot()
master = bobGetSynchroMaster()
waitForSubUpdate { master }
remote.fetch()
expect(aliceGetRemoteRef()).to.eql master
{ spec } = bobContentFaker
aliceExpectContentEntries([
{ type: 'blob', sha: spec.blob._id }
])
describe 'content transfer sha check', ->
origCall = null
beforeEach ->
# Change content before each to ensure fresh transfer in each test.
bobContentFaker.createFakeContent { euid, store: bobContentStore }
bobSnapshot()
master = bobGetSynchroMaster()
waitForSubUpdate { master }
origCall = remote.call
remote.call = _.clone remote.call
afterEach ->
remote.call = origCall
it 'detects content commit transfer sha mismatch', ->
origGet = remote.call.getContentCommit
remote.call.getContentCommit = (args...) ->
content = origGet args...
content.subject = Random.id()
return content
fn = -> remote.fetch()
expect(fn).to.throw '[ERR_CONTENT_CHECKSUM]'
it 'detects content tree transfer sha mismatch', ->
origGet = remote.call.getContentTree
remote.call.getContentTree = (args...) ->
content = origGet args...
content.name = Random.id()
return content
fn = -> remote.fetch()
expect(fn).to.throw '[ERR_CONTENT_CHECKSUM]'
it 'detects content object transfer sha mismatch', ->
origGet = remote.call.getContentObject
remote.call.getContentObject = (args...) ->
content = origGet args...
content.name = Random.id()
return content
origGetEntries = remote.call.getContentEntries
remote.call.getContentEntries = (args...) ->
res = origGetEntries args...
for obj in res.objects
obj.name = Random.id()
return res
fn = -> remote.fetch()
expect(fn).to.throw '[ERR_CONTENT_CHECKSUM]'
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.