entities listlengths 1 8.61k | max_stars_repo_path stringlengths 7 172 | max_stars_repo_name stringlengths 5 89 | max_stars_count int64 0 82k | content stringlengths 14 1.05M | id stringlengths 2 6 | new_content stringlengths 15 1.05M | modified bool 1 class | references stringlengths 29 1.05M |
|---|---|---|---|---|---|---|---|---|
[
{
"context": "\"globalconfig = \" + common.globalconfig\n \"email = i@izs.me\"\n \"env-thing = asdf\"\n \"init.author.name = Isaac",
"end": 230,
"score": 0.9999275803565979,
"start": 222,
"tag": "EMAIL",
"value": "i@izs.me"
},
{
"context": "izs.me\"\n \"env-thing = asdf\"\n \"in... | deps/npm/test/tap/config-save.coffee | lxe/io.coffee | 0 | fs = require("fs")
ini = require("ini")
test = require("tap").test
npmconf = require("../../lib/config/core.js")
common = require("./00-config-setup.js")
expectConf = [
"globalconfig = " + common.globalconfig
"email = i@izs.me"
"env-thing = asdf"
"init.author.name = Isaac Z. Schlueter"
"init.author.email = i@izs.me"
"init.author.url = http://blog.izs.me/"
"init.version = 1.2.3"
"proprietary-attribs = false"
"npm:publishtest = true"
"_npmjs.org:couch = https://admin:password@localhost:5984/registry"
"npm-www:nocache = 1"
"sign-git-tag = false"
"message = v%s"
"strict-ssl = false"
"_auth = dXNlcm5hbWU6cGFzc3dvcmQ="
""
"[_token]"
"AuthSession = yabba-dabba-doodle"
"version = 1"
"expires = 1345001053415"
"path = /"
"httponly = true"
""
].join("\n")
expectFile = [
"globalconfig = " + common.globalconfig
"email = i@izs.me"
"env-thing = asdf"
"init.author.name = Isaac Z. Schlueter"
"init.author.email = i@izs.me"
"init.author.url = http://blog.izs.me/"
"init.version = 1.2.3"
"proprietary-attribs = false"
"npm:publishtest = true"
"_npmjs.org:couch = https://admin:password@localhost:5984/registry"
"npm-www:nocache = 1"
"sign-git-tag = false"
"message = v%s"
"strict-ssl = false"
"_auth = dXNlcm5hbWU6cGFzc3dvcmQ="
""
"[_token]"
"AuthSession = yabba-dabba-doodle"
"version = 1"
"expires = 1345001053415"
"path = /"
"httponly = true"
""
].join("\n")
test "saving configs", (t) ->
npmconf.load (er, conf) ->
throw er if er
conf.set "sign-git-tag", false, "user"
conf.del "nodedir"
conf.del "tmp"
foundConf = ini.stringify(conf.sources.user.data)
t.same ini.parse(foundConf), ini.parse(expectConf)
fs.unlinkSync common.userconfig
conf.save "user", (er) ->
throw er if er
uc = fs.readFileSync(conf.get("userconfig"), "utf8")
t.same ini.parse(uc), ini.parse(expectFile)
t.end()
return
return
return
test "setting prefix", (t) ->
npmconf.load (er, conf) ->
throw er if er
conf.prefix = "newvalue"
t.same conf.prefix, "newvalue"
t.end()
return
return
| 205426 | fs = require("fs")
ini = require("ini")
test = require("tap").test
npmconf = require("../../lib/config/core.js")
common = require("./00-config-setup.js")
expectConf = [
"globalconfig = " + common.globalconfig
"email = <EMAIL>"
"env-thing = asdf"
"init.author.name = <NAME>"
"init.author.email = <EMAIL>"
"init.author.url = http://blog.izs.me/"
"init.version = 1.2.3"
"proprietary-attribs = false"
"npm:publishtest = true"
"_npmjs.org:couch = https://admin:password@localhost:5984/registry"
"npm-www:nocache = 1"
"sign-git-tag = false"
"message = v%s"
"strict-ssl = false"
"_auth = <KEY>="
""
"[_token]"
"AuthSession = <PASSWORD>"
"version = 1"
"expires = 1345001053415"
"path = /"
"httponly = true"
""
].join("\n")
expectFile = [
"globalconfig = " + common.globalconfig
"email = <EMAIL>"
"env-thing = asdf"
"init.author.name = <NAME>"
"init.author.email = <EMAIL>"
"init.author.url = http://blog.izs.me/"
"init.version = 1.2.3"
"proprietary-attribs = false"
"npm:publishtest = true"
"_npmjs.org:couch = https://admin:password@localhost:5984/registry"
"npm-www:nocache = 1"
"sign-git-tag = false"
"message = v%s"
"strict-ssl = false"
"_auth = <KEY> <PASSWORD> <KEY> <PASSWORD>6cGFzc3dvcmQ="
""
"[_token]"
"AuthSession = <PASSWORD>"
"version = 1"
"expires = 1345001053415"
"path = /"
"httponly = true"
""
].join("\n")
test "saving configs", (t) ->
npmconf.load (er, conf) ->
throw er if er
conf.set "sign-git-tag", false, "user"
conf.del "nodedir"
conf.del "tmp"
foundConf = ini.stringify(conf.sources.user.data)
t.same ini.parse(foundConf), ini.parse(expectConf)
fs.unlinkSync common.userconfig
conf.save "user", (er) ->
throw er if er
uc = fs.readFileSync(conf.get("userconfig"), "utf8")
t.same ini.parse(uc), ini.parse(expectFile)
t.end()
return
return
return
test "setting prefix", (t) ->
npmconf.load (er, conf) ->
throw er if er
conf.prefix = "newvalue"
t.same conf.prefix, "newvalue"
t.end()
return
return
| true | fs = require("fs")
ini = require("ini")
test = require("tap").test
npmconf = require("../../lib/config/core.js")
common = require("./00-config-setup.js")
expectConf = [
"globalconfig = " + common.globalconfig
"email = PI:EMAIL:<EMAIL>END_PI"
"env-thing = asdf"
"init.author.name = PI:NAME:<NAME>END_PI"
"init.author.email = PI:EMAIL:<EMAIL>END_PI"
"init.author.url = http://blog.izs.me/"
"init.version = 1.2.3"
"proprietary-attribs = false"
"npm:publishtest = true"
"_npmjs.org:couch = https://admin:password@localhost:5984/registry"
"npm-www:nocache = 1"
"sign-git-tag = false"
"message = v%s"
"strict-ssl = false"
"_auth = PI:KEY:<KEY>END_PI="
""
"[_token]"
"AuthSession = PI:PASSWORD:<PASSWORD>END_PI"
"version = 1"
"expires = 1345001053415"
"path = /"
"httponly = true"
""
].join("\n")
expectFile = [
"globalconfig = " + common.globalconfig
"email = PI:EMAIL:<EMAIL>END_PI"
"env-thing = asdf"
"init.author.name = PI:NAME:<NAME>END_PI"
"init.author.email = PI:EMAIL:<EMAIL>END_PI"
"init.author.url = http://blog.izs.me/"
"init.version = 1.2.3"
"proprietary-attribs = false"
"npm:publishtest = true"
"_npmjs.org:couch = https://admin:password@localhost:5984/registry"
"npm-www:nocache = 1"
"sign-git-tag = false"
"message = v%s"
"strict-ssl = false"
"_auth = PI:KEY:<KEY>END_PI PI:PASSWORD:<PASSWORD>END_PI PI:KEY:<KEY>END_PI PI:PASSWORD:<PASSWORD>END_PI6cGFzc3dvcmQ="
""
"[_token]"
"AuthSession = PI:PASSWORD:<PASSWORD>END_PI"
"version = 1"
"expires = 1345001053415"
"path = /"
"httponly = true"
""
].join("\n")
test "saving configs", (t) ->
npmconf.load (er, conf) ->
throw er if er
conf.set "sign-git-tag", false, "user"
conf.del "nodedir"
conf.del "tmp"
foundConf = ini.stringify(conf.sources.user.data)
t.same ini.parse(foundConf), ini.parse(expectConf)
fs.unlinkSync common.userconfig
conf.save "user", (er) ->
throw er if er
uc = fs.readFileSync(conf.get("userconfig"), "utf8")
t.same ini.parse(uc), ini.parse(expectFile)
t.end()
return
return
return
test "setting prefix", (t) ->
npmconf.load (er, conf) ->
throw er if er
conf.prefix = "newvalue"
t.same conf.prefix, "newvalue"
t.end()
return
return
|
[
{
"context": "tworks:\n PubRybMarTest:\n 'ipv4.address': '172.16.0.1/24'\n 'ipv4.nat': true\n 'ipv6.address': ",
"end": 1027,
"score": 0.9996675252914429,
"start": 1017,
"tag": "IP_ADDRESS",
"value": "172.16.0.1"
},
{
"context": "ikita'\n PrivRybMarTest:\n '... | packages/mariadb/env/build_env.coffee | ryba-io/ryba | 24 | nikita = require 'nikita'
path = require 'path'
# Notes:
# SSH private and public keys will be generated in an "assets" directory
# inside the current working directory.
nikita
.log.cli pad: host: 20, header: 60
# Delete existing test containers
.lxd.delete
header: 'Remove existing node1'
if_exec: '[[ `lxc ls | grep node1` ]] && exit 0 || exit 1'
force: true
container: 'node1'
.lxd.delete
header: 'Remove existing node2'
if_exec: '[[ `lxc ls | grep node2` ]] && exit 0 || exit 1'
force: true
container: 'node2'
.lxd.network.delete
header: 'Remove existing Network PubRybMarTest'
if_exec: '[[ `lxc network ls | grep PubRybMarTest` ]] && exit 0 || exit 1'
network: 'PubRybMarTest'
.lxd.network.delete
header: 'Remove existing Network PrivRybMarTest'
if_exec: '[[ `lxc network ls | grep PrivRybMarTest` ]] && exit 0 || exit 1'
network: 'PrivRybMarTest'
# Create new test containers
.lxd.cluster
header: 'Create new test Containers'
networks:
PubRybMarTest:
'ipv4.address': '172.16.0.1/24'
'ipv4.nat': true
'ipv6.address': 'none'
'dns.domain': 'nikita'
PrivRybMarTest:
'ipv4.address': '11.10.10.1/24'
'ipv4.nat': false
'ipv6.address': 'none'
'dns.domain': 'nikita'
containers:
node1:
image: 'images:centos/7'
disk:
nikitadir:
path: '/ryba'
source: path.join(__dirname,"./../../../")
nic:
eth0:
config: name: 'eth0', nictype: 'bridged', parent: 'PubRybMarTest'
eth1:
config: name: 'eth1', nictype: 'bridged', parent: 'PrivRybMarTest'
ip: '11.10.10.11', netmask: '255.255.255.0'
proxy:
ssh: listen: 'tcp:0.0.0.0:2201', connect: 'tcp:127.0.0.1:22'
ssh: enabled: true
user:
nikita: sudo: true, authorized_keys: path.join(__dirname,"./assets/id_rsa.pub")
node2:
image: 'images:centos/7'
disk:
nikitadir:
path: '/ryba'
source: path.join(__dirname,"./../../../")
nic:
eth0:
config: name: 'eth0', nictype: 'bridged', parent: 'PubRybMarTest'
eth1:
config: name: 'eth1', nictype: 'bridged', parent: 'PrivRybMarTest'
ip: '11.10.10.12', netmask: '255.255.255.0'
proxy:
ssh: listen: 'tcp:0.0.0.0:2202', connect: 'tcp:127.0.0.1:22'
ssh: enabled: true
user:
nikita: sudo: true, authorized_keys: path.join(__dirname,"./assets/id_rsa.pub")
prevision: ({options}) ->
@tools.ssh.keygen
header: 'SSH key'
target: path.join(__dirname,"./assets/id_rsa")
bits: 2048
key_format: 'PEM'
comment: 'nikita'
provision_container: ({options}) ->
nikita
.system.execute
header: 'Keys permissions'
debug: true
cmd: """
cd ./env/assets
chmod 777 id_rsa id_rsa.pub
"""
@lxd.exec
header: 'Node.js'
container: options.container
cmd: """
command -v node && exit 42
curl -L https://raw.githubusercontent.com/tj/n/master/bin/n -o n
bash n lts
"""
trap: true
code_skipped: 42
@lxd.file.push
header: 'User Private Key'
container: options.container
gid: 'nikita'
uid: 'nikita'
source: path.join(__dirname,"./assets/id_rsa")
target: '/home/nikita/.ssh/id_rsa'
@lxd.exec
header: 'Root SSH dir'
container: options.container
cmd: 'mkdir -p /root/.ssh && chmod 700 /root/.ssh'
@lxd.file.push
header: 'Root SSH Private Key'
container: options.container
gid: 'root'
uid: 'root'
source: path.join(__dirname,"./assets/id_rsa")
target: '/root/.ssh/id_rsa'
.next (err) ->
throw err if err
| 189210 | nikita = require 'nikita'
path = require 'path'
# Notes:
# SSH private and public keys will be generated in an "assets" directory
# inside the current working directory.
nikita
.log.cli pad: host: 20, header: 60
# Delete existing test containers
.lxd.delete
header: 'Remove existing node1'
if_exec: '[[ `lxc ls | grep node1` ]] && exit 0 || exit 1'
force: true
container: 'node1'
.lxd.delete
header: 'Remove existing node2'
if_exec: '[[ `lxc ls | grep node2` ]] && exit 0 || exit 1'
force: true
container: 'node2'
.lxd.network.delete
header: 'Remove existing Network PubRybMarTest'
if_exec: '[[ `lxc network ls | grep PubRybMarTest` ]] && exit 0 || exit 1'
network: 'PubRybMarTest'
.lxd.network.delete
header: 'Remove existing Network PrivRybMarTest'
if_exec: '[[ `lxc network ls | grep PrivRybMarTest` ]] && exit 0 || exit 1'
network: 'PrivRybMarTest'
# Create new test containers
.lxd.cluster
header: 'Create new test Containers'
networks:
PubRybMarTest:
'ipv4.address': '172.16.0.1/24'
'ipv4.nat': true
'ipv6.address': 'none'
'dns.domain': 'nikita'
PrivRybMarTest:
'ipv4.address': '192.168.127.12/24'
'ipv4.nat': false
'ipv6.address': 'none'
'dns.domain': 'nikita'
containers:
node1:
image: 'images:centos/7'
disk:
nikitadir:
path: '/ryba'
source: path.join(__dirname,"./../../../")
nic:
eth0:
config: name: 'eth0', nictype: 'bridged', parent: 'PubRybMarTest'
eth1:
config: name: 'eth1', nictype: 'bridged', parent: 'PrivRybMarTest'
ip: '11.10.10.11', netmask: '255.255.255.0'
proxy:
ssh: listen: 'tcp:0.0.0.0:2201', connect: 'tcp:127.0.0.1:22'
ssh: enabled: true
user:
nikita: sudo: true, authorized_keys: path.join(__dirname,"./assets/id_rsa.pub")
node2:
image: 'images:centos/7'
disk:
nikitadir:
path: '/ryba'
source: path.join(__dirname,"./../../../")
nic:
eth0:
config: name: 'eth0', nictype: 'bridged', parent: 'PubRybMarTest'
eth1:
config: name: 'eth1', nictype: 'bridged', parent: 'PrivRybMarTest'
ip: '11.10.10.12', netmask: '255.255.255.0'
proxy:
ssh: listen: 'tcp:0.0.0.0:2202', connect: 'tcp:127.0.0.1:22'
ssh: enabled: true
user:
nikita: sudo: true, authorized_keys: path.join(__dirname,"./assets/id_rsa.pub")
prevision: ({options}) ->
@tools.ssh.keygen
header: 'SSH key'
target: path.join(__dirname,"./assets/id_rsa")
bits: 2048
key_format: 'PEM'
comment: 'nikita'
provision_container: ({options}) ->
nikita
.system.execute
header: 'Keys permissions'
debug: true
cmd: """
cd ./env/assets
chmod 777 id_rsa id_rsa.pub
"""
@lxd.exec
header: 'Node.js'
container: options.container
cmd: """
command -v node && exit 42
curl -L https://raw.githubusercontent.com/tj/n/master/bin/n -o n
bash n lts
"""
trap: true
code_skipped: 42
@lxd.file.push
header: 'User Private Key'
container: options.container
gid: 'nikita'
uid: 'nikita'
source: path.join(__dirname,"./assets/id_rsa")
target: '/home/nikita/.ssh/id_rsa'
@lxd.exec
header: 'Root SSH dir'
container: options.container
cmd: 'mkdir -p /root/.ssh && chmod 700 /root/.ssh'
@lxd.file.push
header: 'Root SSH Private Key'
container: options.container
gid: 'root'
uid: 'root'
source: path.join(__dirname,"./assets/id_rsa")
target: '/root/.ssh/id_rsa'
.next (err) ->
throw err if err
| true | nikita = require 'nikita'
path = require 'path'
# Notes:
# SSH private and public keys will be generated in an "assets" directory
# inside the current working directory.
nikita
.log.cli pad: host: 20, header: 60
# Delete existing test containers
.lxd.delete
header: 'Remove existing node1'
if_exec: '[[ `lxc ls | grep node1` ]] && exit 0 || exit 1'
force: true
container: 'node1'
.lxd.delete
header: 'Remove existing node2'
if_exec: '[[ `lxc ls | grep node2` ]] && exit 0 || exit 1'
force: true
container: 'node2'
.lxd.network.delete
header: 'Remove existing Network PubRybMarTest'
if_exec: '[[ `lxc network ls | grep PubRybMarTest` ]] && exit 0 || exit 1'
network: 'PubRybMarTest'
.lxd.network.delete
header: 'Remove existing Network PrivRybMarTest'
if_exec: '[[ `lxc network ls | grep PrivRybMarTest` ]] && exit 0 || exit 1'
network: 'PrivRybMarTest'
# Create new test containers
.lxd.cluster
header: 'Create new test Containers'
networks:
PubRybMarTest:
'ipv4.address': '172.16.0.1/24'
'ipv4.nat': true
'ipv6.address': 'none'
'dns.domain': 'nikita'
PrivRybMarTest:
'ipv4.address': 'PI:IP_ADDRESS:192.168.127.12END_PI/24'
'ipv4.nat': false
'ipv6.address': 'none'
'dns.domain': 'nikita'
containers:
node1:
image: 'images:centos/7'
disk:
nikitadir:
path: '/ryba'
source: path.join(__dirname,"./../../../")
nic:
eth0:
config: name: 'eth0', nictype: 'bridged', parent: 'PubRybMarTest'
eth1:
config: name: 'eth1', nictype: 'bridged', parent: 'PrivRybMarTest'
ip: '11.10.10.11', netmask: '255.255.255.0'
proxy:
ssh: listen: 'tcp:0.0.0.0:2201', connect: 'tcp:127.0.0.1:22'
ssh: enabled: true
user:
nikita: sudo: true, authorized_keys: path.join(__dirname,"./assets/id_rsa.pub")
node2:
image: 'images:centos/7'
disk:
nikitadir:
path: '/ryba'
source: path.join(__dirname,"./../../../")
nic:
eth0:
config: name: 'eth0', nictype: 'bridged', parent: 'PubRybMarTest'
eth1:
config: name: 'eth1', nictype: 'bridged', parent: 'PrivRybMarTest'
ip: '11.10.10.12', netmask: '255.255.255.0'
proxy:
ssh: listen: 'tcp:0.0.0.0:2202', connect: 'tcp:127.0.0.1:22'
ssh: enabled: true
user:
nikita: sudo: true, authorized_keys: path.join(__dirname,"./assets/id_rsa.pub")
prevision: ({options}) ->
@tools.ssh.keygen
header: 'SSH key'
target: path.join(__dirname,"./assets/id_rsa")
bits: 2048
key_format: 'PEM'
comment: 'nikita'
provision_container: ({options}) ->
nikita
.system.execute
header: 'Keys permissions'
debug: true
cmd: """
cd ./env/assets
chmod 777 id_rsa id_rsa.pub
"""
@lxd.exec
header: 'Node.js'
container: options.container
cmd: """
command -v node && exit 42
curl -L https://raw.githubusercontent.com/tj/n/master/bin/n -o n
bash n lts
"""
trap: true
code_skipped: 42
@lxd.file.push
header: 'User Private Key'
container: options.container
gid: 'nikita'
uid: 'nikita'
source: path.join(__dirname,"./assets/id_rsa")
target: '/home/nikita/.ssh/id_rsa'
@lxd.exec
header: 'Root SSH dir'
container: options.container
cmd: 'mkdir -p /root/.ssh && chmod 700 /root/.ssh'
@lxd.file.push
header: 'Root SSH Private Key'
container: options.container
gid: 'root'
uid: 'root'
source: path.join(__dirname,"./assets/id_rsa")
target: '/root/.ssh/id_rsa'
.next (err) ->
throw err if err
|
[
{
"context": " {\n \"reset\": false,\n \"cursor\": \"nTZYLOcTQnyB7-Wc72M-kEAcBQdk2EjLaJIRupQWgDXmRwKWzuG5V4se2mvU7y",
"end": 207,
"score": 0.6247762441635132,
"start": 195,
"tag": "KEY",
"value": "nTZYLOcTQnyB"
},
{
"context": "\"reset\": false,\n \"cursor\... | test/src/fast/http/pulled_changes_test.coffee | noamraph/datastore-js | 64 | describe 'Dropbox.Http.PulledChanges', ->
describe '.parse', ->
describe 'on a sample response', ->
beforeEach ->
deltaInfo = {
"reset": false,
"cursor": "nTZYLOcTQnyB7-Wc72M-kEAcBQdk2EjLaJIRupQWgDXmRwKWzuG5V4se2mvU7yzXn4cZSJltoW4tpbqgy0Ezxh1b1p3ygp7wy-vdaYJusujnLAyEsKdYCHPZYZdZt7sQG0BopF2ufAuD56ijYbdX5DhMKe85MFqncnFDvNxSjsodEw-IkCfNZmagDmpOZCxmLqu71hLTApwhqO9-dhm-fk6KSYs-OZwRmVwOE2JAnJbWuifNiM8KwMz5sRBZ5FMJPDqXpOW5PqPCwbkAmKQACbNXFi0k1JuxulpDlQh3zMr3lyLMs-fmaDTTU355mY5xSAXK05Zgs5rPJ6lcaBOUmEBSXcPhxFDHk5NmAdA03Shq04t2_4bupzWX-txT84FmOLNncchl7ZDBCMwyrAzD2kCYOTu1_lhui0C-fiCZgZBKU4OyP6qrkdo4gZu3",
"has_more": true,
"entries": [
[
"/Getting_Started.pdf",
{
"size": "225.4KB",
"rev": "35e97029684fe",
"thumb_exists": true, # Changed to test hasThumbnail=true code.
"bytes": 230783,
"modified": "Tue, 19 Jul 2011 21:55:38 +0000",
"client_mtime": "Mon, 18 Jul 2011 18:04:35 +0000",
"path": "/Getting_Started.pdf",
"is_dir": false,
"icon": "page_white_acrobat",
"root": "app_folder", # Changed to test app_folder code path.
"mime_type": "application/pdf",
"revision": 220823
}
],
[
"/Public",
null
]
]
}
@changes = Dropbox.Http.PulledChanges.parse deltaInfo
it 'parses blankSlate correctly', ->
expect(@changes).to.have.property 'blankSlate'
expect(@changes.blankSlate).to.equal false
it 'parses cursorTag correctly', ->
expect(@changes).to.have.property 'cursorTag'
expect(@changes.cursorTag).to.equal 'nTZYLOcTQnyB7-Wc72M-kEAcBQdk2EjLaJIRupQWgDXmRwKWzuG5V4se2mvU7yzXn4cZSJltoW4tpbqgy0Ezxh1b1p3ygp7wy-vdaYJusujnLAyEsKdYCHPZYZdZt7sQG0BopF2ufAuD56ijYbdX5DhMKe85MFqncnFDvNxSjsodEw-IkCfNZmagDmpOZCxmLqu71hLTApwhqO9-dhm-fk6KSYs-OZwRmVwOE2JAnJbWuifNiM8KwMz5sRBZ5FMJPDqXpOW5PqPCwbkAmKQACbNXFi0k1JuxulpDlQh3zMr3lyLMs-fmaDTTU355mY5xSAXK05Zgs5rPJ6lcaBOUmEBSXcPhxFDHk5NmAdA03Shq04t2_4bupzWX-txT84FmOLNncchl7ZDBCMwyrAzD2kCYOTu1_lhui0C-fiCZgZBKU4OyP6qrkdo4gZu3'
it 'parses shouldPullAgain correctly', ->
expect(@changes).to.have.property 'shouldPullAgain'
expect(@changes.shouldPullAgain).to.equal true
it 'parses shouldBackOff correctly', ->
expect(@changes).to.have.property 'shouldBackOff'
expect(@changes.shouldBackOff).to.equal false
it 'parses changes correctly', ->
expect(@changes).to.have.property 'changes'
expect(@changes.changes).to.have.length 2
expect(@changes.changes[0]).to.be.instanceOf Dropbox.Http.PulledChange
expect(@changes.changes[0].path).to.equal '/Getting_Started.pdf'
expect(@changes.changes[1]).to.be.instanceOf Dropbox.Http.PulledChange
expect(@changes.changes[1].path).to.equal '/Public'
it 'passes null through', ->
expect(Dropbox.Http.PulledChanges.parse(null)).to.equal null
it 'passes undefined through', ->
expect(Dropbox.Http.PulledChanges.parse(undefined)).to.equal undefined
describe 'Dropbox.Http.PulledChange', ->
describe '.parse', ->
describe 'on a modification change', ->
beforeEach ->
entry = [
"/Getting_Started.pdf",
{
"size": "225.4KB",
"rev": "35e97029684fe",
"thumb_exists": true, # Changed to test hasThumbnail=true code.
"bytes": 230783,
"modified": "Tue, 19 Jul 2011 21:55:38 +0000",
"client_mtime": "Mon, 18 Jul 2011 18:04:35 +0000",
"path": "/Getting_Started.pdf",
"is_dir": false,
"icon": "page_white_acrobat",
"root": "app_folder", # Changed to test app_folder code path.
"mime_type": "application/pdf",
"revision": 220823
}
]
@changes = Dropbox.Http.PulledChange.parse entry
it 'parses path correctly', ->
expect(@changes).to.have.property 'path'
expect(@changes.path).to.equal '/Getting_Started.pdf'
it 'parses wasRemoved correctly', ->
expect(@changes).to.have.property 'wasRemoved'
expect(@changes.wasRemoved).to.equal false
it 'parses stat correctly', ->
expect(@changes).to.have.property 'stat'
expect(@changes.stat).to.be.instanceOf Dropbox.File.Stat
expect(@changes.stat.path).to.equal @changes.path
describe 'on a deletion change', ->
beforeEach ->
entry = [
"/Public",
null
]
@changes = Dropbox.Http.PulledChange.parse entry
it 'parses path correctly', ->
expect(@changes).to.have.property 'path'
expect(@changes.path).to.equal '/Public'
it 'parses wasRemoved correctly', ->
expect(@changes).to.have.property 'wasRemoved'
expect(@changes.wasRemoved).to.equal true
it 'parses stat correctly', ->
expect(@changes).to.have.property 'stat'
expect(@changes.stat).to.equal null
it 'passes null through', ->
expect(Dropbox.Http.PulledChange.parse(null)).to.equal null
it 'passes undefined through', ->
expect(Dropbox.Http.PulledChange.parse(undefined)).to.equal undefined
describe 'Dropbox.Http.PollResult', ->
describe '.parse', ->
describe 'on a timeout', ->
beforeEach ->
response = {"changes": false}
@result = Dropbox.Http.PollResult.parse response
it 'parses hasChanges correctly', ->
expect(@result).to.have.property 'hasChanges'
expect(@result.hasChanges).to.equal false
it 'parses retryAfter correctly', ->
expect(@result).to.have.property 'retryAfter'
expect(@result.retryAfter).to.equal 0
describe 'on a timeout with backoff', ->
beforeEach ->
response = {"changes": false, "backoff": 5}
@result = Dropbox.Http.PollResult.parse response
it 'parses hasChanges correctly', ->
expect(@result).to.have.property 'hasChanges'
expect(@result.hasChanges).to.equal false
it 'parses retryAfter correctly', ->
expect(@result).to.have.property 'retryAfter'
expect(@result.retryAfter).to.equal 5
describe 'on a change report', ->
beforeEach ->
response = {"changes": true}
@result = Dropbox.Http.PollResult.parse response
it 'parses hasChanges correctly', ->
expect(@result).to.have.property 'hasChanges'
expect(@result.hasChanges).to.equal true
it 'parses retryAfter correctly', ->
expect(@result).to.have.property 'retryAfter'
expect(@result.retryAfter).to.equal 0
it 'passes null through', ->
expect(Dropbox.Http.PollResult.parse(null)).to.equal null
it 'passes undefined through', ->
expect(Dropbox.Http.PollResult.parse(undefined)).to.equal undefined
| 127199 | describe 'Dropbox.Http.PulledChanges', ->
describe '.parse', ->
describe 'on a sample response', ->
beforeEach ->
deltaInfo = {
"reset": false,
"cursor": "<KEY>7-<KEY>",
"has_more": true,
"entries": [
[
"/Getting_Started.pdf",
{
"size": "225.4KB",
"rev": "35e97029684fe",
"thumb_exists": true, # Changed to test hasThumbnail=true code.
"bytes": 230783,
"modified": "Tue, 19 Jul 2011 21:55:38 +0000",
"client_mtime": "Mon, 18 Jul 2011 18:04:35 +0000",
"path": "/Getting_Started.pdf",
"is_dir": false,
"icon": "page_white_acrobat",
"root": "app_folder", # Changed to test app_folder code path.
"mime_type": "application/pdf",
"revision": 220823
}
],
[
"/Public",
null
]
]
}
@changes = Dropbox.Http.PulledChanges.parse deltaInfo
it 'parses blankSlate correctly', ->
expect(@changes).to.have.property 'blankSlate'
expect(@changes.blankSlate).to.equal false
it 'parses cursorTag correctly', ->
expect(@changes).to.have.property 'cursorTag'
expect(@changes.cursorTag).to.equal '<KEY>'
it 'parses shouldPullAgain correctly', ->
expect(@changes).to.have.property 'shouldPullAgain'
expect(@changes.shouldPullAgain).to.equal true
it 'parses shouldBackOff correctly', ->
expect(@changes).to.have.property 'shouldBackOff'
expect(@changes.shouldBackOff).to.equal false
it 'parses changes correctly', ->
expect(@changes).to.have.property 'changes'
expect(@changes.changes).to.have.length 2
expect(@changes.changes[0]).to.be.instanceOf Dropbox.Http.PulledChange
expect(@changes.changes[0].path).to.equal '/Getting_Started.pdf'
expect(@changes.changes[1]).to.be.instanceOf Dropbox.Http.PulledChange
expect(@changes.changes[1].path).to.equal '/Public'
it 'passes null through', ->
expect(Dropbox.Http.PulledChanges.parse(null)).to.equal null
it 'passes undefined through', ->
expect(Dropbox.Http.PulledChanges.parse(undefined)).to.equal undefined
describe 'Dropbox.Http.PulledChange', ->
describe '.parse', ->
describe 'on a modification change', ->
beforeEach ->
entry = [
"/Getting_Started.pdf",
{
"size": "225.4KB",
"rev": "35e97029684fe",
"thumb_exists": true, # Changed to test hasThumbnail=true code.
"bytes": 230783,
"modified": "Tue, 19 Jul 2011 21:55:38 +0000",
"client_mtime": "Mon, 18 Jul 2011 18:04:35 +0000",
"path": "/Getting_Started.pdf",
"is_dir": false,
"icon": "page_white_acrobat",
"root": "app_folder", # Changed to test app_folder code path.
"mime_type": "application/pdf",
"revision": 220823
}
]
@changes = Dropbox.Http.PulledChange.parse entry
it 'parses path correctly', ->
expect(@changes).to.have.property 'path'
expect(@changes.path).to.equal '/Getting_Started.pdf'
it 'parses wasRemoved correctly', ->
expect(@changes).to.have.property 'wasRemoved'
expect(@changes.wasRemoved).to.equal false
it 'parses stat correctly', ->
expect(@changes).to.have.property 'stat'
expect(@changes.stat).to.be.instanceOf Dropbox.File.Stat
expect(@changes.stat.path).to.equal @changes.path
describe 'on a deletion change', ->
beforeEach ->
entry = [
"/Public",
null
]
@changes = Dropbox.Http.PulledChange.parse entry
it 'parses path correctly', ->
expect(@changes).to.have.property 'path'
expect(@changes.path).to.equal '/Public'
it 'parses wasRemoved correctly', ->
expect(@changes).to.have.property 'wasRemoved'
expect(@changes.wasRemoved).to.equal true
it 'parses stat correctly', ->
expect(@changes).to.have.property 'stat'
expect(@changes.stat).to.equal null
it 'passes null through', ->
expect(Dropbox.Http.PulledChange.parse(null)).to.equal null
it 'passes undefined through', ->
expect(Dropbox.Http.PulledChange.parse(undefined)).to.equal undefined
describe 'Dropbox.Http.PollResult', ->
describe '.parse', ->
describe 'on a timeout', ->
beforeEach ->
response = {"changes": false}
@result = Dropbox.Http.PollResult.parse response
it 'parses hasChanges correctly', ->
expect(@result).to.have.property 'hasChanges'
expect(@result.hasChanges).to.equal false
it 'parses retryAfter correctly', ->
expect(@result).to.have.property 'retryAfter'
expect(@result.retryAfter).to.equal 0
describe 'on a timeout with backoff', ->
beforeEach ->
response = {"changes": false, "backoff": 5}
@result = Dropbox.Http.PollResult.parse response
it 'parses hasChanges correctly', ->
expect(@result).to.have.property 'hasChanges'
expect(@result.hasChanges).to.equal false
it 'parses retryAfter correctly', ->
expect(@result).to.have.property 'retryAfter'
expect(@result.retryAfter).to.equal 5
describe 'on a change report', ->
beforeEach ->
response = {"changes": true}
@result = Dropbox.Http.PollResult.parse response
it 'parses hasChanges correctly', ->
expect(@result).to.have.property 'hasChanges'
expect(@result.hasChanges).to.equal true
it 'parses retryAfter correctly', ->
expect(@result).to.have.property 'retryAfter'
expect(@result.retryAfter).to.equal 0
it 'passes null through', ->
expect(Dropbox.Http.PollResult.parse(null)).to.equal null
it 'passes undefined through', ->
expect(Dropbox.Http.PollResult.parse(undefined)).to.equal undefined
| true | describe 'Dropbox.Http.PulledChanges', ->
describe '.parse', ->
describe 'on a sample response', ->
beforeEach ->
deltaInfo = {
"reset": false,
"cursor": "PI:KEY:<KEY>END_PI7-PI:KEY:<KEY>END_PI",
"has_more": true,
"entries": [
[
"/Getting_Started.pdf",
{
"size": "225.4KB",
"rev": "35e97029684fe",
"thumb_exists": true, # Changed to test hasThumbnail=true code.
"bytes": 230783,
"modified": "Tue, 19 Jul 2011 21:55:38 +0000",
"client_mtime": "Mon, 18 Jul 2011 18:04:35 +0000",
"path": "/Getting_Started.pdf",
"is_dir": false,
"icon": "page_white_acrobat",
"root": "app_folder", # Changed to test app_folder code path.
"mime_type": "application/pdf",
"revision": 220823
}
],
[
"/Public",
null
]
]
}
@changes = Dropbox.Http.PulledChanges.parse deltaInfo
it 'parses blankSlate correctly', ->
expect(@changes).to.have.property 'blankSlate'
expect(@changes.blankSlate).to.equal false
it 'parses cursorTag correctly', ->
expect(@changes).to.have.property 'cursorTag'
expect(@changes.cursorTag).to.equal 'PI:KEY:<KEY>END_PI'
it 'parses shouldPullAgain correctly', ->
expect(@changes).to.have.property 'shouldPullAgain'
expect(@changes.shouldPullAgain).to.equal true
it 'parses shouldBackOff correctly', ->
expect(@changes).to.have.property 'shouldBackOff'
expect(@changes.shouldBackOff).to.equal false
it 'parses changes correctly', ->
expect(@changes).to.have.property 'changes'
expect(@changes.changes).to.have.length 2
expect(@changes.changes[0]).to.be.instanceOf Dropbox.Http.PulledChange
expect(@changes.changes[0].path).to.equal '/Getting_Started.pdf'
expect(@changes.changes[1]).to.be.instanceOf Dropbox.Http.PulledChange
expect(@changes.changes[1].path).to.equal '/Public'
it 'passes null through', ->
expect(Dropbox.Http.PulledChanges.parse(null)).to.equal null
it 'passes undefined through', ->
expect(Dropbox.Http.PulledChanges.parse(undefined)).to.equal undefined
describe 'Dropbox.Http.PulledChange', ->
describe '.parse', ->
describe 'on a modification change', ->
beforeEach ->
entry = [
"/Getting_Started.pdf",
{
"size": "225.4KB",
"rev": "35e97029684fe",
"thumb_exists": true, # Changed to test hasThumbnail=true code.
"bytes": 230783,
"modified": "Tue, 19 Jul 2011 21:55:38 +0000",
"client_mtime": "Mon, 18 Jul 2011 18:04:35 +0000",
"path": "/Getting_Started.pdf",
"is_dir": false,
"icon": "page_white_acrobat",
"root": "app_folder", # Changed to test app_folder code path.
"mime_type": "application/pdf",
"revision": 220823
}
]
@changes = Dropbox.Http.PulledChange.parse entry
it 'parses path correctly', ->
expect(@changes).to.have.property 'path'
expect(@changes.path).to.equal '/Getting_Started.pdf'
it 'parses wasRemoved correctly', ->
expect(@changes).to.have.property 'wasRemoved'
expect(@changes.wasRemoved).to.equal false
it 'parses stat correctly', ->
expect(@changes).to.have.property 'stat'
expect(@changes.stat).to.be.instanceOf Dropbox.File.Stat
expect(@changes.stat.path).to.equal @changes.path
describe 'on a deletion change', ->
beforeEach ->
entry = [
"/Public",
null
]
@changes = Dropbox.Http.PulledChange.parse entry
it 'parses path correctly', ->
expect(@changes).to.have.property 'path'
expect(@changes.path).to.equal '/Public'
it 'parses wasRemoved correctly', ->
expect(@changes).to.have.property 'wasRemoved'
expect(@changes.wasRemoved).to.equal true
it 'parses stat correctly', ->
expect(@changes).to.have.property 'stat'
expect(@changes.stat).to.equal null
it 'passes null through', ->
expect(Dropbox.Http.PulledChange.parse(null)).to.equal null
it 'passes undefined through', ->
expect(Dropbox.Http.PulledChange.parse(undefined)).to.equal undefined
describe 'Dropbox.Http.PollResult', ->
describe '.parse', ->
describe 'on a timeout', ->
beforeEach ->
response = {"changes": false}
@result = Dropbox.Http.PollResult.parse response
it 'parses hasChanges correctly', ->
expect(@result).to.have.property 'hasChanges'
expect(@result.hasChanges).to.equal false
it 'parses retryAfter correctly', ->
expect(@result).to.have.property 'retryAfter'
expect(@result.retryAfter).to.equal 0
describe 'on a timeout with backoff', ->
beforeEach ->
response = {"changes": false, "backoff": 5}
@result = Dropbox.Http.PollResult.parse response
it 'parses hasChanges correctly', ->
expect(@result).to.have.property 'hasChanges'
expect(@result.hasChanges).to.equal false
it 'parses retryAfter correctly', ->
expect(@result).to.have.property 'retryAfter'
expect(@result.retryAfter).to.equal 5
describe 'on a change report', ->
beforeEach ->
response = {"changes": true}
@result = Dropbox.Http.PollResult.parse response
it 'parses hasChanges correctly', ->
expect(@result).to.have.property 'hasChanges'
expect(@result.hasChanges).to.equal true
it 'parses retryAfter correctly', ->
expect(@result).to.have.property 'retryAfter'
expect(@result.retryAfter).to.equal 0
it 'passes null through', ->
expect(Dropbox.Http.PollResult.parse(null)).to.equal null
it 'passes undefined through', ->
expect(Dropbox.Http.PollResult.parse(undefined)).to.equal undefined
|
[
{
"context": "# Copyright 2013 Andrey Antukh <niwi@niwi.be>\n#\n# Licensed under the Apache Lice",
"end": 30,
"score": 0.9998833537101746,
"start": 17,
"tag": "NAME",
"value": "Andrey Antukh"
},
{
"context": "# Copyright 2013 Andrey Antukh <niwi@niwi.be>\n#\n# Licensed under the Apac... | mrfogg-front/app/coffee/plugins/confirm.coffee | PIWEEK/mrfogg | 0 | # Copyright 2013 Andrey Antukh <niwi@niwi.be>
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
SimpleConfirmProvider = ($rootScope, $q, $window) ->
service = {}
service.confirm = (message) ->
defered = $q.defer()
_.defer ->
res = $window.confirm(message)
if res
defered.resolve()
else
defered.reject()
$rootScope.$apply()
return defered.promise
return service
module = angular.module('gmConfirm', [])
module.factory('$confirm', ["$rootScope", "$q", "$window", SimpleConfirmProvider])
| 39284 | # Copyright 2013 <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
SimpleConfirmProvider = ($rootScope, $q, $window) ->
service = {}
service.confirm = (message) ->
defered = $q.defer()
_.defer ->
res = $window.confirm(message)
if res
defered.resolve()
else
defered.reject()
$rootScope.$apply()
return defered.promise
return service
module = angular.module('gmConfirm', [])
module.factory('$confirm', ["$rootScope", "$q", "$window", SimpleConfirmProvider])
| true | # Copyright 2013 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
SimpleConfirmProvider = ($rootScope, $q, $window) ->
service = {}
service.confirm = (message) ->
defered = $q.defer()
_.defer ->
res = $window.confirm(message)
if res
defered.resolve()
else
defered.reject()
$rootScope.$apply()
return defered.promise
return service
module = angular.module('gmConfirm', [])
module.factory('$confirm', ["$rootScope", "$q", "$window", SimpleConfirmProvider])
|
[
{
"context": "hammersport state. Danger: spammy.\n#\n# Author:\n# smashwilson\n\nChalkCircle = require './chalkcircle'\n_ = requir",
"end": 769,
"score": 0.9990968704223633,
"start": 758,
"tag": "USERNAME",
"value": "smashwilson"
},
{
"context": "someone = robot.brain.userForId '2'... | src/index.coffee | smashwilson/hubot-hammersport | 2 | # Description:
# Battle your way to the top in the chalk circle.
#
# Configuration:
# HUBOT_HAMMERSPORT_ROOMS - comma-separated list of rooms to restrict hammersport chatter to.
# HUBOT_HAMMERSPORT_MOVES - path to a file containing available moves.
#
# Commands:
# hubot hammersport <user> - Challenge another user to a duel
# hubot hammer accept - Accept a challenge.
# hubot hammer decline - Decline a challenge.
# hubot hammer <n> - Choose an attack during a hammersport round.
# hubot hammeradmin respawn <user>|everyone - Respawn a chosen user at full health.
# hubot hammeradmin kill <user>|everyone - Instakill a chosen user.
# hubot hammeradmin report <user> - Show a summary of hammersport state. Danger: spammy.
#
# Author:
# smashwilson
ChalkCircle = require './chalkcircle'
_ = require 'underscore'
ADMIN_ROLE = 'hammondsport mayor'
module.exports = (robot) ->
createTestUser = ->
someone = robot.brain.userForId '2',
name: 'someone'
room: 'thechalkcircle'
theCircle.getChallenger(someone)
theCircle = new ChalkCircle(robot)
robot.respond /hammersport (\S+)/i, (msg) ->
createTestUser() if process.env.HUBOT_DEBUG?
challengers = []
for username in [msg.message.user.name, msg.match[1]]
username = username.replace /^@/, ''
user = robot.brain.userForName username
if user?
challengers.push theCircle.getChallenger(user)
else
msg.reply "I don't know anyone named #{username}!
Notice that they have to speak first, for me to notice them."
return
for challenger in challengers
unless challenger.isAlive()
msg.reply "#{challenger.displayName()} is dead!"
return
if challenger.isInCombat()
msg.reply "#{challenger.displayName()} is already fighting!"
return
m = theCircle.startMatch(challengers)
m.challengeOffered msg
robot.respond /hammer accept/i, (msg) ->
theCircle.withActiveMatch msg, (m) -> m.challengeAccepted msg
robot.respond /hammer decline/i, (msg) ->
theCircle.withActiveMatch msg, (m) -> m.challengeDeclined msg
robot.respond /hammer (\d)/i, (msg) ->
theCircle.withActiveMatch msg, (m) -> m.chooseMove msg
isAdmin = (msg) ->
unless robot.auth.hasRole(msg.message.user, ADMIN_ROLE)
msg.reply "You can't do that! You're not a *#{ADMIN_ROLE}*."
return false
return true
challengersFrom = (msg) ->
username = msg.match[1]
if username? and username isnt 'everyone'
user = robot.brain.userForName username
unless user?
msg.reply "I don't know who #{username} is."
return
[theCircle.getChallenger(user)]
else
theCircle.allChallengers()
reportAction = (count, action) ->
if count is 1
verbPhrase = "challenger has"
else
verbPhrase = "challengers have"
"#{count} hammersport #{verbPhrase} been #{action}."
robot.respond /hammeradmin respawn @?(\w+)/i, (msg) ->
return unless isAdmin(msg)
challengers = challengersFrom(msg)
c.respawn() for c in challengers
msg.reply reportAction challengers.length, 'respawned'
robot.respond /hammeradmin kill @?(\w+)/i, (msg) ->
return unless isAdmin(msg)
challengers = challengersFrom(msg)
c.kill() for c in challengers
msg.reply reportAction challengers.length, 'killed'
robot.respond /hammeradmin report(?: @?(\w+))?/i, (msg) ->
return unless isAdmin(msg)
challengers = challengersFrom(msg)
sorted = _.sortBy challengers, (c) -> c.displayName()
lines = []
for c in sorted
line = "*#{c.displayName()}*: #{c.hp()}/#{c.maxHP()} HP #{c.exp()} EXP"
unless c.isAlive()
line += " _will respawn #{c.nextRespawn().fromNow()}_"
lines.push line
msg.send lines.join("\n")
if process.env.HUBOT_DEBUG?
robot.respond /dhammer accept/i, (msg) ->
user = robot.brain.userForName 'someone'
msg.message.user = user
theCircle.withActiveMatch msg, (m) -> m.challengeAccepted msg
robot.respond /dhammer (\d+)/i, (msg) ->
user = robot.brain.userForName 'someone'
msg.message.user = user
theCircle.withActiveMatch msg, (m) -> m.chooseMove msg
| 128351 | # Description:
# Battle your way to the top in the chalk circle.
#
# Configuration:
# HUBOT_HAMMERSPORT_ROOMS - comma-separated list of rooms to restrict hammersport chatter to.
# HUBOT_HAMMERSPORT_MOVES - path to a file containing available moves.
#
# Commands:
# hubot hammersport <user> - Challenge another user to a duel
# hubot hammer accept - Accept a challenge.
# hubot hammer decline - Decline a challenge.
# hubot hammer <n> - Choose an attack during a hammersport round.
# hubot hammeradmin respawn <user>|everyone - Respawn a chosen user at full health.
# hubot hammeradmin kill <user>|everyone - Instakill a chosen user.
# hubot hammeradmin report <user> - Show a summary of hammersport state. Danger: spammy.
#
# Author:
# smashwilson
ChalkCircle = require './chalkcircle'
_ = require 'underscore'
ADMIN_ROLE = 'hammondsport mayor'
module.exports = (robot) ->
createTestUser = ->
someone = robot.brain.userForId '2',
name: '<NAME>'
room: 'thechalkcircle'
theCircle.getChallenger(someone)
theCircle = new ChalkCircle(robot)
robot.respond /hammersport (\S+)/i, (msg) ->
createTestUser() if process.env.HUBOT_DEBUG?
challengers = []
for username in [msg.message.user.name, msg.match[1]]
username = username.replace /^@/, ''
user = robot.brain.userForName username
if user?
challengers.push theCircle.getChallenger(user)
else
msg.reply "I don't know anyone named #{username}!
Notice that they have to speak first, for me to notice them."
return
for challenger in challengers
unless challenger.isAlive()
msg.reply "#{challenger.displayName()} is dead!"
return
if challenger.isInCombat()
msg.reply "#{challenger.displayName()} is already fighting!"
return
m = theCircle.startMatch(challengers)
m.challengeOffered msg
robot.respond /hammer accept/i, (msg) ->
theCircle.withActiveMatch msg, (m) -> m.challengeAccepted msg
robot.respond /hammer decline/i, (msg) ->
theCircle.withActiveMatch msg, (m) -> m.challengeDeclined msg
robot.respond /hammer (\d)/i, (msg) ->
theCircle.withActiveMatch msg, (m) -> m.chooseMove msg
isAdmin = (msg) ->
unless robot.auth.hasRole(msg.message.user, ADMIN_ROLE)
msg.reply "You can't do that! You're not a *#{ADMIN_ROLE}*."
return false
return true
challengersFrom = (msg) ->
username = msg.match[1]
if username? and username isnt 'everyone'
user = robot.brain.userForName username
unless user?
msg.reply "I don't know who #{username} is."
return
[theCircle.getChallenger(user)]
else
theCircle.allChallengers()
reportAction = (count, action) ->
if count is 1
verbPhrase = "challenger has"
else
verbPhrase = "challengers have"
"#{count} hammersport #{verbPhrase} been #{action}."
robot.respond /hammeradmin respawn @?(\w+)/i, (msg) ->
return unless isAdmin(msg)
challengers = challengersFrom(msg)
c.respawn() for c in challengers
msg.reply reportAction challengers.length, 'respawned'
robot.respond /hammeradmin kill @?(\w+)/i, (msg) ->
return unless isAdmin(msg)
challengers = challengersFrom(msg)
c.kill() for c in challengers
msg.reply reportAction challengers.length, 'killed'
robot.respond /hammeradmin report(?: @?(\w+))?/i, (msg) ->
return unless isAdmin(msg)
challengers = challengersFrom(msg)
sorted = _.sortBy challengers, (c) -> c.displayName()
lines = []
for c in sorted
line = "*#{c.displayName()}*: #{c.hp()}/#{c.maxHP()} HP #{c.exp()} EXP"
unless c.isAlive()
line += " _will respawn #{c.nextRespawn().fromNow()}_"
lines.push line
msg.send lines.join("\n")
if process.env.HUBOT_DEBUG?
robot.respond /dhammer accept/i, (msg) ->
user = robot.brain.userForName 'someone'
msg.message.user = user
theCircle.withActiveMatch msg, (m) -> m.challengeAccepted msg
robot.respond /dhammer (\d+)/i, (msg) ->
user = robot.brain.userForName 'someone'
msg.message.user = user
theCircle.withActiveMatch msg, (m) -> m.chooseMove msg
| true | # Description:
# Battle your way to the top in the chalk circle.
#
# Configuration:
# HUBOT_HAMMERSPORT_ROOMS - comma-separated list of rooms to restrict hammersport chatter to.
# HUBOT_HAMMERSPORT_MOVES - path to a file containing available moves.
#
# Commands:
# hubot hammersport <user> - Challenge another user to a duel
# hubot hammer accept - Accept a challenge.
# hubot hammer decline - Decline a challenge.
# hubot hammer <n> - Choose an attack during a hammersport round.
# hubot hammeradmin respawn <user>|everyone - Respawn a chosen user at full health.
# hubot hammeradmin kill <user>|everyone - Instakill a chosen user.
# hubot hammeradmin report <user> - Show a summary of hammersport state. Danger: spammy.
#
# Author:
# smashwilson
ChalkCircle = require './chalkcircle'
_ = require 'underscore'
ADMIN_ROLE = 'hammondsport mayor'
module.exports = (robot) ->
createTestUser = ->
someone = robot.brain.userForId '2',
name: 'PI:NAME:<NAME>END_PI'
room: 'thechalkcircle'
theCircle.getChallenger(someone)
theCircle = new ChalkCircle(robot)
robot.respond /hammersport (\S+)/i, (msg) ->
createTestUser() if process.env.HUBOT_DEBUG?
challengers = []
for username in [msg.message.user.name, msg.match[1]]
username = username.replace /^@/, ''
user = robot.brain.userForName username
if user?
challengers.push theCircle.getChallenger(user)
else
msg.reply "I don't know anyone named #{username}!
Notice that they have to speak first, for me to notice them."
return
for challenger in challengers
unless challenger.isAlive()
msg.reply "#{challenger.displayName()} is dead!"
return
if challenger.isInCombat()
msg.reply "#{challenger.displayName()} is already fighting!"
return
m = theCircle.startMatch(challengers)
m.challengeOffered msg
robot.respond /hammer accept/i, (msg) ->
theCircle.withActiveMatch msg, (m) -> m.challengeAccepted msg
robot.respond /hammer decline/i, (msg) ->
theCircle.withActiveMatch msg, (m) -> m.challengeDeclined msg
robot.respond /hammer (\d)/i, (msg) ->
theCircle.withActiveMatch msg, (m) -> m.chooseMove msg
isAdmin = (msg) ->
unless robot.auth.hasRole(msg.message.user, ADMIN_ROLE)
msg.reply "You can't do that! You're not a *#{ADMIN_ROLE}*."
return false
return true
challengersFrom = (msg) ->
username = msg.match[1]
if username? and username isnt 'everyone'
user = robot.brain.userForName username
unless user?
msg.reply "I don't know who #{username} is."
return
[theCircle.getChallenger(user)]
else
theCircle.allChallengers()
reportAction = (count, action) ->
if count is 1
verbPhrase = "challenger has"
else
verbPhrase = "challengers have"
"#{count} hammersport #{verbPhrase} been #{action}."
robot.respond /hammeradmin respawn @?(\w+)/i, (msg) ->
return unless isAdmin(msg)
challengers = challengersFrom(msg)
c.respawn() for c in challengers
msg.reply reportAction challengers.length, 'respawned'
robot.respond /hammeradmin kill @?(\w+)/i, (msg) ->
return unless isAdmin(msg)
challengers = challengersFrom(msg)
c.kill() for c in challengers
msg.reply reportAction challengers.length, 'killed'
robot.respond /hammeradmin report(?: @?(\w+))?/i, (msg) ->
return unless isAdmin(msg)
challengers = challengersFrom(msg)
sorted = _.sortBy challengers, (c) -> c.displayName()
lines = []
for c in sorted
line = "*#{c.displayName()}*: #{c.hp()}/#{c.maxHP()} HP #{c.exp()} EXP"
unless c.isAlive()
line += " _will respawn #{c.nextRespawn().fromNow()}_"
lines.push line
msg.send lines.join("\n")
if process.env.HUBOT_DEBUG?
robot.respond /dhammer accept/i, (msg) ->
user = robot.brain.userForName 'someone'
msg.message.user = user
theCircle.withActiveMatch msg, (m) -> m.challengeAccepted msg
robot.respond /dhammer (\d+)/i, (msg) ->
user = robot.brain.userForName 'someone'
msg.message.user = user
theCircle.withActiveMatch msg, (m) -> m.chooseMove msg
|
[
{
"context": "space COOLSTRAP.Util\n * @class Core\n * \n * @author Abraham Barrera <abarrerac@gmail.com> || @abraham_barrera\n * Insp",
"end": 110,
"score": 0.9998941421508789,
"start": 95,
"tag": "NAME",
"value": "Abraham Barrera"
},
{
"context": "il\n * @class Core\n * \n * @autho... | coolstrap-core/app/assets/javascripts/coolstrap/util/_Coolstrap.Util.Core.coffee | cristianferrarig/coolstrap | 0 | ###
* Coolstrap Core functions
*
* @namespace COOLSTRAP.Util
* @class Core
*
* @author Abraham Barrera <abarrerac@gmail.com> || @abraham_barrera
* Inspired by LungoJS
###
COOLSTRAP.Util.Core = ((cool) ->
_toArray = (obj) ->
Array::slice.call obj, 0
_getType = (obj) ->
Object::toString.call(obj).match(/\s([a-z|A-Z]+)/)[1].toLowerCase()
###
* Executes callbacks based on the parameters received.
*
* @method execute
*
* @param {Function} callback to execute
###
execute = ->
args = _toArray(arguments)
callback = args.shift()
callback.apply null, args if _getType(callback) is "function"
###
* Mix two objects
*
* @method extend
*
* @param {object} arguments to mix them all into a new object.
* @return {object} child a new object with all the objects from the arguments mixed.
###
extend = ->
child = child or {}
arg = 0
len = arguments.length
while arg < len
argument = arguments[arg]
for prop of argument
child[prop] = argument[prop]
arg++
child
execute: execute
extend: extend
)(COOLSTRAP) | 56473 | ###
* Coolstrap Core functions
*
* @namespace COOLSTRAP.Util
* @class Core
*
* @author <NAME> <<EMAIL>> || @abraham_barrera
* Inspired by LungoJS
###
COOLSTRAP.Util.Core = ((cool) ->
_toArray = (obj) ->
Array::slice.call obj, 0
_getType = (obj) ->
Object::toString.call(obj).match(/\s([a-z|A-Z]+)/)[1].toLowerCase()
###
* Executes callbacks based on the parameters received.
*
* @method execute
*
* @param {Function} callback to execute
###
execute = ->
args = _toArray(arguments)
callback = args.shift()
callback.apply null, args if _getType(callback) is "function"
###
* Mix two objects
*
* @method extend
*
* @param {object} arguments to mix them all into a new object.
* @return {object} child a new object with all the objects from the arguments mixed.
###
extend = ->
child = child or {}
arg = 0
len = arguments.length
while arg < len
argument = arguments[arg]
for prop of argument
child[prop] = argument[prop]
arg++
child
execute: execute
extend: extend
)(COOLSTRAP) | true | ###
* Coolstrap Core functions
*
* @namespace COOLSTRAP.Util
* @class Core
*
* @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI> || @abraham_barrera
* Inspired by LungoJS
###
COOLSTRAP.Util.Core = ((cool) ->
_toArray = (obj) ->
Array::slice.call obj, 0
_getType = (obj) ->
Object::toString.call(obj).match(/\s([a-z|A-Z]+)/)[1].toLowerCase()
###
* Executes callbacks based on the parameters received.
*
* @method execute
*
* @param {Function} callback to execute
###
execute = ->
args = _toArray(arguments)
callback = args.shift()
callback.apply null, args if _getType(callback) is "function"
###
* Mix two objects
*
* @method extend
*
* @param {object} arguments to mix them all into a new object.
* @return {object} child a new object with all the objects from the arguments mixed.
###
extend = ->
child = child or {}
arg = 0
len = arguments.length
while arg < len
argument = arguments[arg]
for prop of argument
child[prop] = argument[prop]
arg++
child
execute: execute
extend: extend
)(COOLSTRAP) |
[
{
"context": "es(128)).digest('base64')\ntoken_key = (token) -> \"oauth:token:#{token}\"\n\nmodule.exports = class Storage extends r",
"end": 156,
"score": 0.8506656289100647,
"start": 143,
"tag": "KEY",
"value": "oauth:token:#"
}
] | lib/storage/redis.coffee | danielmwai/oauth2-node | 0 | crypto = require 'crypto'
gen_token = -> crypto.createHash('sha512').update(crypto.randomBytes(128)).digest('base64')
token_key = (token) -> "oauth:token:#{token}"
module.exports = class Storage extends require('../storage')
constructor: ->
super
@._accessor 'client'
throw new Error "Please provide 'client' for RedisStorage" unless @client
save_token_data: (data, done) ->
token = gen_token()
key = token_key token
json = JSON.stringify data
if data.expire or data.expire_at
expire = data.expire or (data.expire_at - Date.now())
@client.setex key, expire, json, (error) -> done(error, token)
else
@client.set key, json, (error) -> done(error, token)
get_token_data: (token, done) ->
@client.get token_key(token), (error, json) ->
return done error or "Invalid token" if error or !json
try
done null, JSON.parse(json)
catch error
done error
delete_token_data: (token, done) ->
@client.del token_key(token), done
| 69118 | crypto = require 'crypto'
gen_token = -> crypto.createHash('sha512').update(crypto.randomBytes(128)).digest('base64')
token_key = (token) -> "<KEY>{token}"
module.exports = class Storage extends require('../storage')
constructor: ->
super
@._accessor 'client'
throw new Error "Please provide 'client' for RedisStorage" unless @client
save_token_data: (data, done) ->
token = gen_token()
key = token_key token
json = JSON.stringify data
if data.expire or data.expire_at
expire = data.expire or (data.expire_at - Date.now())
@client.setex key, expire, json, (error) -> done(error, token)
else
@client.set key, json, (error) -> done(error, token)
get_token_data: (token, done) ->
@client.get token_key(token), (error, json) ->
return done error or "Invalid token" if error or !json
try
done null, JSON.parse(json)
catch error
done error
delete_token_data: (token, done) ->
@client.del token_key(token), done
| true | crypto = require 'crypto'
gen_token = -> crypto.createHash('sha512').update(crypto.randomBytes(128)).digest('base64')
token_key = (token) -> "PI:KEY:<KEY>END_PI{token}"
module.exports = class Storage extends require('../storage')
constructor: ->
super
@._accessor 'client'
throw new Error "Please provide 'client' for RedisStorage" unless @client
save_token_data: (data, done) ->
token = gen_token()
key = token_key token
json = JSON.stringify data
if data.expire or data.expire_at
expire = data.expire or (data.expire_at - Date.now())
@client.setex key, expire, json, (error) -> done(error, token)
else
@client.set key, json, (error) -> done(error, token)
get_token_data: (token, done) ->
@client.get token_key(token), (error, json) ->
return done error or "Invalid token" if error or !json
try
done null, JSON.parse(json)
catch error
done error
delete_token_data: (token, done) ->
@client.del token_key(token), done
|
[
{
"context": " 'read'\n\nawait read { prompt : \"What is your name, Droote?\" }, defer err, r1\nawait setTimeout defer(), 10\na",
"end": 92,
"score": 0.9997985363006592,
"start": 86,
"tag": "NAME",
"value": "Droote"
}
] | node_modules/iced-expect/test/bin/p1.iced | AngelKey/Angelkey.nodeclient | 151 | #!/usr/bin/env iced
read = require 'read'
await read { prompt : "What is your name, Droote?" }, defer err, r1
await setTimeout defer(), 10
await read {prompt : "Have you seen Jabbers?", silent : true }, defer err, r2
await setTimeout defer(), 20
await read { prompt: "Love those dogs" }, defer err, r3
await setTimeout defer(), 30
if r3 is 'yes'
for i in [0..4]
await read { prompt : "You good?" }, defer err, good
await setTimeout defer(), 2
console.log [ r1, r2, r3 ].join (":")
process.exit 0 | 89076 | #!/usr/bin/env iced
read = require 'read'
await read { prompt : "What is your name, <NAME>?" }, defer err, r1
await setTimeout defer(), 10
await read {prompt : "Have you seen Jabbers?", silent : true }, defer err, r2
await setTimeout defer(), 20
await read { prompt: "Love those dogs" }, defer err, r3
await setTimeout defer(), 30
if r3 is 'yes'
for i in [0..4]
await read { prompt : "You good?" }, defer err, good
await setTimeout defer(), 2
console.log [ r1, r2, r3 ].join (":")
process.exit 0 | true | #!/usr/bin/env iced
read = require 'read'
await read { prompt : "What is your name, PI:NAME:<NAME>END_PI?" }, defer err, r1
await setTimeout defer(), 10
await read {prompt : "Have you seen Jabbers?", silent : true }, defer err, r2
await setTimeout defer(), 20
await read { prompt: "Love those dogs" }, defer err, r3
await setTimeout defer(), 30
if r3 is 'yes'
for i in [0..4]
await read { prompt : "You good?" }, defer err, good
await setTimeout defer(), 2
console.log [ r1, r2, r3 ].join (":")
process.exit 0 |
[
{
"context": "ope.parentWidget\n\n setting = {}\n setting.key = \"chart-filters\"\n setting.isInitialized = false\n\n setting.initi",
"end": 209,
"score": 0.9895198941230774,
"start": 196,
"tag": "KEY",
"value": "chart-filters"
}
] | src/components/widgets-settings/chart-filters/chart-filters.directive.coffee | agranado2k/impac-angular | 7 | module = angular.module('impac.components.widgets-settings.chart-filters',[])
module.controller('SettingChartFiltersCtrl', ($scope) ->
w = $scope.parentWidget
setting = {}
setting.key = "chart-filters"
setting.isInitialized = false
setting.initialize = ->
if w.content? && w.content.chart_filter? && $scope.filterCriteria = w.content.chart_filter.criteria
$scope.maxEntities = w.content.chart_filter.max
$scope.entityType = w.content.chart_filter.entity_type
$scope.filterLabel = w.content.chart_filter.filter_label.replace(/_/g," ")
if $scope.filterCriteria == "number"
$scope.filterValuePercentage = 80
$scope.filterValueNumber = w.content.chart_filter.value
else
$scope.filterValuePercentage = w.content.chart_filter.value
$scope.filterValueNumber = Math.round($scope.maxEntities/2)
setting.isInitialized = true
setting.toMetadata = ->
if w.content? && w.content.chart_filter?
if $scope.filterCriteria == "percentage"
filterValue = $scope.filterValuePercentage
else
filterValue = $scope.filterValueNumber
return { chart_filter: {criteria: $scope.filterCriteria, value: filterValue} }
else
return {}
w.settings.push(setting)
# Setting is ready: trigger load content
# ------------------------------------
$scope.deferred.resolve($scope.parentWidget)
)
module.directive('settingChartFilters', ($templateCache) ->
return {
restrict: 'A',
scope: {
parentWidget: '='
deferred: '='
},
template: $templateCache.get('widgets-settings/chart-filters.tmpl.html'),
controller: 'SettingChartFiltersCtrl'
}
)
| 11053 | module = angular.module('impac.components.widgets-settings.chart-filters',[])
module.controller('SettingChartFiltersCtrl', ($scope) ->
w = $scope.parentWidget
setting = {}
setting.key = "<KEY>"
setting.isInitialized = false
setting.initialize = ->
if w.content? && w.content.chart_filter? && $scope.filterCriteria = w.content.chart_filter.criteria
$scope.maxEntities = w.content.chart_filter.max
$scope.entityType = w.content.chart_filter.entity_type
$scope.filterLabel = w.content.chart_filter.filter_label.replace(/_/g," ")
if $scope.filterCriteria == "number"
$scope.filterValuePercentage = 80
$scope.filterValueNumber = w.content.chart_filter.value
else
$scope.filterValuePercentage = w.content.chart_filter.value
$scope.filterValueNumber = Math.round($scope.maxEntities/2)
setting.isInitialized = true
setting.toMetadata = ->
if w.content? && w.content.chart_filter?
if $scope.filterCriteria == "percentage"
filterValue = $scope.filterValuePercentage
else
filterValue = $scope.filterValueNumber
return { chart_filter: {criteria: $scope.filterCriteria, value: filterValue} }
else
return {}
w.settings.push(setting)
# Setting is ready: trigger load content
# ------------------------------------
$scope.deferred.resolve($scope.parentWidget)
)
module.directive('settingChartFilters', ($templateCache) ->
return {
restrict: 'A',
scope: {
parentWidget: '='
deferred: '='
},
template: $templateCache.get('widgets-settings/chart-filters.tmpl.html'),
controller: 'SettingChartFiltersCtrl'
}
)
| true | module = angular.module('impac.components.widgets-settings.chart-filters',[])
module.controller('SettingChartFiltersCtrl', ($scope) ->
w = $scope.parentWidget
setting = {}
setting.key = "PI:KEY:<KEY>END_PI"
setting.isInitialized = false
setting.initialize = ->
if w.content? && w.content.chart_filter? && $scope.filterCriteria = w.content.chart_filter.criteria
$scope.maxEntities = w.content.chart_filter.max
$scope.entityType = w.content.chart_filter.entity_type
$scope.filterLabel = w.content.chart_filter.filter_label.replace(/_/g," ")
if $scope.filterCriteria == "number"
$scope.filterValuePercentage = 80
$scope.filterValueNumber = w.content.chart_filter.value
else
$scope.filterValuePercentage = w.content.chart_filter.value
$scope.filterValueNumber = Math.round($scope.maxEntities/2)
setting.isInitialized = true
setting.toMetadata = ->
if w.content? && w.content.chart_filter?
if $scope.filterCriteria == "percentage"
filterValue = $scope.filterValuePercentage
else
filterValue = $scope.filterValueNumber
return { chart_filter: {criteria: $scope.filterCriteria, value: filterValue} }
else
return {}
w.settings.push(setting)
# Setting is ready: trigger load content
# ------------------------------------
$scope.deferred.resolve($scope.parentWidget)
)
module.directive('settingChartFilters', ($templateCache) ->
return {
restrict: 'A',
scope: {
parentWidget: '='
deferred: '='
},
template: $templateCache.get('widgets-settings/chart-filters.tmpl.html'),
controller: 'SettingChartFiltersCtrl'
}
)
|
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.9999159574508667,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/coffee/_classes/beatmap-helper.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @BeatmapHelper
@default: ({group, items, mode}) =>
if items?
return _.findLast(items, (i) -> !i.deleted_at? && !i.convert) ? _.last(items)
return unless group?
modes = if mode? then [mode] else @modes
for mode in modes
beatmap = @default items: group[mode]
return beatmap if beatmap?
@find: ({group, id, mode}) =>
modes = if mode? then [mode] else @modes
for mode in modes
item = _.find group[mode], id: id
return item if item?
@getDiffRating: (rating) ->
if rating < 2
'easy'
else if rating < 2.7
'normal'
else if rating < 4
'hard'
else if rating < 5.3
'insane'
else if rating < 6.5
'expert'
else
'expert-plus'
@group: (beatmaps) =>
grouped = _.groupBy beatmaps, 'mode'
for own mode, items of grouped
grouped[mode] = @sort items
grouped
@modes: ['osu', 'taiko', 'fruits', 'mania']
@sort: (beatmaps) ->
if beatmaps[0].mode == 'mania'
_.orderBy beatmaps, ['convert', 'cs', 'difficulty_rating'], ['desc', 'asc', 'asc']
else
_.orderBy beatmaps, ['convert', 'difficulty_rating'], ['desc', 'asc']
| 124458 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @BeatmapHelper
@default: ({group, items, mode}) =>
if items?
return _.findLast(items, (i) -> !i.deleted_at? && !i.convert) ? _.last(items)
return unless group?
modes = if mode? then [mode] else @modes
for mode in modes
beatmap = @default items: group[mode]
return beatmap if beatmap?
@find: ({group, id, mode}) =>
modes = if mode? then [mode] else @modes
for mode in modes
item = _.find group[mode], id: id
return item if item?
@getDiffRating: (rating) ->
if rating < 2
'easy'
else if rating < 2.7
'normal'
else if rating < 4
'hard'
else if rating < 5.3
'insane'
else if rating < 6.5
'expert'
else
'expert-plus'
@group: (beatmaps) =>
grouped = _.groupBy beatmaps, 'mode'
for own mode, items of grouped
grouped[mode] = @sort items
grouped
@modes: ['osu', 'taiko', 'fruits', 'mania']
@sort: (beatmaps) ->
if beatmaps[0].mode == 'mania'
_.orderBy beatmaps, ['convert', 'cs', 'difficulty_rating'], ['desc', 'asc', 'asc']
else
_.orderBy beatmaps, ['convert', 'difficulty_rating'], ['desc', 'asc']
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @BeatmapHelper
@default: ({group, items, mode}) =>
if items?
return _.findLast(items, (i) -> !i.deleted_at? && !i.convert) ? _.last(items)
return unless group?
modes = if mode? then [mode] else @modes
for mode in modes
beatmap = @default items: group[mode]
return beatmap if beatmap?
@find: ({group, id, mode}) =>
modes = if mode? then [mode] else @modes
for mode in modes
item = _.find group[mode], id: id
return item if item?
@getDiffRating: (rating) ->
if rating < 2
'easy'
else if rating < 2.7
'normal'
else if rating < 4
'hard'
else if rating < 5.3
'insane'
else if rating < 6.5
'expert'
else
'expert-plus'
@group: (beatmaps) =>
grouped = _.groupBy beatmaps, 'mode'
for own mode, items of grouped
grouped[mode] = @sort items
grouped
@modes: ['osu', 'taiko', 'fruits', 'mania']
@sort: (beatmaps) ->
if beatmaps[0].mode == 'mania'
_.orderBy beatmaps, ['convert', 'cs', 'difficulty_rating'], ['desc', 'asc', 'asc']
else
_.orderBy beatmaps, ['convert', 'difficulty_rating'], ['desc', 'asc']
|
[
{
"context": "ame] for name, p of @pairs\n\n toDict: ->\n name: @name\n pairs: (name for name, obj of @pairs)\n\ncl",
"end": 1600,
"score": 0.662643551826477,
"start": 1600,
"tag": "NAME",
"value": ""
}
] | src/server.coffee | robert-corlett/Log.io | 1 | ### Log.io Log Server
Relays inbound log messages to web clients
LogServer receives log messages via TCP:
"+log|my_stream|my_server_host|info|this is a log message\r\n"
Announce a node, optionally with stream associations
"+node|my_server_host\r\n"
"+node|my_server_host|my_stream1,my_stream2,my_stream3\r\n"
Announce a stream, optionally with node associations
"+stream|my_stream1\r\n"
"+stream|my_stream1|my_server_host1,my_host_server2\r\n"
Remove a node or stream
"-node|my_server_host1\r\n"
"-stream|stream2\r\n"
WebServer listens for events emitted by LogServer and
forwards them to web clients via socket.io
# Usage:
logServer = new LogServer port: 28777
webServer = new WebServer logServer, port: 28778
webServer.run()
###
fs = require 'fs'
net = require 'net'
http = require 'http'
https = require 'https'
io = require 'socket.io'
events = require 'events'
winston = require 'winston'
express = require 'express'
class _LogObject
_type: 'object'
_pclass: ->
_pcollection: ->
constructor: (@logServer, @name, _pairs=[]) ->
@logServer.emit "add_#{@_type}", @
@pairs = {}
@pclass = @_pclass()
@pcollection = @_pcollection()
@addPair pname for pname in _pairs
addPair: (pname) ->
if not pair = @pairs[pname]
if not pair = @pcollection[pname]
pair = @pcollection[pname] = new @pclass @logServer, pname
pair.pairs[@name] = @
@pairs[pname] = pair
@logServer.emit "add_#{@_type}_pair", @, pname
remove: ->
@logServer.emit "remove_#{@_type}", @
delete p.pairs[@name] for name, p of @pairs
toDict: ->
name: @name
pairs: (name for name, obj of @pairs)
class LogNode extends _LogObject
_type: 'node'
_pclass: -> LogStream
_pcollection: -> @logServer.logStreams
class LogStream extends _LogObject
_type: 'stream'
_pclass: -> LogNode
_pcollection: -> @logServer.logNodes
###
LogServer listens for TCP connections. It parses & validates
inbound TCP messages, and emits events.
###
class LogServer extends events.EventEmitter
constructor: (config={}) ->
{@host, @port} = config
@_log = config.logging ? winston
@_delimiter = config.delimiter ? '\r\n'
@logNodes = {}
@logStreams = {}
run: ->
# Create TCP listener socket
@listener = net.createServer (socket) =>
socket._buffer = ''
socket.on 'data', (data) => @_receive data, socket
socket.on 'error', (e) =>
@_log.error 'Lost TCP connection...'
@_removeNode socket.node.name if socket.node
@listener.listen @port, @host
_receive: (data, socket) =>
part = data.toString()
socket._buffer += part
@_log.debug "Received TCP message: #{part}"
@_flush socket if socket._buffer.indexOf @_delimiter >= 0
_flush: (socket) =>
# Handle messages in socket buffer
# Pause socket while modifying buffer
socket.pause()
[msgs..., socket._buffer] = socket._buffer.split @_delimiter
socket.resume()
@_handle socket, msg for msg in msgs
_handle: (socket, msg) ->
@_log.debug "Handling message: #{msg}"
[mtype, args...] = msg.split '|'
switch mtype
when '+log' then @_newLog args...
when '+node' then @_addNode args...
when '+stream' then @_addStream args...
when '-node' then @_removeNode args...
when '-stream' then @_removeStream args...
when '+bind' then @_bindNode socket, args...
else @_log.error "Invalid TCP message: #{msg}"
_addNode: (nname, snames='') ->
@__add nname, snames, @logNodes, LogNode, 'node'
_addStream: (sname, nnames='') ->
@__add sname, nnames, @logStreams, LogStream, 'stream'
_removeNode: (nname) ->
@__remove nname, @logNodes, 'node'
_removeStream: (sname) ->
@__remove sname, @logStreams, 'stream'
_newLog: (sname, nname, logLevel, message...) ->
message = message.join '|'
@_log.debug "Log message: (#{sname}, #{nname}, #{logLevel}) #{message}"
node = @logNodes[nname] or @_addNode nname, sname
stream = @logStreams[sname] or @_addStream sname, nname
@emit 'new_log', stream, node, logLevel, message
__add: (name, pnames, _collection, _objClass, objName) ->
@_log.info "Adding #{objName}: #{name} (#{pnames})"
pnames = pnames.split ','
obj = _collection[name] = _collection[name] or new _objClass @, name, pnames
obj.addPair p for p in pnames when not obj.pairs[p]
__remove: (name, _collection, objType) ->
if obj = _collection[name]
@_log.info "Removing #{objType}: #{name}"
obj.remove()
delete _collection[name]
_bindNode: (socket, obj, nname) ->
if node = @logNodes[nname]
@_log.info "Binding node '#{nname}' to TCP socket"
socket.node = node
setInterval (-> socket.write 'ping'), 2000
###
WebServer relays LogServer events to web clients via socket.io.
###
class WebServer
constructor: (@logServer, config) ->
{@host, @port, @auth} = config
{@logNodes, @logStreams} = @logServer
@restrictSocket = config.restrictSocket ? '*:*'
@_log = config.logging ? winston
# Create express server
app = @_buildServer config
@http = @_createServer config, app
_buildServer: (config) ->
app = express()
if @auth?
app.use express.basicAuth @auth.user, @auth.pass
if config.restrictHTTP
ips = new RegExp config.restrictHTTP.join '|'
app.all '/', (req, res, next) =>
if not req.ip.match ips
return res.send 403, "Your IP (#{req.ip}) is not allowed."
next()
staticPath = config.staticPath ? __dirname + '/../'
app.use express.static staticPath
_createServer: (config, app) ->
if config.ssl
return https.createServer {
key: fs.readFileSync config.ssl.key
cert: fs.readFileSync config.ssl.cert
}, app
else
return http.createServer app
run: ->
@_log.info 'Starting Log.io Web Server...'
@logServer.run()
io = io.listen @http.listen @port, @host
io.set 'log level', 1
io.set 'origins', @restrictSocket
@listener = io.sockets
_on = (args...) => @logServer.on args...
_emit = (_event, msg) =>
@_log.debug "Relaying: #{_event}"
@listener.emit _event, msg
# Bind events from LogServer to web client
_on 'add_node', (node) ->
_emit 'add_node', node.toDict()
_on 'add_stream', (stream) ->
_emit 'add_stream', stream.toDict()
_on 'add_stream_pair', (stream, nname) ->
_emit 'add_pair', {stream: stream.name, node: nname}
_on 'add_node_pair', (node, sname) ->
_emit 'add_pair', {stream: sname, node: node.name}
_on 'remove_node', (node) ->
_emit 'remove_node', node.toDict()
_on 'remove_stream', (stream) ->
_emit 'remove_stream', stream.toDict()
# Bind new log event from Logserver to web client
_on 'new_log', (stream, node, level, message) =>
_emit 'ping', {stream: stream.name, node: node.name}
# Only send message to web clients watching logStream
@listener.in("#{stream.name}:#{node.name}").emit 'new_log',
stream: stream.name
node: node.name
level: level
message: message
# Bind web client connection, events to web server
@listener.on 'connection', (wclient) =>
wclient.emit 'add_node', node.toDict() for n, node of @logNodes
wclient.emit 'add_stream', stream.toDict() for s, stream of @logStreams
for n, node of @logNodes
for s, stream of node.pairs
wclient.emit 'add_pair', {stream: s, node: n}
wclient.emit 'initialized'
wclient.on 'watch', (pid) ->
wclient.join pid
wclient.on 'unwatch', (pid) ->
wclient.leave pid
@_log.info 'Server started, listening...'
exports.LogServer = LogServer
exports.WebServer = WebServer
| 113027 | ### Log.io Log Server
Relays inbound log messages to web clients
LogServer receives log messages via TCP:
"+log|my_stream|my_server_host|info|this is a log message\r\n"
Announce a node, optionally with stream associations
"+node|my_server_host\r\n"
"+node|my_server_host|my_stream1,my_stream2,my_stream3\r\n"
Announce a stream, optionally with node associations
"+stream|my_stream1\r\n"
"+stream|my_stream1|my_server_host1,my_host_server2\r\n"
Remove a node or stream
"-node|my_server_host1\r\n"
"-stream|stream2\r\n"
WebServer listens for events emitted by LogServer and
forwards them to web clients via socket.io
# Usage:
logServer = new LogServer port: 28777
webServer = new WebServer logServer, port: 28778
webServer.run()
###
fs = require 'fs'
net = require 'net'
http = require 'http'
https = require 'https'
io = require 'socket.io'
events = require 'events'
winston = require 'winston'
express = require 'express'
class _LogObject
_type: 'object'
_pclass: ->
_pcollection: ->
constructor: (@logServer, @name, _pairs=[]) ->
@logServer.emit "add_#{@_type}", @
@pairs = {}
@pclass = @_pclass()
@pcollection = @_pcollection()
@addPair pname for pname in _pairs
addPair: (pname) ->
if not pair = @pairs[pname]
if not pair = @pcollection[pname]
pair = @pcollection[pname] = new @pclass @logServer, pname
pair.pairs[@name] = @
@pairs[pname] = pair
@logServer.emit "add_#{@_type}_pair", @, pname
remove: ->
@logServer.emit "remove_#{@_type}", @
delete p.pairs[@name] for name, p of @pairs
toDict: ->
name:<NAME> @name
pairs: (name for name, obj of @pairs)
class LogNode extends _LogObject
_type: 'node'
_pclass: -> LogStream
_pcollection: -> @logServer.logStreams
class LogStream extends _LogObject
_type: 'stream'
_pclass: -> LogNode
_pcollection: -> @logServer.logNodes
###
LogServer listens for TCP connections. It parses & validates
inbound TCP messages, and emits events.
###
class LogServer extends events.EventEmitter
constructor: (config={}) ->
{@host, @port} = config
@_log = config.logging ? winston
@_delimiter = config.delimiter ? '\r\n'
@logNodes = {}
@logStreams = {}
run: ->
# Create TCP listener socket
@listener = net.createServer (socket) =>
socket._buffer = ''
socket.on 'data', (data) => @_receive data, socket
socket.on 'error', (e) =>
@_log.error 'Lost TCP connection...'
@_removeNode socket.node.name if socket.node
@listener.listen @port, @host
_receive: (data, socket) =>
part = data.toString()
socket._buffer += part
@_log.debug "Received TCP message: #{part}"
@_flush socket if socket._buffer.indexOf @_delimiter >= 0
_flush: (socket) =>
# Handle messages in socket buffer
# Pause socket while modifying buffer
socket.pause()
[msgs..., socket._buffer] = socket._buffer.split @_delimiter
socket.resume()
@_handle socket, msg for msg in msgs
_handle: (socket, msg) ->
@_log.debug "Handling message: #{msg}"
[mtype, args...] = msg.split '|'
switch mtype
when '+log' then @_newLog args...
when '+node' then @_addNode args...
when '+stream' then @_addStream args...
when '-node' then @_removeNode args...
when '-stream' then @_removeStream args...
when '+bind' then @_bindNode socket, args...
else @_log.error "Invalid TCP message: #{msg}"
_addNode: (nname, snames='') ->
@__add nname, snames, @logNodes, LogNode, 'node'
_addStream: (sname, nnames='') ->
@__add sname, nnames, @logStreams, LogStream, 'stream'
_removeNode: (nname) ->
@__remove nname, @logNodes, 'node'
_removeStream: (sname) ->
@__remove sname, @logStreams, 'stream'
_newLog: (sname, nname, logLevel, message...) ->
message = message.join '|'
@_log.debug "Log message: (#{sname}, #{nname}, #{logLevel}) #{message}"
node = @logNodes[nname] or @_addNode nname, sname
stream = @logStreams[sname] or @_addStream sname, nname
@emit 'new_log', stream, node, logLevel, message
__add: (name, pnames, _collection, _objClass, objName) ->
@_log.info "Adding #{objName}: #{name} (#{pnames})"
pnames = pnames.split ','
obj = _collection[name] = _collection[name] or new _objClass @, name, pnames
obj.addPair p for p in pnames when not obj.pairs[p]
__remove: (name, _collection, objType) ->
if obj = _collection[name]
@_log.info "Removing #{objType}: #{name}"
obj.remove()
delete _collection[name]
_bindNode: (socket, obj, nname) ->
if node = @logNodes[nname]
@_log.info "Binding node '#{nname}' to TCP socket"
socket.node = node
setInterval (-> socket.write 'ping'), 2000
###
WebServer relays LogServer events to web clients via socket.io.
###
class WebServer
constructor: (@logServer, config) ->
{@host, @port, @auth} = config
{@logNodes, @logStreams} = @logServer
@restrictSocket = config.restrictSocket ? '*:*'
@_log = config.logging ? winston
# Create express server
app = @_buildServer config
@http = @_createServer config, app
_buildServer: (config) ->
app = express()
if @auth?
app.use express.basicAuth @auth.user, @auth.pass
if config.restrictHTTP
ips = new RegExp config.restrictHTTP.join '|'
app.all '/', (req, res, next) =>
if not req.ip.match ips
return res.send 403, "Your IP (#{req.ip}) is not allowed."
next()
staticPath = config.staticPath ? __dirname + '/../'
app.use express.static staticPath
_createServer: (config, app) ->
if config.ssl
return https.createServer {
key: fs.readFileSync config.ssl.key
cert: fs.readFileSync config.ssl.cert
}, app
else
return http.createServer app
run: ->
@_log.info 'Starting Log.io Web Server...'
@logServer.run()
io = io.listen @http.listen @port, @host
io.set 'log level', 1
io.set 'origins', @restrictSocket
@listener = io.sockets
_on = (args...) => @logServer.on args...
_emit = (_event, msg) =>
@_log.debug "Relaying: #{_event}"
@listener.emit _event, msg
# Bind events from LogServer to web client
_on 'add_node', (node) ->
_emit 'add_node', node.toDict()
_on 'add_stream', (stream) ->
_emit 'add_stream', stream.toDict()
_on 'add_stream_pair', (stream, nname) ->
_emit 'add_pair', {stream: stream.name, node: nname}
_on 'add_node_pair', (node, sname) ->
_emit 'add_pair', {stream: sname, node: node.name}
_on 'remove_node', (node) ->
_emit 'remove_node', node.toDict()
_on 'remove_stream', (stream) ->
_emit 'remove_stream', stream.toDict()
# Bind new log event from Logserver to web client
_on 'new_log', (stream, node, level, message) =>
_emit 'ping', {stream: stream.name, node: node.name}
# Only send message to web clients watching logStream
@listener.in("#{stream.name}:#{node.name}").emit 'new_log',
stream: stream.name
node: node.name
level: level
message: message
# Bind web client connection, events to web server
@listener.on 'connection', (wclient) =>
wclient.emit 'add_node', node.toDict() for n, node of @logNodes
wclient.emit 'add_stream', stream.toDict() for s, stream of @logStreams
for n, node of @logNodes
for s, stream of node.pairs
wclient.emit 'add_pair', {stream: s, node: n}
wclient.emit 'initialized'
wclient.on 'watch', (pid) ->
wclient.join pid
wclient.on 'unwatch', (pid) ->
wclient.leave pid
@_log.info 'Server started, listening...'
exports.LogServer = LogServer
exports.WebServer = WebServer
| true | ### Log.io Log Server
Relays inbound log messages to web clients
LogServer receives log messages via TCP:
"+log|my_stream|my_server_host|info|this is a log message\r\n"
Announce a node, optionally with stream associations
"+node|my_server_host\r\n"
"+node|my_server_host|my_stream1,my_stream2,my_stream3\r\n"
Announce a stream, optionally with node associations
"+stream|my_stream1\r\n"
"+stream|my_stream1|my_server_host1,my_host_server2\r\n"
Remove a node or stream
"-node|my_server_host1\r\n"
"-stream|stream2\r\n"
WebServer listens for events emitted by LogServer and
forwards them to web clients via socket.io
# Usage:
logServer = new LogServer port: 28777
webServer = new WebServer logServer, port: 28778
webServer.run()
###
fs = require 'fs'
net = require 'net'
http = require 'http'
https = require 'https'
io = require 'socket.io'
events = require 'events'
winston = require 'winston'
express = require 'express'
class _LogObject
_type: 'object'
_pclass: ->
_pcollection: ->
constructor: (@logServer, @name, _pairs=[]) ->
@logServer.emit "add_#{@_type}", @
@pairs = {}
@pclass = @_pclass()
@pcollection = @_pcollection()
@addPair pname for pname in _pairs
addPair: (pname) ->
if not pair = @pairs[pname]
if not pair = @pcollection[pname]
pair = @pcollection[pname] = new @pclass @logServer, pname
pair.pairs[@name] = @
@pairs[pname] = pair
@logServer.emit "add_#{@_type}_pair", @, pname
remove: ->
@logServer.emit "remove_#{@_type}", @
delete p.pairs[@name] for name, p of @pairs
toDict: ->
name:PI:NAME:<NAME>END_PI @name
pairs: (name for name, obj of @pairs)
class LogNode extends _LogObject
_type: 'node'
_pclass: -> LogStream
_pcollection: -> @logServer.logStreams
class LogStream extends _LogObject
_type: 'stream'
_pclass: -> LogNode
_pcollection: -> @logServer.logNodes
###
LogServer listens for TCP connections. It parses & validates
inbound TCP messages, and emits events.
###
class LogServer extends events.EventEmitter
constructor: (config={}) ->
{@host, @port} = config
@_log = config.logging ? winston
@_delimiter = config.delimiter ? '\r\n'
@logNodes = {}
@logStreams = {}
run: ->
# Create TCP listener socket
@listener = net.createServer (socket) =>
socket._buffer = ''
socket.on 'data', (data) => @_receive data, socket
socket.on 'error', (e) =>
@_log.error 'Lost TCP connection...'
@_removeNode socket.node.name if socket.node
@listener.listen @port, @host
_receive: (data, socket) =>
part = data.toString()
socket._buffer += part
@_log.debug "Received TCP message: #{part}"
@_flush socket if socket._buffer.indexOf @_delimiter >= 0
_flush: (socket) =>
# Handle messages in socket buffer
# Pause socket while modifying buffer
socket.pause()
[msgs..., socket._buffer] = socket._buffer.split @_delimiter
socket.resume()
@_handle socket, msg for msg in msgs
_handle: (socket, msg) ->
@_log.debug "Handling message: #{msg}"
[mtype, args...] = msg.split '|'
switch mtype
when '+log' then @_newLog args...
when '+node' then @_addNode args...
when '+stream' then @_addStream args...
when '-node' then @_removeNode args...
when '-stream' then @_removeStream args...
when '+bind' then @_bindNode socket, args...
else @_log.error "Invalid TCP message: #{msg}"
_addNode: (nname, snames='') ->
@__add nname, snames, @logNodes, LogNode, 'node'
_addStream: (sname, nnames='') ->
@__add sname, nnames, @logStreams, LogStream, 'stream'
_removeNode: (nname) ->
@__remove nname, @logNodes, 'node'
_removeStream: (sname) ->
@__remove sname, @logStreams, 'stream'
_newLog: (sname, nname, logLevel, message...) ->
message = message.join '|'
@_log.debug "Log message: (#{sname}, #{nname}, #{logLevel}) #{message}"
node = @logNodes[nname] or @_addNode nname, sname
stream = @logStreams[sname] or @_addStream sname, nname
@emit 'new_log', stream, node, logLevel, message
__add: (name, pnames, _collection, _objClass, objName) ->
@_log.info "Adding #{objName}: #{name} (#{pnames})"
pnames = pnames.split ','
obj = _collection[name] = _collection[name] or new _objClass @, name, pnames
obj.addPair p for p in pnames when not obj.pairs[p]
__remove: (name, _collection, objType) ->
if obj = _collection[name]
@_log.info "Removing #{objType}: #{name}"
obj.remove()
delete _collection[name]
_bindNode: (socket, obj, nname) ->
if node = @logNodes[nname]
@_log.info "Binding node '#{nname}' to TCP socket"
socket.node = node
setInterval (-> socket.write 'ping'), 2000
###
WebServer relays LogServer events to web clients via socket.io.
###
class WebServer
constructor: (@logServer, config) ->
{@host, @port, @auth} = config
{@logNodes, @logStreams} = @logServer
@restrictSocket = config.restrictSocket ? '*:*'
@_log = config.logging ? winston
# Create express server
app = @_buildServer config
@http = @_createServer config, app
_buildServer: (config) ->
app = express()
if @auth?
app.use express.basicAuth @auth.user, @auth.pass
if config.restrictHTTP
ips = new RegExp config.restrictHTTP.join '|'
app.all '/', (req, res, next) =>
if not req.ip.match ips
return res.send 403, "Your IP (#{req.ip}) is not allowed."
next()
staticPath = config.staticPath ? __dirname + '/../'
app.use express.static staticPath
_createServer: (config, app) ->
if config.ssl
return https.createServer {
key: fs.readFileSync config.ssl.key
cert: fs.readFileSync config.ssl.cert
}, app
else
return http.createServer app
run: ->
@_log.info 'Starting Log.io Web Server...'
@logServer.run()
io = io.listen @http.listen @port, @host
io.set 'log level', 1
io.set 'origins', @restrictSocket
@listener = io.sockets
_on = (args...) => @logServer.on args...
_emit = (_event, msg) =>
@_log.debug "Relaying: #{_event}"
@listener.emit _event, msg
# Bind events from LogServer to web client
_on 'add_node', (node) ->
_emit 'add_node', node.toDict()
_on 'add_stream', (stream) ->
_emit 'add_stream', stream.toDict()
_on 'add_stream_pair', (stream, nname) ->
_emit 'add_pair', {stream: stream.name, node: nname}
_on 'add_node_pair', (node, sname) ->
_emit 'add_pair', {stream: sname, node: node.name}
_on 'remove_node', (node) ->
_emit 'remove_node', node.toDict()
_on 'remove_stream', (stream) ->
_emit 'remove_stream', stream.toDict()
# Bind new log event from Logserver to web client
_on 'new_log', (stream, node, level, message) =>
_emit 'ping', {stream: stream.name, node: node.name}
# Only send message to web clients watching logStream
@listener.in("#{stream.name}:#{node.name}").emit 'new_log',
stream: stream.name
node: node.name
level: level
message: message
# Bind web client connection, events to web server
@listener.on 'connection', (wclient) =>
wclient.emit 'add_node', node.toDict() for n, node of @logNodes
wclient.emit 'add_stream', stream.toDict() for s, stream of @logStreams
for n, node of @logNodes
for s, stream of node.pairs
wclient.emit 'add_pair', {stream: s, node: n}
wclient.emit 'initialized'
wclient.on 'watch', (pid) ->
wclient.join pid
wclient.on 'unwatch', (pid) ->
wclient.leave pid
@_log.info 'Server started, listening...'
exports.LogServer = LogServer
exports.WebServer = WebServer
|
[
{
"context": " trooper = new Trooper(@pos.x, @pos.y, {name: 'Trooper'})\n me.game.add(trooper, @_given_z)\n ",
"end": 466,
"score": 0.9863381385803223,
"start": 459,
"tag": "NAME",
"value": "Trooper"
}
] | src/places/trooperGenerator.coffee | commandojs/CommandoJS | 41 | TrooperGenerator = Generator.extend(
_given_z: null
init: (x, y, settings) ->
@parent(x, y, settings)
@_given_z = Utils.parseNumber(@settings.z, the_hero.z)
numberOfActiveTroopers: ->
ret = 0
for tr in me.game.getEntityByName('trooper')
ret++ if tr.inViewport()
ret
generate: ->
return false if @numberOfActiveTroopers() >= 6
trooper = new Trooper(@pos.x, @pos.y, {name: 'Trooper'})
me.game.add(trooper, @_given_z)
Utils.sortEntities()
true
)
| 7700 | TrooperGenerator = Generator.extend(
_given_z: null
init: (x, y, settings) ->
@parent(x, y, settings)
@_given_z = Utils.parseNumber(@settings.z, the_hero.z)
numberOfActiveTroopers: ->
ret = 0
for tr in me.game.getEntityByName('trooper')
ret++ if tr.inViewport()
ret
generate: ->
return false if @numberOfActiveTroopers() >= 6
trooper = new Trooper(@pos.x, @pos.y, {name: '<NAME>'})
me.game.add(trooper, @_given_z)
Utils.sortEntities()
true
)
| true | TrooperGenerator = Generator.extend(
_given_z: null
init: (x, y, settings) ->
@parent(x, y, settings)
@_given_z = Utils.parseNumber(@settings.z, the_hero.z)
numberOfActiveTroopers: ->
ret = 0
for tr in me.game.getEntityByName('trooper')
ret++ if tr.inViewport()
ret
generate: ->
return false if @numberOfActiveTroopers() >= 6
trooper = new Trooper(@pos.x, @pos.y, {name: 'PI:NAME:<NAME>END_PI'})
me.game.add(trooper, @_given_z)
Utils.sortEntities()
true
)
|
[
{
"context": "laylists: =>\n init_req = @users_playlists_url(@user_id, limit: 50)\n @recursive_get_playlists(init_req",
"end": 878,
"score": 0.9448311924934387,
"start": 871,
"tag": "USERNAME",
"value": "user_id"
},
{
"context": "hed\n ###\n get_playlist_tracks: (playlist_id,... | front/coffee/spotify_client.coffee | dropofwill/upm-ui | 0 | "use strict"
app = window.config_app()
class SpotifyClient
constructor: ->
@spotify_api_host = "https://api.spotify.com/v1"
@echo_api_host = "https://developer.echonest.com/api/v4"
# echonest rate limiting is annoying
@max_playlist_size = 2
@access = get_access()
@echo_key = get_echo()
@refresh = get_refresh()
@user_id = get_user()
@user_playlists = []
@echo_tracks = []
@spotify_tracks = []
@track_ids = []
@key_list = [ "C", "C♯", "D", "E♭", "E", "F", "F♯",
"G", "A♭", "A", "B♭", "B"]
@mode_list = [ "Min.", "Maj." ]
###
# Retrieve all of the logged in users' playlists, making multiple requests as
# necessary.
# Triggers the 'upm:playlistLoad' event when finished
###
get_users_playlists: =>
init_req = @users_playlists_url(@user_id, limit: 50)
@recursive_get_playlists(init_req)
###
# Retrieve all of the tracks from a given playlists, making multiple
# requests as necessary. If the playlist isn't owned by the current logged in
# user the owners id is required as a parameter,
# Triggers the 'upm:tracksLoad' event when finished
###
get_playlist_tracks: (playlist_id, uid=@user_id) =>
@current_tracks = []
init_req = @playlist_tracks_url(uid, playlist_id, limit: 75)
@recursive_get_tracks(init_req)
###
# Creates a playlist and upon completion add the current tracks to it
# Currently throwing a 403 despite correct user scopes, may be a bug in the
# Spotify API
###
create_playlist: (playlist_id) =>
create_url = @create_playlist_url()
track_ids = _.map($('#js-playlist-table tbody tr'), (el) ->
$(el).attr("id"))
@post_create_playlist(create_url, playlist_id, (res) =>
tracks_url = @add_playlist_url(res.id)
@post_tracks_to_playlist(tracks_url, track_ids)
)
logger: (res) -> console.log(res, @current_tracks)
###
# Reduce the spotify response to just an array of track objects
# Extract just the track_ids
# Make an ajax request to echonest for the meta data
# Triggers the 'upm:echoLoad' event when finished
###
get_echo_track_data: (spotify_playlist_res) =>
@spotify_tracks = reduce_spotify_tracks(spotify_playlist_res)
@track_ids = _.pluck(@spotify_tracks, 'uri')
@get_echo_audio_summary(@echo_tracks_url(@track_ids))
###
# Takes an array of track objects and returns an array of track uris
###
pluck_ids: (tracks) -> _.pluck(tracks, 'uri')
###
# Takes a parsed response and renders a lodash template of html
###
render_playlists: (playlists_res) =>
app.templates.user_playlists(process_playlists(playlists_res))
###
# Takes a parsed response and renders a lodash template of html
###
render_playlist: (playlist_res) =>
data = {}
data.head = app.templates.track_head()
data.body = _.reduce(playlist_res,
(template, track) -> template + "\n" + app.templates.track(track))
app.templates.table_shell(data)
###
# Can't stringify track_ids because they would be duplicate hash keys
#
# http://developer.echonest.com/api/v4/song/profile?api_key=DDP9J5HAUE4JGKHOS&format=json&track_id=spotify:track:3L7BcXHCG8uT92viO6Tikl&track_id=spotify:track:4sgd8Oe36YeA1YpCzPBjiC&bucket=audio_summary
###
echo_tracks_url: (track_ids) =>
base =
api_key: @echo_key
format: 'json'
bucket: 'audio_summary'
tracks_qs = "&track_id=#{track_ids.join("&track_id=")}"
api_url("#{@echo_api_host}/song/profile", base) + tracks_qs
###
# Generate the urls for each of the API requests
###
users_playlists_url: (user_id, qs_obj=null) ->
api_url("#{@spotify_api_host}/users/#{user_id}/playlists", qs_obj)
playlist_tracks_url: (user_id, playlist_id, qs_obj=null) =>
api_url(
"#{@spotify_api_host}/users/#{user_id}/playlists/#{playlist_id}/tracks",
qs_obj)
create_playlist_url: =>
api_url("#{@spotify_api_host}/users/#{@user_id}/playlists")
add_playlist_url: (playlist_id) =>
api_url("#{@spotify_api_host}/users/#{@user_id}/playlists/#{playlist_id}/tracks")
###
# Get audio summary data from the Echonest API
###
get_echo_audio_summary: (req_url) =>
$.ajax(
url: req_url
success: (res) =>
@echo_tracks = res.response.songs
$(window).trigger('upm:echoLoad'))
###
# As long as there is another page of playlists and we aren't at our limit
# make another request.
# Save the result in an instance variable this.user_playlists
# When its done fire the custom event 'upm:playlistsLoad'
###
recursive_get_playlists: (req_url) =>
$.ajax(
url: req_url
headers: auth_header(@access)
success: (res) =>
@user_playlists.push(res.items)
if res.next? and @user_playlists.length < @max_iterations
@recursive_get_playlists(res.next)
else
$(window).trigger('upm:playlistsLoad'))
###
# As long as there is another page of tracks and we aren't at our limit
# make another request.
# Save the result in an instance variable this.current_tracks
# When its done fire the custom event 'upm:tracksLoad'
###
recursive_get_tracks: (req_url) =>
$.ajax(
url: req_url
headers: auth_header(@access)
success: (res) =>
@current_tracks.push(res.items)
if res.next? and @user_playlists.length < @max_iterations
@recursive_get_tracks(res.next)
else
$(window).trigger('upm:tracksLoad'))
###
# Post request to create a new *empty* playlist of the given name
# Takes a callback function that fires on completion since you probably
# want to add some tracks.
###
post_create_playlist: (req_url, name, callback) =>
$.ajax(
url: req_url
method: 'POST'
data: JSON.stringify({"name": name, "public": true})
contentType: 'application/json'
dataType: 'json'
headers: auth_header(@access)
success: (res) => callback(res))
###
# Post request to create add a list of tracks (based on the spotify uri)
# to a given playlist
###
post_tracks_to_playlist: (req_url, list_of_ids) =>
$.ajax(
url: req_url
method: 'POST'
data: JSON.stringify({"uris": list_of_ids})
contentType: 'application/json'
dataType: 'json'
headers:
'Authorization': 'Bearer ' + @access
succes: (res) => console.log("Added tracks"))
###
# Parse the json response to only have objects with the name, id, and owner
# attributes to be passed to the view
###
process_playlists = (playlists_res) =>
data = _.chain(playlists_res)
.flatten()
.map((playlist) -> _.pick(playlist, 'name', 'id', 'owner'))
.value()
###
# Parse the json response to only have the track objects
###
reduce_spotify_tracks = (playlist_res) ->
_.chain(playlist_res)
.flatten()
.map((track) -> _.get(track, 'track'))
.value()
###
# Merge responses from echonest and spotify and convert data into a more
# human-readable format for the data table.
###
merge_echo_spotify: (spotify_t=@spotify_tracks, echo_t=@echo_tracks) =>
self = this
merged = _.merge(spotify_t,
_.map(echo_t, (track) -> _.get(track, 'audio_summary')))
_.map(merged, (o) ->
_.forEach(o, (v,k) ->
switch k
when 'key' then _.set(o, k, self.key_list[v])
when 'mode' then _.set(o, k, self.mode_list[v])
when 'artists' then _.set(o, k, _.get(_.first(v), 'name'))
when 'duration' then _.set(o, k, seconds_to_s(v))
when 'tempo' then _.set(o, k, parseInt(v))
when 'valence', 'energy', 'danceability', 'acousticness', 'liveness'
_.set(o, k, decimal_to_per(v))
)
)
###
# Private helper method for encoding an OAuth 2.0 Bearer header
###
auth_header = (access) => 'Authorization': 'Bearer ' + access
###
# Private helper method for building an api endpoint call, with and optional
# query string object
###
api_url = (endpoint, qs_obj=null) ->
if qs_obj?
qs = Url.stringify(qs_obj)
endpoint = "#{endpoint}?#{qs}"
endpoint
app.SpotifyClient = SpotifyClient
| 138249 | "use strict"
app = window.config_app()
class SpotifyClient
constructor: ->
@spotify_api_host = "https://api.spotify.com/v1"
@echo_api_host = "https://developer.echonest.com/api/v4"
# echonest rate limiting is annoying
@max_playlist_size = 2
@access = get_access()
@echo_key = get_echo()
@refresh = get_refresh()
@user_id = get_user()
@user_playlists = []
@echo_tracks = []
@spotify_tracks = []
@track_ids = []
@key_list = [ "C", "C♯", "D", "E♭", "E", "F", "F♯",
"G", "A♭", "A", "B♭", "B"]
@mode_list = [ "Min.", "Maj." ]
###
# Retrieve all of the logged in users' playlists, making multiple requests as
# necessary.
# Triggers the 'upm:playlistLoad' event when finished
###
get_users_playlists: =>
init_req = @users_playlists_url(@user_id, limit: 50)
@recursive_get_playlists(init_req)
###
# Retrieve all of the tracks from a given playlists, making multiple
# requests as necessary. If the playlist isn't owned by the current logged in
# user the owners id is required as a parameter,
# Triggers the 'upm:tracksLoad' event when finished
###
get_playlist_tracks: (playlist_id, uid=@user_id) =>
@current_tracks = []
init_req = @playlist_tracks_url(uid, playlist_id, limit: 75)
@recursive_get_tracks(init_req)
###
# Creates a playlist and upon completion add the current tracks to it
# Currently throwing a 403 despite correct user scopes, may be a bug in the
# Spotify API
###
create_playlist: (playlist_id) =>
create_url = @create_playlist_url()
track_ids = _.map($('#js-playlist-table tbody tr'), (el) ->
$(el).attr("id"))
@post_create_playlist(create_url, playlist_id, (res) =>
tracks_url = @add_playlist_url(res.id)
@post_tracks_to_playlist(tracks_url, track_ids)
)
logger: (res) -> console.log(res, @current_tracks)
###
# Reduce the spotify response to just an array of track objects
# Extract just the track_ids
# Make an ajax request to echonest for the meta data
# Triggers the 'upm:echoLoad' event when finished
###
get_echo_track_data: (spotify_playlist_res) =>
@spotify_tracks = reduce_spotify_tracks(spotify_playlist_res)
@track_ids = _.pluck(@spotify_tracks, 'uri')
@get_echo_audio_summary(@echo_tracks_url(@track_ids))
###
# Takes an array of track objects and returns an array of track uris
###
pluck_ids: (tracks) -> _.pluck(tracks, 'uri')
###
# Takes a parsed response and renders a lodash template of html
###
render_playlists: (playlists_res) =>
app.templates.user_playlists(process_playlists(playlists_res))
###
# Takes a parsed response and renders a lodash template of html
###
render_playlist: (playlist_res) =>
data = {}
data.head = app.templates.track_head()
data.body = _.reduce(playlist_res,
(template, track) -> template + "\n" + app.templates.track(track))
app.templates.table_shell(data)
###
# Can't stringify track_ids because they would be duplicate hash keys
#
# http://developer.echonest.com/api/v4/song/profile?api_key=<KEY>&format=json&track_id=spotify:track:3L7BcXHCG8uT92viO6Tikl&track_id=spotify:track:4sgd8Oe36YeA1YpCzPBjiC&bucket=audio_summary
###
echo_tracks_url: (track_ids) =>
base =
api_key: @echo_key
format: 'json'
bucket: 'audio_summary'
tracks_qs = "&track_id=#{track_ids.join("&track_id=")}"
api_url("#{@echo_api_host}/song/profile", base) + tracks_qs
###
# Generate the urls for each of the API requests
###
users_playlists_url: (user_id, qs_obj=null) ->
api_url("#{@spotify_api_host}/users/#{user_id}/playlists", qs_obj)
playlist_tracks_url: (user_id, playlist_id, qs_obj=null) =>
api_url(
"#{@spotify_api_host}/users/#{user_id}/playlists/#{playlist_id}/tracks",
qs_obj)
create_playlist_url: =>
api_url("#{@spotify_api_host}/users/#{@user_id}/playlists")
add_playlist_url: (playlist_id) =>
api_url("#{@spotify_api_host}/users/#{@user_id}/playlists/#{playlist_id}/tracks")
###
# Get audio summary data from the Echonest API
###
get_echo_audio_summary: (req_url) =>
$.ajax(
url: req_url
success: (res) =>
@echo_tracks = res.response.songs
$(window).trigger('upm:echoLoad'))
###
# As long as there is another page of playlists and we aren't at our limit
# make another request.
# Save the result in an instance variable this.user_playlists
# When its done fire the custom event 'upm:playlistsLoad'
###
recursive_get_playlists: (req_url) =>
$.ajax(
url: req_url
headers: auth_header(@access)
success: (res) =>
@user_playlists.push(res.items)
if res.next? and @user_playlists.length < @max_iterations
@recursive_get_playlists(res.next)
else
$(window).trigger('upm:playlistsLoad'))
###
# As long as there is another page of tracks and we aren't at our limit
# make another request.
# Save the result in an instance variable this.current_tracks
# When its done fire the custom event 'upm:tracksLoad'
###
recursive_get_tracks: (req_url) =>
$.ajax(
url: req_url
headers: auth_header(@access)
success: (res) =>
@current_tracks.push(res.items)
if res.next? and @user_playlists.length < @max_iterations
@recursive_get_tracks(res.next)
else
$(window).trigger('upm:tracksLoad'))
###
# Post request to create a new *empty* playlist of the given name
# Takes a callback function that fires on completion since you probably
# want to add some tracks.
###
post_create_playlist: (req_url, name, callback) =>
$.ajax(
url: req_url
method: 'POST'
data: JSON.stringify({"name": name, "public": true})
contentType: 'application/json'
dataType: 'json'
headers: auth_header(@access)
success: (res) => callback(res))
###
# Post request to create add a list of tracks (based on the spotify uri)
# to a given playlist
###
post_tracks_to_playlist: (req_url, list_of_ids) =>
$.ajax(
url: req_url
method: 'POST'
data: JSON.stringify({"uris": list_of_ids})
contentType: 'application/json'
dataType: 'json'
headers:
'Authorization': 'Bearer ' + @access
succes: (res) => console.log("Added tracks"))
###
# Parse the json response to only have objects with the name, id, and owner
# attributes to be passed to the view
###
process_playlists = (playlists_res) =>
data = _.chain(playlists_res)
.flatten()
.map((playlist) -> _.pick(playlist, 'name', 'id', 'owner'))
.value()
###
# Parse the json response to only have the track objects
###
reduce_spotify_tracks = (playlist_res) ->
_.chain(playlist_res)
.flatten()
.map((track) -> _.get(track, 'track'))
.value()
###
# Merge responses from echonest and spotify and convert data into a more
# human-readable format for the data table.
###
merge_echo_spotify: (spotify_t=@spotify_tracks, echo_t=@echo_tracks) =>
self = this
merged = _.merge(spotify_t,
_.map(echo_t, (track) -> _.get(track, 'audio_summary')))
_.map(merged, (o) ->
_.forEach(o, (v,k) ->
switch k
when 'key' then _.set(o, k, self.key_list[v])
when 'mode' then _.set(o, k, self.mode_list[v])
when 'artists' then _.set(o, k, _.get(_.first(v), 'name'))
when 'duration' then _.set(o, k, seconds_to_s(v))
when 'tempo' then _.set(o, k, parseInt(v))
when 'valence', 'energy', 'danceability', 'acousticness', 'liveness'
_.set(o, k, decimal_to_per(v))
)
)
###
# Private helper method for encoding an OAuth 2.0 Bearer header
###
auth_header = (access) => 'Authorization': 'Bearer ' + access
###
# Private helper method for building an api endpoint call, with and optional
# query string object
###
api_url = (endpoint, qs_obj=null) ->
if qs_obj?
qs = Url.stringify(qs_obj)
endpoint = "#{endpoint}?#{qs}"
endpoint
app.SpotifyClient = SpotifyClient
| true | "use strict"
app = window.config_app()
class SpotifyClient
constructor: ->
@spotify_api_host = "https://api.spotify.com/v1"
@echo_api_host = "https://developer.echonest.com/api/v4"
# echonest rate limiting is annoying
@max_playlist_size = 2
@access = get_access()
@echo_key = get_echo()
@refresh = get_refresh()
@user_id = get_user()
@user_playlists = []
@echo_tracks = []
@spotify_tracks = []
@track_ids = []
@key_list = [ "C", "C♯", "D", "E♭", "E", "F", "F♯",
"G", "A♭", "A", "B♭", "B"]
@mode_list = [ "Min.", "Maj." ]
###
# Retrieve all of the logged in users' playlists, making multiple requests as
# necessary.
# Triggers the 'upm:playlistLoad' event when finished
###
get_users_playlists: =>
init_req = @users_playlists_url(@user_id, limit: 50)
@recursive_get_playlists(init_req)
###
# Retrieve all of the tracks from a given playlists, making multiple
# requests as necessary. If the playlist isn't owned by the current logged in
# user the owners id is required as a parameter,
# Triggers the 'upm:tracksLoad' event when finished
###
get_playlist_tracks: (playlist_id, uid=@user_id) =>
@current_tracks = []
init_req = @playlist_tracks_url(uid, playlist_id, limit: 75)
@recursive_get_tracks(init_req)
###
# Creates a playlist and upon completion add the current tracks to it
# Currently throwing a 403 despite correct user scopes, may be a bug in the
# Spotify API
###
create_playlist: (playlist_id) =>
create_url = @create_playlist_url()
track_ids = _.map($('#js-playlist-table tbody tr'), (el) ->
$(el).attr("id"))
@post_create_playlist(create_url, playlist_id, (res) =>
tracks_url = @add_playlist_url(res.id)
@post_tracks_to_playlist(tracks_url, track_ids)
)
logger: (res) -> console.log(res, @current_tracks)
###
# Reduce the spotify response to just an array of track objects
# Extract just the track_ids
# Make an ajax request to echonest for the meta data
# Triggers the 'upm:echoLoad' event when finished
###
get_echo_track_data: (spotify_playlist_res) =>
@spotify_tracks = reduce_spotify_tracks(spotify_playlist_res)
@track_ids = _.pluck(@spotify_tracks, 'uri')
@get_echo_audio_summary(@echo_tracks_url(@track_ids))
###
# Takes an array of track objects and returns an array of track uris
###
pluck_ids: (tracks) -> _.pluck(tracks, 'uri')
###
# Takes a parsed response and renders a lodash template of html
###
render_playlists: (playlists_res) =>
app.templates.user_playlists(process_playlists(playlists_res))
###
# Takes a parsed response and renders a lodash template of html
###
render_playlist: (playlist_res) =>
data = {}
data.head = app.templates.track_head()
data.body = _.reduce(playlist_res,
(template, track) -> template + "\n" + app.templates.track(track))
app.templates.table_shell(data)
###
# Can't stringify track_ids because they would be duplicate hash keys
#
# http://developer.echonest.com/api/v4/song/profile?api_key=PI:KEY:<KEY>END_PI&format=json&track_id=spotify:track:3L7BcXHCG8uT92viO6Tikl&track_id=spotify:track:4sgd8Oe36YeA1YpCzPBjiC&bucket=audio_summary
###
echo_tracks_url: (track_ids) =>
base =
api_key: @echo_key
format: 'json'
bucket: 'audio_summary'
tracks_qs = "&track_id=#{track_ids.join("&track_id=")}"
api_url("#{@echo_api_host}/song/profile", base) + tracks_qs
###
# Generate the urls for each of the API requests
###
users_playlists_url: (user_id, qs_obj=null) ->
api_url("#{@spotify_api_host}/users/#{user_id}/playlists", qs_obj)
playlist_tracks_url: (user_id, playlist_id, qs_obj=null) =>
api_url(
"#{@spotify_api_host}/users/#{user_id}/playlists/#{playlist_id}/tracks",
qs_obj)
create_playlist_url: =>
api_url("#{@spotify_api_host}/users/#{@user_id}/playlists")
add_playlist_url: (playlist_id) =>
api_url("#{@spotify_api_host}/users/#{@user_id}/playlists/#{playlist_id}/tracks")
###
# Get audio summary data from the Echonest API
###
get_echo_audio_summary: (req_url) =>
$.ajax(
url: req_url
success: (res) =>
@echo_tracks = res.response.songs
$(window).trigger('upm:echoLoad'))
###
# As long as there is another page of playlists and we aren't at our limit
# make another request.
# Save the result in an instance variable this.user_playlists
# When its done fire the custom event 'upm:playlistsLoad'
###
recursive_get_playlists: (req_url) =>
$.ajax(
url: req_url
headers: auth_header(@access)
success: (res) =>
@user_playlists.push(res.items)
if res.next? and @user_playlists.length < @max_iterations
@recursive_get_playlists(res.next)
else
$(window).trigger('upm:playlistsLoad'))
###
# As long as there is another page of tracks and we aren't at our limit
# make another request.
# Save the result in an instance variable this.current_tracks
# When its done fire the custom event 'upm:tracksLoad'
###
recursive_get_tracks: (req_url) =>
$.ajax(
url: req_url
headers: auth_header(@access)
success: (res) =>
@current_tracks.push(res.items)
if res.next? and @user_playlists.length < @max_iterations
@recursive_get_tracks(res.next)
else
$(window).trigger('upm:tracksLoad'))
###
# Post request to create a new *empty* playlist of the given name
# Takes a callback function that fires on completion since you probably
# want to add some tracks.
###
post_create_playlist: (req_url, name, callback) =>
$.ajax(
url: req_url
method: 'POST'
data: JSON.stringify({"name": name, "public": true})
contentType: 'application/json'
dataType: 'json'
headers: auth_header(@access)
success: (res) => callback(res))
###
# Post request to create add a list of tracks (based on the spotify uri)
# to a given playlist
###
post_tracks_to_playlist: (req_url, list_of_ids) =>
$.ajax(
url: req_url
method: 'POST'
data: JSON.stringify({"uris": list_of_ids})
contentType: 'application/json'
dataType: 'json'
headers:
'Authorization': 'Bearer ' + @access
succes: (res) => console.log("Added tracks"))
###
# Parse the json response to only have objects with the name, id, and owner
# attributes to be passed to the view
###
process_playlists = (playlists_res) =>
data = _.chain(playlists_res)
.flatten()
.map((playlist) -> _.pick(playlist, 'name', 'id', 'owner'))
.value()
###
# Parse the json response to only have the track objects
###
reduce_spotify_tracks = (playlist_res) ->
_.chain(playlist_res)
.flatten()
.map((track) -> _.get(track, 'track'))
.value()
###
# Merge responses from echonest and spotify and convert data into a more
# human-readable format for the data table.
###
merge_echo_spotify: (spotify_t=@spotify_tracks, echo_t=@echo_tracks) =>
self = this
merged = _.merge(spotify_t,
_.map(echo_t, (track) -> _.get(track, 'audio_summary')))
_.map(merged, (o) ->
_.forEach(o, (v,k) ->
switch k
when 'key' then _.set(o, k, self.key_list[v])
when 'mode' then _.set(o, k, self.mode_list[v])
when 'artists' then _.set(o, k, _.get(_.first(v), 'name'))
when 'duration' then _.set(o, k, seconds_to_s(v))
when 'tempo' then _.set(o, k, parseInt(v))
when 'valence', 'energy', 'danceability', 'acousticness', 'liveness'
_.set(o, k, decimal_to_per(v))
)
)
###
# Private helper method for encoding an OAuth 2.0 Bearer header
###
auth_header = (access) => 'Authorization': 'Bearer ' + access
###
# Private helper method for building an api endpoint call, with and optional
# query string object
###
api_url = (endpoint, qs_obj=null) ->
if qs_obj?
qs = Url.stringify(qs_obj)
endpoint = "#{endpoint}?#{qs}"
endpoint
app.SpotifyClient = SpotifyClient
|
[
{
"context": ".a.String\n i.should.have.property('key','52998e1c32e5724771000001').be.a.String\n i.should.have.property('v",
"end": 2557,
"score": 0.9997041821479797,
"start": 2533,
"tag": "KEY",
"value": "52998e1c32e5724771000001"
},
{
"context": "ring\n i... | test/method-users-get-or-create-user-from-provider-tests.coffee | codedoctor/mongoose-user-store-multi-tenant | 4 | should = require 'should'
helper = require './support/helper'
_ = require 'underscore'
mongoose = require 'mongoose'
ObjectId = mongoose.Types.ObjectId
fixtures = require './support/fixtures'
sampleUsers = null
describe 'WHEN working with store.users.getByIds', ->
before (cb) ->
helper.start null, cb
after (cb) ->
helper.stop cb
it 'should exist', ->
should.exist helper.store.users
describe 'WHEN running against an empty database', ->
describe 'WHEN invoking getOrCreateUserFromProvider', ->
it 'WITH empty parameters IT should return an empty list', (cb) ->
helper.store.users.getOrCreateUserFromProvider fixtures._tenantId,fixtures.providerNameSome,fixtures.accessTokenSome,fixtures.secretSome,fixtures.profileSome,roles: ['rolea','roleb'], (err,result) ->
return cb err if err
should.exist.result
result.should.have.property('createdAt').be.a.Date
result.should.have.property('updatedAt').be.a.Date
result.updatedAt.should.equal result.createdAt
result.should.have.property('title','').be.a.String
result.should.have.property('displayName').be.a.String #.with.lengthOf(26)
result.should.have.property('username').be.a.String #.with.lengthOf(26)
result.should.have.property('_tenantId')
result._tenantId.toString().should.have.lengthOf(24)
result.should.have.property('resourceLimits').be.an.Array #.lengthOf(0)
result.should.have.property('isDeleted',false).be.a.Boolean
result.should.have.property('deletedAt',null)
result.should.have.property('description').be.a.String #.with.lengthOf(0)
result.should.have.property('_id')
result._id.toString().should.have.lengthOf(24)
result.should.have.property('needsInit',true).be.a.Boolean
result.should.have.property('onboardingState',null)
result.should.have.property('roles').be.an.Array #.lengthOf(2)
result.roles[0].should.equal "rolea"
result.roles[1].should.equal "roleb"
result.should.have.property('emails').be.an.Array #.lengthOf(0)
result.should.have.property('userImages').be.an.Array #.lengthOf(0)
result.should.have.property('profileLinks').be.an.Array #.lengthOf(0)
result.should.have.property('identities').be.an.Array #.lengthOf(1)
i = result.identities[0]
i.should.have.property('provider','some').be.a.String
i.should.have.property('key','52998e1c32e5724771000001').be.a.String
i.should.have.property('v1','accesstokensome').be.a.String
i.should.have.property('v2','secretsome').be.a.String
i.should.have.property('_id')
i._id.toString().should.have.lengthOf(24)
i.should.have.property('displayName','fb52998e1c32e5724771000001').be.a.String
i.should.have.property('username','fb52998e1c32e5724771000001').be.a.String
i.should.have.property('providerType','oauth').be.a.String
#console.log JSON.stringify(result,null,2)
cb()
| 20460 | should = require 'should'
helper = require './support/helper'
_ = require 'underscore'
mongoose = require 'mongoose'
ObjectId = mongoose.Types.ObjectId
fixtures = require './support/fixtures'
sampleUsers = null
describe 'WHEN working with store.users.getByIds', ->
before (cb) ->
helper.start null, cb
after (cb) ->
helper.stop cb
it 'should exist', ->
should.exist helper.store.users
describe 'WHEN running against an empty database', ->
describe 'WHEN invoking getOrCreateUserFromProvider', ->
it 'WITH empty parameters IT should return an empty list', (cb) ->
helper.store.users.getOrCreateUserFromProvider fixtures._tenantId,fixtures.providerNameSome,fixtures.accessTokenSome,fixtures.secretSome,fixtures.profileSome,roles: ['rolea','roleb'], (err,result) ->
return cb err if err
should.exist.result
result.should.have.property('createdAt').be.a.Date
result.should.have.property('updatedAt').be.a.Date
result.updatedAt.should.equal result.createdAt
result.should.have.property('title','').be.a.String
result.should.have.property('displayName').be.a.String #.with.lengthOf(26)
result.should.have.property('username').be.a.String #.with.lengthOf(26)
result.should.have.property('_tenantId')
result._tenantId.toString().should.have.lengthOf(24)
result.should.have.property('resourceLimits').be.an.Array #.lengthOf(0)
result.should.have.property('isDeleted',false).be.a.Boolean
result.should.have.property('deletedAt',null)
result.should.have.property('description').be.a.String #.with.lengthOf(0)
result.should.have.property('_id')
result._id.toString().should.have.lengthOf(24)
result.should.have.property('needsInit',true).be.a.Boolean
result.should.have.property('onboardingState',null)
result.should.have.property('roles').be.an.Array #.lengthOf(2)
result.roles[0].should.equal "rolea"
result.roles[1].should.equal "roleb"
result.should.have.property('emails').be.an.Array #.lengthOf(0)
result.should.have.property('userImages').be.an.Array #.lengthOf(0)
result.should.have.property('profileLinks').be.an.Array #.lengthOf(0)
result.should.have.property('identities').be.an.Array #.lengthOf(1)
i = result.identities[0]
i.should.have.property('provider','some').be.a.String
i.should.have.property('key','<KEY>').be.a.String
i.should.have.property('v1','accesstokensome').be.a.String
i.should.have.property('v2','secretsome').be.a.String
i.should.have.property('_id')
i._id.toString().should.have.lengthOf(24)
i.should.have.property('displayName','fb52998e1c32e5724771000001').be.a.String
i.should.have.property('username','fb52998e1c32e5724771000001').be.a.String
i.should.have.property('providerType','oauth').be.a.String
#console.log JSON.stringify(result,null,2)
cb()
| true | should = require 'should'
helper = require './support/helper'
_ = require 'underscore'
mongoose = require 'mongoose'
ObjectId = mongoose.Types.ObjectId
fixtures = require './support/fixtures'
sampleUsers = null
describe 'WHEN working with store.users.getByIds', ->
before (cb) ->
helper.start null, cb
after (cb) ->
helper.stop cb
it 'should exist', ->
should.exist helper.store.users
describe 'WHEN running against an empty database', ->
describe 'WHEN invoking getOrCreateUserFromProvider', ->
it 'WITH empty parameters IT should return an empty list', (cb) ->
helper.store.users.getOrCreateUserFromProvider fixtures._tenantId,fixtures.providerNameSome,fixtures.accessTokenSome,fixtures.secretSome,fixtures.profileSome,roles: ['rolea','roleb'], (err,result) ->
return cb err if err
should.exist.result
result.should.have.property('createdAt').be.a.Date
result.should.have.property('updatedAt').be.a.Date
result.updatedAt.should.equal result.createdAt
result.should.have.property('title','').be.a.String
result.should.have.property('displayName').be.a.String #.with.lengthOf(26)
result.should.have.property('username').be.a.String #.with.lengthOf(26)
result.should.have.property('_tenantId')
result._tenantId.toString().should.have.lengthOf(24)
result.should.have.property('resourceLimits').be.an.Array #.lengthOf(0)
result.should.have.property('isDeleted',false).be.a.Boolean
result.should.have.property('deletedAt',null)
result.should.have.property('description').be.a.String #.with.lengthOf(0)
result.should.have.property('_id')
result._id.toString().should.have.lengthOf(24)
result.should.have.property('needsInit',true).be.a.Boolean
result.should.have.property('onboardingState',null)
result.should.have.property('roles').be.an.Array #.lengthOf(2)
result.roles[0].should.equal "rolea"
result.roles[1].should.equal "roleb"
result.should.have.property('emails').be.an.Array #.lengthOf(0)
result.should.have.property('userImages').be.an.Array #.lengthOf(0)
result.should.have.property('profileLinks').be.an.Array #.lengthOf(0)
result.should.have.property('identities').be.an.Array #.lengthOf(1)
i = result.identities[0]
i.should.have.property('provider','some').be.a.String
i.should.have.property('key','PI:KEY:<KEY>END_PI').be.a.String
i.should.have.property('v1','accesstokensome').be.a.String
i.should.have.property('v2','secretsome').be.a.String
i.should.have.property('_id')
i._id.toString().should.have.lengthOf(24)
i.should.have.property('displayName','fb52998e1c32e5724771000001').be.a.String
i.should.have.property('username','fb52998e1c32e5724771000001').be.a.String
i.should.have.property('providerType','oauth').be.a.String
#console.log JSON.stringify(result,null,2)
cb()
|
[
{
"context": "re is licensed under the MIT License.\n\n Copyright Fedor Indutny, 2011.\n\n Permission is hereby granted, free of c",
"end": 124,
"score": 0.9995713233947754,
"start": 111,
"tag": "NAME",
"value": "Fedor Indutny"
}
] | node_modules/index/lib/index/core/set.coffee | beshrkayali/monkey-release-action | 12 | ###
Set functionality for Node index module
This software is licensed under the MIT License.
Copyright Fedor Indutny, 2011.
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to permit
persons to whom the Software is furnished to do so, subject to the
following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
USE OR OTHER DEALINGS IN THE SOFTWARE.
###
step = require 'step'
utils = require '../../index/utils'
###
Set
###
exports.set = (key, value, _callback) ->
sort = @sort
order = @order
storage = @storage
conflictManager = @conflictManager
if @lock(=> @set key, value, _callback)
return
callback = (err, data) =>
@releaseLock()
process.nextTick ->
_callback and _callback err, data
iterate = (page, callback) ->
item_index = utils.search page, sort, key
item = page[item_index]
if item and not item[2]
# Index
# Read next page and try to insert kv in it
step ->
storage.read item[1], @parallel()
, (err, page) ->
if err
throw err
iterate page, @parallel()
, (err, result) ->
if err
callback err
return
if storage.isPosition result
# Page is just should be overwrited
page[item_index][1] = result
storage.write page, callback
else
# Result is = {
# left_page: [...],
# middle_key: ...,
# right_page: [...]
# }
page[item_index][1] = result.left_page
page.splice item_index + 1, 0,
[result.middle_key, result.right_page]
splitPage false, storage, order, page, callback
else
# Leaf
step ->
# Found dublicate
if item and sort(item[0], key) is 0
unless conflictManager
throw 'Can\'t insert item w/ dublicate key'
# Invoke conflictManager
step ->
storage.read item[1], @parallel()
, (err, old_value) ->
if err
throw err
@parallel() null, old_value
conflictManager old_value, value, @parallel()
, @parallel()
return
@parallel() null, value
, (err, value, old_value) ->
if err
throw err
# Value should be firstly written in storage
item_index = if item_index is null then 0 else item_index + 1
storage.write [value, old_value], @parallel()
, (err, value) ->
if err
callback err
return
# Then inserted in leaf page
page.splice item_index, 0, [key, value, 1]
splitPage true, storage, order, page, callback
step ->
# Read initial data
storage.readRoot @parallel()
, (err, root) ->
if err
throw err
# Initiate sequence
iterate root, @parallel()
, (err, result) ->
if err
throw err
if storage.isPosition result
# Write new root
@parallel() null, result
else
# Split root
storage.write [
[null, result.left_page],
[result.middle_key, result.right_page]
], @parallel()
, (err, new_root_pos) ->
if err
throw err
storage.writeRoot new_root_pos, @parallel()
, callback
###
Check page length
If exceed - split it into two and return left_page, right_page, middle_key
###
splitPage = (in_leaf, storage, order, page, callback) ->
# If item needs to be splitted
if page.length > order
mid_index = page.length >> 1
mid_key = page[mid_index][0]
# Write splitted pages
step ->
left_page = page[0...mid_index]
storage.write left_page, @parallel()
right_page = page[mid_index...]
right_page[0][0] = null unless in_leaf
storage.write right_page, @parallel()
, (err, left_page, right_page) ->
callback err, {
left_page: left_page,
middle_key: mid_key,
right_page: right_page
}
else
# Just overwrite it
storage.write page, callback
| 128048 | ###
Set functionality for Node index module
This software is licensed under the MIT License.
Copyright <NAME>, 2011.
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to permit
persons to whom the Software is furnished to do so, subject to the
following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
USE OR OTHER DEALINGS IN THE SOFTWARE.
###
step = require 'step'
utils = require '../../index/utils'
###
Set
###
exports.set = (key, value, _callback) ->
sort = @sort
order = @order
storage = @storage
conflictManager = @conflictManager
if @lock(=> @set key, value, _callback)
return
callback = (err, data) =>
@releaseLock()
process.nextTick ->
_callback and _callback err, data
iterate = (page, callback) ->
item_index = utils.search page, sort, key
item = page[item_index]
if item and not item[2]
# Index
# Read next page and try to insert kv in it
step ->
storage.read item[1], @parallel()
, (err, page) ->
if err
throw err
iterate page, @parallel()
, (err, result) ->
if err
callback err
return
if storage.isPosition result
# Page is just should be overwrited
page[item_index][1] = result
storage.write page, callback
else
# Result is = {
# left_page: [...],
# middle_key: ...,
# right_page: [...]
# }
page[item_index][1] = result.left_page
page.splice item_index + 1, 0,
[result.middle_key, result.right_page]
splitPage false, storage, order, page, callback
else
# Leaf
step ->
# Found dublicate
if item and sort(item[0], key) is 0
unless conflictManager
throw 'Can\'t insert item w/ dublicate key'
# Invoke conflictManager
step ->
storage.read item[1], @parallel()
, (err, old_value) ->
if err
throw err
@parallel() null, old_value
conflictManager old_value, value, @parallel()
, @parallel()
return
@parallel() null, value
, (err, value, old_value) ->
if err
throw err
# Value should be firstly written in storage
item_index = if item_index is null then 0 else item_index + 1
storage.write [value, old_value], @parallel()
, (err, value) ->
if err
callback err
return
# Then inserted in leaf page
page.splice item_index, 0, [key, value, 1]
splitPage true, storage, order, page, callback
step ->
# Read initial data
storage.readRoot @parallel()
, (err, root) ->
if err
throw err
# Initiate sequence
iterate root, @parallel()
, (err, result) ->
if err
throw err
if storage.isPosition result
# Write new root
@parallel() null, result
else
# Split root
storage.write [
[null, result.left_page],
[result.middle_key, result.right_page]
], @parallel()
, (err, new_root_pos) ->
if err
throw err
storage.writeRoot new_root_pos, @parallel()
, callback
###
Check page length
If exceed - split it into two and return left_page, right_page, middle_key
###
splitPage = (in_leaf, storage, order, page, callback) ->
# If item needs to be splitted
if page.length > order
mid_index = page.length >> 1
mid_key = page[mid_index][0]
# Write splitted pages
step ->
left_page = page[0...mid_index]
storage.write left_page, @parallel()
right_page = page[mid_index...]
right_page[0][0] = null unless in_leaf
storage.write right_page, @parallel()
, (err, left_page, right_page) ->
callback err, {
left_page: left_page,
middle_key: mid_key,
right_page: right_page
}
else
# Just overwrite it
storage.write page, callback
| true | ###
Set functionality for Node index module
This software is licensed under the MIT License.
Copyright PI:NAME:<NAME>END_PI, 2011.
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to permit
persons to whom the Software is furnished to do so, subject to the
following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
USE OR OTHER DEALINGS IN THE SOFTWARE.
###
step = require 'step'
utils = require '../../index/utils'
###
Set
###
exports.set = (key, value, _callback) ->
sort = @sort
order = @order
storage = @storage
conflictManager = @conflictManager
if @lock(=> @set key, value, _callback)
return
callback = (err, data) =>
@releaseLock()
process.nextTick ->
_callback and _callback err, data
iterate = (page, callback) ->
item_index = utils.search page, sort, key
item = page[item_index]
if item and not item[2]
# Index
# Read next page and try to insert kv in it
step ->
storage.read item[1], @parallel()
, (err, page) ->
if err
throw err
iterate page, @parallel()
, (err, result) ->
if err
callback err
return
if storage.isPosition result
# Page is just should be overwrited
page[item_index][1] = result
storage.write page, callback
else
# Result is = {
# left_page: [...],
# middle_key: ...,
# right_page: [...]
# }
page[item_index][1] = result.left_page
page.splice item_index + 1, 0,
[result.middle_key, result.right_page]
splitPage false, storage, order, page, callback
else
# Leaf
step ->
# Found dublicate
if item and sort(item[0], key) is 0
unless conflictManager
throw 'Can\'t insert item w/ dublicate key'
# Invoke conflictManager
step ->
storage.read item[1], @parallel()
, (err, old_value) ->
if err
throw err
@parallel() null, old_value
conflictManager old_value, value, @parallel()
, @parallel()
return
@parallel() null, value
, (err, value, old_value) ->
if err
throw err
# Value should be firstly written in storage
item_index = if item_index is null then 0 else item_index + 1
storage.write [value, old_value], @parallel()
, (err, value) ->
if err
callback err
return
# Then inserted in leaf page
page.splice item_index, 0, [key, value, 1]
splitPage true, storage, order, page, callback
step ->
# Read initial data
storage.readRoot @parallel()
, (err, root) ->
if err
throw err
# Initiate sequence
iterate root, @parallel()
, (err, result) ->
if err
throw err
if storage.isPosition result
# Write new root
@parallel() null, result
else
# Split root
storage.write [
[null, result.left_page],
[result.middle_key, result.right_page]
], @parallel()
, (err, new_root_pos) ->
if err
throw err
storage.writeRoot new_root_pos, @parallel()
, callback
###
Check page length
If exceed - split it into two and return left_page, right_page, middle_key
###
splitPage = (in_leaf, storage, order, page, callback) ->
# If item needs to be splitted
if page.length > order
mid_index = page.length >> 1
mid_key = page[mid_index][0]
# Write splitted pages
step ->
left_page = page[0...mid_index]
storage.write left_page, @parallel()
right_page = page[mid_index...]
right_page[0][0] = null unless in_leaf
storage.write right_page, @parallel()
, (err, left_page, right_page) ->
callback err, {
left_page: left_page,
middle_key: mid_key,
right_page: right_page
}
else
# Just overwrite it
storage.write page, callback
|
[
{
"context": " ObjectId('50bf85a816b4f6bff4000001'), username: 'test1', primaryEmail: 'test1@test.com', password: 'test",
"end": 288,
"score": 0.9995442628860474,
"start": 283,
"tag": "USERNAME",
"value": "test1"
},
{
"context": "f6bff4000001'), username: 'test1', primaryEmail: 'test... | test/support/sample-users.coffee | codedoctor/mongoose-user-store-multi-tenant | 4 | async = require 'async'
_ = require 'underscore'
mongoose = require 'mongoose'
ObjectId = mongoose.Types.ObjectId
module.exports = class SampleUsers
users: [
{_tenantId : new ObjectId('52998e1c32e5724771000009'), _id: new ObjectId('50bf85a816b4f6bff4000001'), username: 'test1', primaryEmail: 'test1@test.com', password: 'test1', displayName: 'Test 1'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000002'), username: 'test2', primaryEmail: 'test2@test.com', password: 'test2', displayName: 'Test 2'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000003'), username: 'test3', primaryEmail: 'test3@test.com', password: 'test3', displayName: 'Test 3'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000004'), username: 'alpha1', primaryEmail: 'alpha1@test.com', password: 'test3', displayName: 'Alpha 1'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000005'), username: 'alpha2', primaryEmail: 'alpha2@test.com', password: 'test3', displayName: 'Alpha 2'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000006'), username: 'alpha3', primaryEmail: 'alpha3@test.com', password: 'test3', displayName: 'Alpha 3'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000007'), username: 'alpha4', primaryEmail: 'alpha4@test.com', password: 'test3', displayName: 'Alpha 4'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000008'), username: 'alpha5', primaryEmail: 'alpha5@test.com', password: 'test3', displayName: 'Alpha 5'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000009'), username: 'alpha6', primaryEmail: 'alpha6@test.com', password: 'test3', displayName: 'Alpha 6'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff400000A'), username: 'alpha7', primaryEmail: 'alpha7@test.com', password: 'test3', displayName: 'Alpha 7'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff400000B'), username: 'alpha8', primaryEmail: 'alpha8@test.com', password: 'test3', displayName: 'Alpha 8'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff400000C'), username: 'alpha9', primaryEmail: 'alpha9@test.com', password: 'test3', displayName: 'Alpha 9'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff400000D'), username: 'alphaA', primaryEmail: 'alphaa@test.com', password: 'test3', displayName: 'Alpha 10'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff400000E'), username: 'alphaB', primaryEmail: 'alphab@test.com', password: 'test3', displayName: 'Alpha 11'}
]
constructor: (@mongo) ->
existingUserIds: (howMany = 3) =>
_.first _.map(_.pluck( @users, '_id'), (x) -> x.toString() ), howMany
nonExistingUserIds: (howMany = 3) =>
_.first ['500f85a816b4f6bff4000000','510f85a816b4f6bff4000000','520f85a816b4f6bff4000000'], howMany
setup: (cb) =>
addOneUser = (user,done) =>
@mongo.collection("identitymt.users").save user,done
async.forEach @users, addOneUser, (err) =>
cb err
| 132751 | async = require 'async'
_ = require 'underscore'
mongoose = require 'mongoose'
ObjectId = mongoose.Types.ObjectId
module.exports = class SampleUsers
users: [
{_tenantId : new ObjectId('52998e1c32e5724771000009'), _id: new ObjectId('50bf85a816b4f6bff4000001'), username: 'test1', primaryEmail: '<EMAIL>', password: '<PASSWORD>', displayName: 'Test 1'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000002'), username: 'test2', primaryEmail: '<EMAIL>', password: '<PASSWORD>', displayName: 'Test 2'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000003'), username: 'test3', primaryEmail: '<EMAIL>', password: '<PASSWORD>', displayName: 'Test 3'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000004'), username: 'alpha1', primaryEmail: '<EMAIL>', password: '<PASSWORD>', displayName: 'Alpha 1'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000005'), username: 'alpha2', primaryEmail: '<EMAIL>', password: '<PASSWORD>', displayName: 'Alpha 2'}
{_tenantId : new ObjectId('52998e1c32e5724<PASSWORD>0000<PASSWORD>9'),_id: new ObjectId('50bf85a816b4f6bff4000006'), username: 'alpha3', primaryEmail: '<EMAIL>', password: '<PASSWORD>', displayName: 'Alpha 3'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000007'), username: 'alpha4', primaryEmail: '<EMAIL>', password: '<PASSWORD>', displayName: 'Alpha 4'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000008'), username: 'alpha5', primaryEmail: '<EMAIL>', password: '<PASSWORD>', displayName: 'Alpha 5'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000009'), username: 'alpha6', primaryEmail: '<EMAIL>', password: '<PASSWORD>', displayName: 'Alpha 6'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff400000A'), username: 'alpha7', primaryEmail: '<EMAIL>', password: '<PASSWORD>', displayName: 'Alpha 7'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff400000B'), username: 'alpha8', primaryEmail: '<EMAIL>', password: '<PASSWORD>', displayName: 'Alpha 8'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff400000C'), username: 'alpha9', primaryEmail: '<EMAIL>', password: '<PASSWORD>', displayName: 'Alpha 9'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff400000D'), username: 'alphaA', primaryEmail: '<EMAIL>', password: '<PASSWORD>', displayName: 'Alpha 10'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff400000E'), username: 'alphaB', primaryEmail: '<EMAIL>', password: '<PASSWORD>', displayName: 'Alpha 11'}
]
constructor: (@mongo) ->
existingUserIds: (howMany = 3) =>
_.first _.map(_.pluck( @users, '_id'), (x) -> x.toString() ), howMany
nonExistingUserIds: (howMany = 3) =>
_.first ['500f85a816b4f6bff4000000','510f85a816b4f6bff4000000','520f85a816b4f6bff4000000'], howMany
setup: (cb) =>
addOneUser = (user,done) =>
@mongo.collection("identitymt.users").save user,done
async.forEach @users, addOneUser, (err) =>
cb err
| true | async = require 'async'
_ = require 'underscore'
mongoose = require 'mongoose'
ObjectId = mongoose.Types.ObjectId
module.exports = class SampleUsers
users: [
{_tenantId : new ObjectId('52998e1c32e5724771000009'), _id: new ObjectId('50bf85a816b4f6bff4000001'), username: 'test1', primaryEmail: 'PI:EMAIL:<EMAIL>END_PI', password: 'PI:PASSWORD:<PASSWORD>END_PI', displayName: 'Test 1'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000002'), username: 'test2', primaryEmail: 'PI:EMAIL:<EMAIL>END_PI', password: 'PI:PASSWORD:<PASSWORD>END_PI', displayName: 'Test 2'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000003'), username: 'test3', primaryEmail: 'PI:EMAIL:<EMAIL>END_PI', password: 'PI:PASSWORD:<PASSWORD>END_PI', displayName: 'Test 3'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000004'), username: 'alpha1', primaryEmail: 'PI:EMAIL:<EMAIL>END_PI', password: 'PI:PASSWORD:<PASSWORD>END_PI', displayName: 'Alpha 1'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000005'), username: 'alpha2', primaryEmail: 'PI:EMAIL:<EMAIL>END_PI', password: 'PI:PASSWORD:<PASSWORD>END_PI', displayName: 'Alpha 2'}
{_tenantId : new ObjectId('52998e1c32e5724PI:PASSWORD:<PASSWORD>END_PI0000PI:PASSWORD:<PASSWORD>END_PI9'),_id: new ObjectId('50bf85a816b4f6bff4000006'), username: 'alpha3', primaryEmail: 'PI:EMAIL:<EMAIL>END_PI', password: 'PI:PASSWORD:<PASSWORD>END_PI', displayName: 'Alpha 3'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000007'), username: 'alpha4', primaryEmail: 'PI:EMAIL:<EMAIL>END_PI', password: 'PI:PASSWORD:<PASSWORD>END_PI', displayName: 'Alpha 4'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000008'), username: 'alpha5', primaryEmail: 'PI:EMAIL:<EMAIL>END_PI', password: 'PI:PASSWORD:<PASSWORD>END_PI', displayName: 'Alpha 5'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff4000009'), username: 'alpha6', primaryEmail: 'PI:EMAIL:<EMAIL>END_PI', password: 'PI:PASSWORD:<PASSWORD>END_PI', displayName: 'Alpha 6'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff400000A'), username: 'alpha7', primaryEmail: 'PI:EMAIL:<EMAIL>END_PI', password: 'PI:PASSWORD:<PASSWORD>END_PI', displayName: 'Alpha 7'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff400000B'), username: 'alpha8', primaryEmail: 'PI:EMAIL:<EMAIL>END_PI', password: 'PI:PASSWORD:<PASSWORD>END_PI', displayName: 'Alpha 8'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff400000C'), username: 'alpha9', primaryEmail: 'PI:EMAIL:<EMAIL>END_PI', password: 'PI:PASSWORD:<PASSWORD>END_PI', displayName: 'Alpha 9'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff400000D'), username: 'alphaA', primaryEmail: 'PI:EMAIL:<EMAIL>END_PI', password: 'PI:PASSWORD:<PASSWORD>END_PI', displayName: 'Alpha 10'}
{_tenantId : new ObjectId('52998e1c32e5724771000009'),_id: new ObjectId('50bf85a816b4f6bff400000E'), username: 'alphaB', primaryEmail: 'PI:EMAIL:<EMAIL>END_PI', password: 'PI:PASSWORD:<PASSWORD>END_PI', displayName: 'Alpha 11'}
]
constructor: (@mongo) ->
existingUserIds: (howMany = 3) =>
_.first _.map(_.pluck( @users, '_id'), (x) -> x.toString() ), howMany
nonExistingUserIds: (howMany = 3) =>
_.first ['500f85a816b4f6bff4000000','510f85a816b4f6bff4000000','520f85a816b4f6bff4000000'], howMany
setup: (cb) =>
addOneUser = (user,done) =>
@mongo.collection("identitymt.users").save user,done
async.forEach @users, addOneUser, (err) =>
cb err
|
[
{
"context": " 2015\n# The MIT License (MIT)\n# Copyright (c) 2015 Dustin Dowell\n# github.com/dustindowell22/element-catch\n# =====",
"end": 98,
"score": 0.9998587369918823,
"start": 85,
"tag": "NAME",
"value": "Dustin Dowell"
},
{
"context": ")\n# Copyright (c) 2015 Dustin Dowell\n... | jquery.element-catch.coffee | dustindowell22/menu-catch | 2 | # Element Catch - 1.0.6
# June 20, 2015
# The MIT License (MIT)
# Copyright (c) 2015 Dustin Dowell
# github.com/dustindowell22/element-catch
# ==============================================
(($) ->
$.fn.elementCatch = (scrollClass = 'scrolled') ->
# Cache this object
$this = $(this)
# Distance from top of document
distanceTop = $this.offset().top
# Self-initiating Element Catch
do elementCatch = ->
# Scrollbar distance from top of document
scrollPosition = $(document).scrollTop()
# Add class if scroll position is larger than distance from top of document
if scrollPosition > distanceTop
if not $this.hasClass(scrollClass)
$this.addClass(scrollClass)
else
$this.removeClass(scrollClass)
# Call on events
$(window).on('scroll resize orientationchange', elementCatch)
# Allow chaining
return this
) jQuery
| 140490 | # Element Catch - 1.0.6
# June 20, 2015
# The MIT License (MIT)
# Copyright (c) 2015 <NAME>
# github.com/dustindowell22/element-catch
# ==============================================
(($) ->
$.fn.elementCatch = (scrollClass = 'scrolled') ->
# Cache this object
$this = $(this)
# Distance from top of document
distanceTop = $this.offset().top
# Self-initiating Element Catch
do elementCatch = ->
# Scrollbar distance from top of document
scrollPosition = $(document).scrollTop()
# Add class if scroll position is larger than distance from top of document
if scrollPosition > distanceTop
if not $this.hasClass(scrollClass)
$this.addClass(scrollClass)
else
$this.removeClass(scrollClass)
# Call on events
$(window).on('scroll resize orientationchange', elementCatch)
# Allow chaining
return this
) jQuery
| true | # Element Catch - 1.0.6
# June 20, 2015
# The MIT License (MIT)
# Copyright (c) 2015 PI:NAME:<NAME>END_PI
# github.com/dustindowell22/element-catch
# ==============================================
(($) ->
$.fn.elementCatch = (scrollClass = 'scrolled') ->
# Cache this object
$this = $(this)
# Distance from top of document
distanceTop = $this.offset().top
# Self-initiating Element Catch
do elementCatch = ->
# Scrollbar distance from top of document
scrollPosition = $(document).scrollTop()
# Add class if scroll position is larger than distance from top of document
if scrollPosition > distanceTop
if not $this.hasClass(scrollClass)
$this.addClass(scrollClass)
else
$this.removeClass(scrollClass)
# Call on events
$(window).on('scroll resize orientationchange', elementCatch)
# Allow chaining
return this
) jQuery
|
[
{
"context": "ule, cb) ->\n\t\t\tActuatorRule.request \"byRule\", key: rule.id, (err, actuatorRules)->\n\t\t\t\tif err\n\t\t\t\t\tcallback ",
"end": 7930,
"score": 0.9405938982963562,
"start": 7923,
"tag": "KEY",
"value": "rule.id"
}
] | server/models/rule.coffee | Aldream/cozy-nest | 1 | ###
# =================
# SCHEMA - Rule
# =================
# Defines a Rule, binding Sensors Measures and Actuators together.
###
cozydb = require 'cozydb'
async = require 'async'
SensorRule = require './sensorRule'
ActuatorRule = require './actuatorRule'
Measure = require './measure'
module.exports = class Rule extends cozydb.CozyModel
@schema:
name: type : String # not Empty
nbSensorRules: type : Number, default : 0
nbSensorRulesMet: type : Number, default : 0
###
# incrementNbSensorRules
# ====
# Increments (by 1) the number of SensorRules this rule has.
# @param callback (Function(Error):null): Callback
###
incrementNbSensorRules: (callback) ->
@updateAttributes nbSensorRules: @nbSensorRules+1, callback
###
# decrementNbSensorRules
# ====
# Decrements (by 1) the number of SensorRules this rule has.
# @param callback (Function(Error):null): Callback
###
decrementNbSensorRules: (callback) ->
@updateAttributes nbSensorRules: @nbSensorRules-1, callback
###
# incrementNbSensorRulesMet
# ====
# Increments (by 1) the number of fulfilled SensorRules this rule has.
# @param callback (Function(Error):null): Callback
###
incrementNbSensorRulesMet: (callback) ->
@updateAttributes nbSensorRulesMet: @nbSensorRulesMet+1, callback
###
# decrementNbSensorRulesMet
# ====
# Decrements (by 1) the number of fulfilled SensorRules this rule has.
# @param callback (Function(Error):null): Callback
###
decrementNbSensorRulesMet: (callback) ->
@updateAttributes nbSensorRulesMet: @nbSensorRulesMet-1, callback
###
# createSensorRule
# ====
# Generates a SensorRule (ie. condition) associated to this Rule.
# @param data (dictionary): SensorRule's data (sensorId, type, intervalStart, intervalEnd)
# @param callback (Function(Error, SensorRule):null): Callback
###
createSensorRule: (data, callback) ->
data.ruleId = @id
rule = @
cb = (err, sensorRule) ->
if err
callback err, sensorRule
return
# Incrementing the number of SensorRules this rule has:
rule.incrementNbSensorRules (err2) ->
if err2
# Cancelling modif:
SensorRule.requestDestroy "all", {key: sensorRule.id}, (err3) ->
if err3
err2 += " AND " + err3
callback err2, null
else
callback null, sensorRule
# If the SensorRule (ie. condition) is already met, we let the Rule know:
if sensorRule.met
rule.incrementNbSensorRulesMet (err2) -> callback err2, sensorRule
SensorRule.create data, cb
###
# destroySensorRule
# ====
# Destroys a SensorRule, and updates the information of the Rule it belonged to.
# @param sensorRule (SensorRule): SensorRule to be deleted, belonging to the Rule
# @param callback (Function(Error):null): Callback
###
destroySensorRule: (sensorRule, callback) ->
rule = @
# Decrementing the number of SensorRules this rule has:
unless sensorRule.ruleId == @id
callback "The SensorRule doesn't belong to this Rule."
return
async.parallel [
(cb) -> rule.decrementNbSensorRules cb
,
(cb) ->
if sensorRule.met
rule.decrementNbSensorRulesMet cb
else
cb null
], (err, res) ->
if err
callback err
return
sensorRule.destroy (err2) ->
if err2
# Cancelling modif:
rule.incrementNbSensorRules callback
return
### @todo Think about use-cases before uncommenting below/
if rule.nbSensorRules == rule.nbSensorRulesMet
# The remaining conditions are met, so we apply the rule:
Rule.applyRules [rule], callback
return
###
callback null
###
# createActuatorRule
# ====
# Generates an ActuatorRule (ie. reaction) associated to this Rule.
# @param data (dictionary): ActuatorRule's data (type, value, isActive)
# @param callback (Function(Error, ActuatorRule):null): Callback
###
createActuatorRule: (data, callback) ->
data.ruleId = @id
ActuatorRule.create data, callback
###
# destroy
# ====
# Deletes the Rule, and its SensorRules and ActuatorRules
# @param callback (Function(Error):null): Callback
###
destroy: (callback) ->
superDestroy = (callback) => super callback
params = key: @id
async.parallel [
(cb) ->
SensorRule.request "byRule", params, (err, sensorRules)->
if err
cb 'Error while deleting the conditions (SensorRules) associated: '+err
msgErr = ''
partialCallback = (partialErr) ->
msgErr += 'Error while deleting SensorRule: ' + partialErr + '\n'
sensorRule.destroy partialCallback for sensorRule in sensorRules
if msgErr?
cb msgErr
,
(cb) ->
ActuatorRule.request "byRule", params, (err, actuatorRules)->
if err
cb 'Error while deleting the conditions (ActuatorRules) associated: '+err
msgErr = ''
partialCallback = (partialErr) ->
msgErr += 'Error while deleting ActuatorRule: ' + partialErr + '\n'
actuatorRule.destroy partialCallback for actuatorRule in actuatorRules
if msgErr?
cb msgErr
], (err, results) ->
return callback err if err
superDestroy callback
###
# checkMetRules
# ====
# Checks the rules which have all their SensorRules (conditions) met when taking into account the given Measure.
# @param measure (Measure): Measure to take into account
# @param callback (Function(Error, Rule[]):null): Callback
###
@checkMetRules: (measure, callback) ->
# First find the SensorRules
param =
key: [measure.sensorId, measure.type]
SensorRule.request 'bySensorIdAndType', param, (err, sensorRules)->
if err
callback err, null
return
# @todo async.map stops and calls the callback at the first returned error. We might not want such a behavior...
async.map sensorRules, ((sensorRule, cb) ->
if (!sensorRule.intervalEnd || measure.value < sensorRule.intervalEnd) && (!sensorRule.intervalStart || measure.value >= sensorRule.intervalStart)
# If the measure triggers the SensorRule, update the corresponding Rule:
Rule.find sensorRule.ruleId (err, rule) ->
if err
cb 'Error when finding Rule related to SensorRule #'+sensorRule.id+': '+err, null
return
if !rule
cb 'Rule related to SensorRule #'+sensorRule.id+' not found.', null
return
isRuleMet = (rule.nbSensorRulesMet + 1) == rule.nbSensorRules
rule.incrementNbSensorRulesMet (err) ->
if err
cb 'Error when updating Rule related to SensorRule #'+sensorRule.id+' (to increment its number of met SensorRules): '+err, isRuleMet? rule:null
return
sensorRule.updateAttributes met: true, ((err) -> cb err, isRuleMet? rule:null)
else
if sensorRule.met
# If the conditions was met, it is not the case anymore.
# We thus have to decrement the number of met SensorRules of the corresponding rule, and update the SensorRule itself:
Rule.find sensorRule.ruleId (err, rule) ->
if err
cb 'Error when finding Rule related to SensorRule #'+sensorRule.id+': '+err, null
return
if !rule
cb 'Rule related to SensorRule #'+sensorRule.id+' not found.', null
return
rule.decrementNbSensorRulesMet (err) ->
if err
cb 'Error when updating Rule related to SensorRule #'+sensorRule.id+' (to decrement its number of met SensorRules): '+err, null
return
sensorRule.updateAttributes met: false, ((err) -> cb err, null)
else
callback null, null
), callback
###
# applyRules
# ====
# Applies the ActuatorRules (reactions) of the given rules.
# @param rules (Rules[]): Rules
# @param callback (Function(Error):null): Callback
###
@applyRules: (rules, callback) ->
# @todo async.each stops and calls the callback at the first returned error. We might not want such a behavior...
async.each rules, ((rule, cb) ->
ActuatorRule.request "byRule", key: rule.id, (err, actuatorRules)->
if err
callback 'Error while finding ActuatorRules associated to Rule #'+rule.id+': '+err
return
async.each actuatorRules, ((actuatorRule, cb2) ->
actuatorRule.apply cb2
), cb
), callback | 181969 | ###
# =================
# SCHEMA - Rule
# =================
# Defines a Rule, binding Sensors Measures and Actuators together.
###
cozydb = require 'cozydb'
async = require 'async'
SensorRule = require './sensorRule'
ActuatorRule = require './actuatorRule'
Measure = require './measure'
module.exports = class Rule extends cozydb.CozyModel
@schema:
name: type : String # not Empty
nbSensorRules: type : Number, default : 0
nbSensorRulesMet: type : Number, default : 0
###
# incrementNbSensorRules
# ====
# Increments (by 1) the number of SensorRules this rule has.
# @param callback (Function(Error):null): Callback
###
incrementNbSensorRules: (callback) ->
@updateAttributes nbSensorRules: @nbSensorRules+1, callback
###
# decrementNbSensorRules
# ====
# Decrements (by 1) the number of SensorRules this rule has.
# @param callback (Function(Error):null): Callback
###
decrementNbSensorRules: (callback) ->
@updateAttributes nbSensorRules: @nbSensorRules-1, callback
###
# incrementNbSensorRulesMet
# ====
# Increments (by 1) the number of fulfilled SensorRules this rule has.
# @param callback (Function(Error):null): Callback
###
incrementNbSensorRulesMet: (callback) ->
@updateAttributes nbSensorRulesMet: @nbSensorRulesMet+1, callback
###
# decrementNbSensorRulesMet
# ====
# Decrements (by 1) the number of fulfilled SensorRules this rule has.
# @param callback (Function(Error):null): Callback
###
decrementNbSensorRulesMet: (callback) ->
@updateAttributes nbSensorRulesMet: @nbSensorRulesMet-1, callback
###
# createSensorRule
# ====
# Generates a SensorRule (ie. condition) associated to this Rule.
# @param data (dictionary): SensorRule's data (sensorId, type, intervalStart, intervalEnd)
# @param callback (Function(Error, SensorRule):null): Callback
###
createSensorRule: (data, callback) ->
data.ruleId = @id
rule = @
cb = (err, sensorRule) ->
if err
callback err, sensorRule
return
# Incrementing the number of SensorRules this rule has:
rule.incrementNbSensorRules (err2) ->
if err2
# Cancelling modif:
SensorRule.requestDestroy "all", {key: sensorRule.id}, (err3) ->
if err3
err2 += " AND " + err3
callback err2, null
else
callback null, sensorRule
# If the SensorRule (ie. condition) is already met, we let the Rule know:
if sensorRule.met
rule.incrementNbSensorRulesMet (err2) -> callback err2, sensorRule
SensorRule.create data, cb
###
# destroySensorRule
# ====
# Destroys a SensorRule, and updates the information of the Rule it belonged to.
# @param sensorRule (SensorRule): SensorRule to be deleted, belonging to the Rule
# @param callback (Function(Error):null): Callback
###
destroySensorRule: (sensorRule, callback) ->
rule = @
# Decrementing the number of SensorRules this rule has:
unless sensorRule.ruleId == @id
callback "The SensorRule doesn't belong to this Rule."
return
async.parallel [
(cb) -> rule.decrementNbSensorRules cb
,
(cb) ->
if sensorRule.met
rule.decrementNbSensorRulesMet cb
else
cb null
], (err, res) ->
if err
callback err
return
sensorRule.destroy (err2) ->
if err2
# Cancelling modif:
rule.incrementNbSensorRules callback
return
### @todo Think about use-cases before uncommenting below/
if rule.nbSensorRules == rule.nbSensorRulesMet
# The remaining conditions are met, so we apply the rule:
Rule.applyRules [rule], callback
return
###
callback null
###
# createActuatorRule
# ====
# Generates an ActuatorRule (ie. reaction) associated to this Rule.
# @param data (dictionary): ActuatorRule's data (type, value, isActive)
# @param callback (Function(Error, ActuatorRule):null): Callback
###
createActuatorRule: (data, callback) ->
data.ruleId = @id
ActuatorRule.create data, callback
###
# destroy
# ====
# Deletes the Rule, and its SensorRules and ActuatorRules
# @param callback (Function(Error):null): Callback
###
destroy: (callback) ->
superDestroy = (callback) => super callback
params = key: @id
async.parallel [
(cb) ->
SensorRule.request "byRule", params, (err, sensorRules)->
if err
cb 'Error while deleting the conditions (SensorRules) associated: '+err
msgErr = ''
partialCallback = (partialErr) ->
msgErr += 'Error while deleting SensorRule: ' + partialErr + '\n'
sensorRule.destroy partialCallback for sensorRule in sensorRules
if msgErr?
cb msgErr
,
(cb) ->
ActuatorRule.request "byRule", params, (err, actuatorRules)->
if err
cb 'Error while deleting the conditions (ActuatorRules) associated: '+err
msgErr = ''
partialCallback = (partialErr) ->
msgErr += 'Error while deleting ActuatorRule: ' + partialErr + '\n'
actuatorRule.destroy partialCallback for actuatorRule in actuatorRules
if msgErr?
cb msgErr
], (err, results) ->
return callback err if err
superDestroy callback
###
# checkMetRules
# ====
# Checks the rules which have all their SensorRules (conditions) met when taking into account the given Measure.
# @param measure (Measure): Measure to take into account
# @param callback (Function(Error, Rule[]):null): Callback
###
@checkMetRules: (measure, callback) ->
# First find the SensorRules
param =
key: [measure.sensorId, measure.type]
SensorRule.request 'bySensorIdAndType', param, (err, sensorRules)->
if err
callback err, null
return
# @todo async.map stops and calls the callback at the first returned error. We might not want such a behavior...
async.map sensorRules, ((sensorRule, cb) ->
if (!sensorRule.intervalEnd || measure.value < sensorRule.intervalEnd) && (!sensorRule.intervalStart || measure.value >= sensorRule.intervalStart)
# If the measure triggers the SensorRule, update the corresponding Rule:
Rule.find sensorRule.ruleId (err, rule) ->
if err
cb 'Error when finding Rule related to SensorRule #'+sensorRule.id+': '+err, null
return
if !rule
cb 'Rule related to SensorRule #'+sensorRule.id+' not found.', null
return
isRuleMet = (rule.nbSensorRulesMet + 1) == rule.nbSensorRules
rule.incrementNbSensorRulesMet (err) ->
if err
cb 'Error when updating Rule related to SensorRule #'+sensorRule.id+' (to increment its number of met SensorRules): '+err, isRuleMet? rule:null
return
sensorRule.updateAttributes met: true, ((err) -> cb err, isRuleMet? rule:null)
else
if sensorRule.met
# If the conditions was met, it is not the case anymore.
# We thus have to decrement the number of met SensorRules of the corresponding rule, and update the SensorRule itself:
Rule.find sensorRule.ruleId (err, rule) ->
if err
cb 'Error when finding Rule related to SensorRule #'+sensorRule.id+': '+err, null
return
if !rule
cb 'Rule related to SensorRule #'+sensorRule.id+' not found.', null
return
rule.decrementNbSensorRulesMet (err) ->
if err
cb 'Error when updating Rule related to SensorRule #'+sensorRule.id+' (to decrement its number of met SensorRules): '+err, null
return
sensorRule.updateAttributes met: false, ((err) -> cb err, null)
else
callback null, null
), callback
###
# applyRules
# ====
# Applies the ActuatorRules (reactions) of the given rules.
# @param rules (Rules[]): Rules
# @param callback (Function(Error):null): Callback
###
@applyRules: (rules, callback) ->
# @todo async.each stops and calls the callback at the first returned error. We might not want such a behavior...
async.each rules, ((rule, cb) ->
ActuatorRule.request "byRule", key: <KEY>, (err, actuatorRules)->
if err
callback 'Error while finding ActuatorRules associated to Rule #'+rule.id+': '+err
return
async.each actuatorRules, ((actuatorRule, cb2) ->
actuatorRule.apply cb2
), cb
), callback | true | ###
# =================
# SCHEMA - Rule
# =================
# Defines a Rule, binding Sensors Measures and Actuators together.
###
cozydb = require 'cozydb'
async = require 'async'
SensorRule = require './sensorRule'
ActuatorRule = require './actuatorRule'
Measure = require './measure'
module.exports = class Rule extends cozydb.CozyModel
@schema:
name: type : String # not Empty
nbSensorRules: type : Number, default : 0
nbSensorRulesMet: type : Number, default : 0
###
# incrementNbSensorRules
# ====
# Increments (by 1) the number of SensorRules this rule has.
# @param callback (Function(Error):null): Callback
###
incrementNbSensorRules: (callback) ->
@updateAttributes nbSensorRules: @nbSensorRules+1, callback
###
# decrementNbSensorRules
# ====
# Decrements (by 1) the number of SensorRules this rule has.
# @param callback (Function(Error):null): Callback
###
decrementNbSensorRules: (callback) ->
@updateAttributes nbSensorRules: @nbSensorRules-1, callback
###
# incrementNbSensorRulesMet
# ====
# Increments (by 1) the number of fulfilled SensorRules this rule has.
# @param callback (Function(Error):null): Callback
###
incrementNbSensorRulesMet: (callback) ->
@updateAttributes nbSensorRulesMet: @nbSensorRulesMet+1, callback
###
# decrementNbSensorRulesMet
# ====
# Decrements (by 1) the number of fulfilled SensorRules this rule has.
# @param callback (Function(Error):null): Callback
###
decrementNbSensorRulesMet: (callback) ->
@updateAttributes nbSensorRulesMet: @nbSensorRulesMet-1, callback
###
# createSensorRule
# ====
# Generates a SensorRule (ie. condition) associated to this Rule.
# @param data (dictionary): SensorRule's data (sensorId, type, intervalStart, intervalEnd)
# @param callback (Function(Error, SensorRule):null): Callback
###
createSensorRule: (data, callback) ->
data.ruleId = @id
rule = @
cb = (err, sensorRule) ->
if err
callback err, sensorRule
return
# Incrementing the number of SensorRules this rule has:
rule.incrementNbSensorRules (err2) ->
if err2
# Cancelling modif:
SensorRule.requestDestroy "all", {key: sensorRule.id}, (err3) ->
if err3
err2 += " AND " + err3
callback err2, null
else
callback null, sensorRule
# If the SensorRule (ie. condition) is already met, we let the Rule know:
if sensorRule.met
rule.incrementNbSensorRulesMet (err2) -> callback err2, sensorRule
SensorRule.create data, cb
###
# destroySensorRule
# ====
# Destroys a SensorRule, and updates the information of the Rule it belonged to.
# @param sensorRule (SensorRule): SensorRule to be deleted, belonging to the Rule
# @param callback (Function(Error):null): Callback
###
destroySensorRule: (sensorRule, callback) ->
rule = @
# Decrementing the number of SensorRules this rule has:
unless sensorRule.ruleId == @id
callback "The SensorRule doesn't belong to this Rule."
return
async.parallel [
(cb) -> rule.decrementNbSensorRules cb
,
(cb) ->
if sensorRule.met
rule.decrementNbSensorRulesMet cb
else
cb null
], (err, res) ->
if err
callback err
return
sensorRule.destroy (err2) ->
if err2
# Cancelling modif:
rule.incrementNbSensorRules callback
return
### @todo Think about use-cases before uncommenting below/
if rule.nbSensorRules == rule.nbSensorRulesMet
# The remaining conditions are met, so we apply the rule:
Rule.applyRules [rule], callback
return
###
callback null
###
# createActuatorRule
# ====
# Generates an ActuatorRule (ie. reaction) associated to this Rule.
# @param data (dictionary): ActuatorRule's data (type, value, isActive)
# @param callback (Function(Error, ActuatorRule):null): Callback
###
createActuatorRule: (data, callback) ->
data.ruleId = @id
ActuatorRule.create data, callback
###
# destroy
# ====
# Deletes the Rule, and its SensorRules and ActuatorRules
# @param callback (Function(Error):null): Callback
###
destroy: (callback) ->
superDestroy = (callback) => super callback
params = key: @id
async.parallel [
(cb) ->
SensorRule.request "byRule", params, (err, sensorRules)->
if err
cb 'Error while deleting the conditions (SensorRules) associated: '+err
msgErr = ''
partialCallback = (partialErr) ->
msgErr += 'Error while deleting SensorRule: ' + partialErr + '\n'
sensorRule.destroy partialCallback for sensorRule in sensorRules
if msgErr?
cb msgErr
,
(cb) ->
ActuatorRule.request "byRule", params, (err, actuatorRules)->
if err
cb 'Error while deleting the conditions (ActuatorRules) associated: '+err
msgErr = ''
partialCallback = (partialErr) ->
msgErr += 'Error while deleting ActuatorRule: ' + partialErr + '\n'
actuatorRule.destroy partialCallback for actuatorRule in actuatorRules
if msgErr?
cb msgErr
], (err, results) ->
return callback err if err
superDestroy callback
###
# checkMetRules
# ====
# Checks the rules which have all their SensorRules (conditions) met when taking into account the given Measure.
# @param measure (Measure): Measure to take into account
# @param callback (Function(Error, Rule[]):null): Callback
###
@checkMetRules: (measure, callback) ->
# First find the SensorRules
param =
key: [measure.sensorId, measure.type]
SensorRule.request 'bySensorIdAndType', param, (err, sensorRules)->
if err
callback err, null
return
# @todo async.map stops and calls the callback at the first returned error. We might not want such a behavior...
async.map sensorRules, ((sensorRule, cb) ->
if (!sensorRule.intervalEnd || measure.value < sensorRule.intervalEnd) && (!sensorRule.intervalStart || measure.value >= sensorRule.intervalStart)
# If the measure triggers the SensorRule, update the corresponding Rule:
Rule.find sensorRule.ruleId (err, rule) ->
if err
cb 'Error when finding Rule related to SensorRule #'+sensorRule.id+': '+err, null
return
if !rule
cb 'Rule related to SensorRule #'+sensorRule.id+' not found.', null
return
isRuleMet = (rule.nbSensorRulesMet + 1) == rule.nbSensorRules
rule.incrementNbSensorRulesMet (err) ->
if err
cb 'Error when updating Rule related to SensorRule #'+sensorRule.id+' (to increment its number of met SensorRules): '+err, isRuleMet? rule:null
return
sensorRule.updateAttributes met: true, ((err) -> cb err, isRuleMet? rule:null)
else
if sensorRule.met
# If the conditions was met, it is not the case anymore.
# We thus have to decrement the number of met SensorRules of the corresponding rule, and update the SensorRule itself:
Rule.find sensorRule.ruleId (err, rule) ->
if err
cb 'Error when finding Rule related to SensorRule #'+sensorRule.id+': '+err, null
return
if !rule
cb 'Rule related to SensorRule #'+sensorRule.id+' not found.', null
return
rule.decrementNbSensorRulesMet (err) ->
if err
cb 'Error when updating Rule related to SensorRule #'+sensorRule.id+' (to decrement its number of met SensorRules): '+err, null
return
sensorRule.updateAttributes met: false, ((err) -> cb err, null)
else
callback null, null
), callback
###
# applyRules
# ====
# Applies the ActuatorRules (reactions) of the given rules.
# @param rules (Rules[]): Rules
# @param callback (Function(Error):null): Callback
###
@applyRules: (rules, callback) ->
# @todo async.each stops and calls the callback at the first returned error. We might not want such a behavior...
async.each rules, ((rule, cb) ->
ActuatorRule.request "byRule", key: PI:KEY:<KEY>END_PI, (err, actuatorRules)->
if err
callback 'Error while finding ActuatorRules associated to Rule #'+rule.id+': '+err
return
async.each actuatorRules, ((actuatorRule, cb2) ->
actuatorRule.apply cb2
), cb
), callback |
[
{
"context": "mponent.searchResults [\n { id: '123', name: 'Berlin', public: true }\n { id: '456', name: 'China'",
"end": 2597,
"score": 0.8290972709655762,
"start": 2591,
"tag": "NAME",
"value": "Berlin"
},
{
"context": "earch.args[2][0].collection[0].name.should.equal 'Be... | src/client/apps/settings/test/client/tags.test.coffee | craigspaeth/positron | 76 | _ = require 'underscore'
benv = require 'benv'
sinon = require 'sinon'
Backbone = require 'backbone'
{ resolve } = require 'path'
fixtures = require '../../../../../test/helpers/fixtures'
React = require 'react'
ReactDOM = require 'react-dom'
ReactTestUtils = require 'react-dom/test-utils'
ReactDOMServer = require 'react-dom/server'
r =
find: ReactTestUtils.findRenderedDOMComponentWithClass
simulate: ReactTestUtils.Simulate
describe 'TagsView', ->
beforeEach (done) ->
benv.setup =>
benv.expose $: benv.require 'jquery'
{ TagsView } = mod = benv.requireWithJadeify(
resolve(__dirname, '../../client/tags')
[]
)
mod.__set__ 'FilterSearch', @FilterSearch = sinon.stub()
mod.__set__ 'AddTag', @AddTag = sinon.stub()
mod.__set__ 'sd', { API_URL: 'https://writer.artsy.net/api' }
@request =
post: sinon.stub().returns
set: sinon.stub().returns
send: sinon.stub().returns
end: sinon.stub().yields(
null,
body: data: tags: [fixtures().tags]
)
mod.__set__ 'request', @request
sinon.stub Backbone, 'sync'
props = {
tags: [fixtures().tags]
public: true
}
@rendered = ReactDOMServer.renderToString(
React.createElement(TagsView, props)
)
@component = ReactDOM.render(
React.createElement(TagsView, props),
(@$el = $ "<div></div>")[0],
=>
)
done()
afterEach ->
Backbone.sync.restore()
benv.teardown()
it 'renders nav with active state', ->
@rendered.should.containEql '"topic is-active"'
@rendered.should.containEql '"internal "'
it 'switches active state when tab is clicked', ->
r.simulate.click r.find @component, 'internal'
$(@component.refs.internalTab).hasClass('is-active').should.be.true()
it 'inits FilterSearch component', ->
@FilterSearch.args[0][0].url.should.equal 'https://writer.artsy.net/api/tags?public=true&limit=50&q=%QUERY'
@FilterSearch.args[0][0].placeholder.should.equal 'Search for tag...'
@FilterSearch.args[0][0].collection[0].name.should.equal 'Show Reviews'
@FilterSearch.args[0][0].contentType.should.equal 'tag'
it 'inits AddTag component', ->
@AddTag.called.should.be.true()
it '#getActiveState', ->
@component.getActiveState(true).should.equal 'is-active'
@component.setState(public: false)
@component.getActiveState(true).should.equal ''
it 'updates #searchResults', ->
@component.searchResults [
{ id: '123', name: 'Berlin', public: true }
{ id: '456', name: 'China', public: true }
]
@FilterSearch.args[2][0].collection.length.should.equal 2
@FilterSearch.args[2][0].collection[0].name.should.equal 'Berlin'
@FilterSearch.args[2][0].collection[1].name.should.equal 'China'
@FilterSearch.args[2][0].collection[0].public.should.be.true()
@FilterSearch.args[2][0].collection[1].public.should.be.true()
describe '#setView', ->
it 'sets topic tag view', ->
@component.setView true
@component.state.public.should.be.true()
@component.state.tags.length.should.equal 1
it 'sets internal tag view', ->
@component.setView false
@component.state.public.should.be.false()
@component.state.tags.length.should.equal 1
describe '#addTag', ->
it 'adds tag on success', ->
@component.addTag 'New Tag'
Backbone.sync.args[0][0].should.equal 'create'
Backbone.sync.args[0][1].attributes.name.should.equal 'New Tag'
Backbone.sync.args[0][1].attributes.public.should.be.true()
Backbone.sync.args[0][2].success fixtures().tags
@FilterSearch.args[2][0].collection.length.should.equal 2
it 'displays an error message on error', ->
@component.addTag 'Repeat Tag'
Backbone.sync.args[0][2].error()
$(r.find(@component, 'tags-container')).html().should.containEql(
'There has been an error. Please contact support.'
)
describe '#deleteTag', ->
it 'removes tag from page on delete', ->
@component.deleteTag id: '55356a9deca560a0137aa4b7'
Backbone.sync.args[0][0].should.equal 'delete'
Backbone.sync.args[0][2].success fixtures().tags
@FilterSearch.args[2][0].collection.length.should.equal 0
it 'displays an error message on error', ->
@component.deleteTag id: '55356a9deca560a0137aa4b7'
Backbone.sync.args[0][2].error()
$(r.find(@component, 'tags-container')).html().should.containEql(
'There has been an error. Please contact support.'
)
| 33161 | _ = require 'underscore'
benv = require 'benv'
sinon = require 'sinon'
Backbone = require 'backbone'
{ resolve } = require 'path'
fixtures = require '../../../../../test/helpers/fixtures'
React = require 'react'
ReactDOM = require 'react-dom'
ReactTestUtils = require 'react-dom/test-utils'
ReactDOMServer = require 'react-dom/server'
r =
find: ReactTestUtils.findRenderedDOMComponentWithClass
simulate: ReactTestUtils.Simulate
describe 'TagsView', ->
beforeEach (done) ->
benv.setup =>
benv.expose $: benv.require 'jquery'
{ TagsView } = mod = benv.requireWithJadeify(
resolve(__dirname, '../../client/tags')
[]
)
mod.__set__ 'FilterSearch', @FilterSearch = sinon.stub()
mod.__set__ 'AddTag', @AddTag = sinon.stub()
mod.__set__ 'sd', { API_URL: 'https://writer.artsy.net/api' }
@request =
post: sinon.stub().returns
set: sinon.stub().returns
send: sinon.stub().returns
end: sinon.stub().yields(
null,
body: data: tags: [fixtures().tags]
)
mod.__set__ 'request', @request
sinon.stub Backbone, 'sync'
props = {
tags: [fixtures().tags]
public: true
}
@rendered = ReactDOMServer.renderToString(
React.createElement(TagsView, props)
)
@component = ReactDOM.render(
React.createElement(TagsView, props),
(@$el = $ "<div></div>")[0],
=>
)
done()
afterEach ->
Backbone.sync.restore()
benv.teardown()
it 'renders nav with active state', ->
@rendered.should.containEql '"topic is-active"'
@rendered.should.containEql '"internal "'
it 'switches active state when tab is clicked', ->
r.simulate.click r.find @component, 'internal'
$(@component.refs.internalTab).hasClass('is-active').should.be.true()
it 'inits FilterSearch component', ->
@FilterSearch.args[0][0].url.should.equal 'https://writer.artsy.net/api/tags?public=true&limit=50&q=%QUERY'
@FilterSearch.args[0][0].placeholder.should.equal 'Search for tag...'
@FilterSearch.args[0][0].collection[0].name.should.equal 'Show Reviews'
@FilterSearch.args[0][0].contentType.should.equal 'tag'
it 'inits AddTag component', ->
@AddTag.called.should.be.true()
it '#getActiveState', ->
@component.getActiveState(true).should.equal 'is-active'
@component.setState(public: false)
@component.getActiveState(true).should.equal ''
it 'updates #searchResults', ->
@component.searchResults [
{ id: '123', name: '<NAME>', public: true }
{ id: '456', name: 'China', public: true }
]
@FilterSearch.args[2][0].collection.length.should.equal 2
@FilterSearch.args[2][0].collection[0].name.should.equal '<NAME>lin'
@FilterSearch.args[2][0].collection[1].name.should.equal 'China'
@FilterSearch.args[2][0].collection[0].public.should.be.true()
@FilterSearch.args[2][0].collection[1].public.should.be.true()
describe '#setView', ->
it 'sets topic tag view', ->
@component.setView true
@component.state.public.should.be.true()
@component.state.tags.length.should.equal 1
it 'sets internal tag view', ->
@component.setView false
@component.state.public.should.be.false()
@component.state.tags.length.should.equal 1
describe '#addTag', ->
it 'adds tag on success', ->
@component.addTag 'New Tag'
Backbone.sync.args[0][0].should.equal 'create'
Backbone.sync.args[0][1].attributes.name.should.equal 'New Tag'
Backbone.sync.args[0][1].attributes.public.should.be.true()
Backbone.sync.args[0][2].success fixtures().tags
@FilterSearch.args[2][0].collection.length.should.equal 2
it 'displays an error message on error', ->
@component.addTag 'Repeat Tag'
Backbone.sync.args[0][2].error()
$(r.find(@component, 'tags-container')).html().should.containEql(
'There has been an error. Please contact support.'
)
describe '#deleteTag', ->
it 'removes tag from page on delete', ->
@component.deleteTag id: '55356a9deca560a0137aa4b7'
Backbone.sync.args[0][0].should.equal 'delete'
Backbone.sync.args[0][2].success fixtures().tags
@FilterSearch.args[2][0].collection.length.should.equal 0
it 'displays an error message on error', ->
@component.deleteTag id: '55356a9deca560a0137aa4b7'
Backbone.sync.args[0][2].error()
$(r.find(@component, 'tags-container')).html().should.containEql(
'There has been an error. Please contact support.'
)
| true | _ = require 'underscore'
benv = require 'benv'
sinon = require 'sinon'
Backbone = require 'backbone'
{ resolve } = require 'path'
fixtures = require '../../../../../test/helpers/fixtures'
React = require 'react'
ReactDOM = require 'react-dom'
ReactTestUtils = require 'react-dom/test-utils'
ReactDOMServer = require 'react-dom/server'
r =
find: ReactTestUtils.findRenderedDOMComponentWithClass
simulate: ReactTestUtils.Simulate
describe 'TagsView', ->
beforeEach (done) ->
benv.setup =>
benv.expose $: benv.require 'jquery'
{ TagsView } = mod = benv.requireWithJadeify(
resolve(__dirname, '../../client/tags')
[]
)
mod.__set__ 'FilterSearch', @FilterSearch = sinon.stub()
mod.__set__ 'AddTag', @AddTag = sinon.stub()
mod.__set__ 'sd', { API_URL: 'https://writer.artsy.net/api' }
@request =
post: sinon.stub().returns
set: sinon.stub().returns
send: sinon.stub().returns
end: sinon.stub().yields(
null,
body: data: tags: [fixtures().tags]
)
mod.__set__ 'request', @request
sinon.stub Backbone, 'sync'
props = {
tags: [fixtures().tags]
public: true
}
@rendered = ReactDOMServer.renderToString(
React.createElement(TagsView, props)
)
@component = ReactDOM.render(
React.createElement(TagsView, props),
(@$el = $ "<div></div>")[0],
=>
)
done()
afterEach ->
Backbone.sync.restore()
benv.teardown()
it 'renders nav with active state', ->
@rendered.should.containEql '"topic is-active"'
@rendered.should.containEql '"internal "'
it 'switches active state when tab is clicked', ->
r.simulate.click r.find @component, 'internal'
$(@component.refs.internalTab).hasClass('is-active').should.be.true()
it 'inits FilterSearch component', ->
@FilterSearch.args[0][0].url.should.equal 'https://writer.artsy.net/api/tags?public=true&limit=50&q=%QUERY'
@FilterSearch.args[0][0].placeholder.should.equal 'Search for tag...'
@FilterSearch.args[0][0].collection[0].name.should.equal 'Show Reviews'
@FilterSearch.args[0][0].contentType.should.equal 'tag'
it 'inits AddTag component', ->
@AddTag.called.should.be.true()
it '#getActiveState', ->
@component.getActiveState(true).should.equal 'is-active'
@component.setState(public: false)
@component.getActiveState(true).should.equal ''
it 'updates #searchResults', ->
@component.searchResults [
{ id: '123', name: 'PI:NAME:<NAME>END_PI', public: true }
{ id: '456', name: 'China', public: true }
]
@FilterSearch.args[2][0].collection.length.should.equal 2
@FilterSearch.args[2][0].collection[0].name.should.equal 'PI:NAME:<NAME>END_PIlin'
@FilterSearch.args[2][0].collection[1].name.should.equal 'China'
@FilterSearch.args[2][0].collection[0].public.should.be.true()
@FilterSearch.args[2][0].collection[1].public.should.be.true()
describe '#setView', ->
it 'sets topic tag view', ->
@component.setView true
@component.state.public.should.be.true()
@component.state.tags.length.should.equal 1
it 'sets internal tag view', ->
@component.setView false
@component.state.public.should.be.false()
@component.state.tags.length.should.equal 1
describe '#addTag', ->
it 'adds tag on success', ->
@component.addTag 'New Tag'
Backbone.sync.args[0][0].should.equal 'create'
Backbone.sync.args[0][1].attributes.name.should.equal 'New Tag'
Backbone.sync.args[0][1].attributes.public.should.be.true()
Backbone.sync.args[0][2].success fixtures().tags
@FilterSearch.args[2][0].collection.length.should.equal 2
it 'displays an error message on error', ->
@component.addTag 'Repeat Tag'
Backbone.sync.args[0][2].error()
$(r.find(@component, 'tags-container')).html().should.containEql(
'There has been an error. Please contact support.'
)
describe '#deleteTag', ->
it 'removes tag from page on delete', ->
@component.deleteTag id: '55356a9deca560a0137aa4b7'
Backbone.sync.args[0][0].should.equal 'delete'
Backbone.sync.args[0][2].success fixtures().tags
@FilterSearch.args[2][0].collection.length.should.equal 0
it 'displays an error message on error', ->
@component.deleteTag id: '55356a9deca560a0137aa4b7'
Backbone.sync.args[0][2].error()
$(r.find(@component, 'tags-container')).html().should.containEql(
'There has been an error. Please contact support.'
)
|
[
{
"context": "osen-search-input\").first()\n search_field.val(\"一\")\n search_field.trigger(\"keyup\")\n\n expect(d",
"end": 5596,
"score": 0.6494293808937073,
"start": 5595,
"tag": "NAME",
"value": "一"
}
] | spec/jquery/searching.spec.coffee | addsb/chosen | 44 | describe "Searching", ->
it "should not match the actual text of HTML entities", ->
tmpl = "
<select data-placeholder='Choose an HTML Entity...'>
<option value=''></option>
<option value='This & That'>This & That</option>
<option value='This < That'>This < That</option>
</select>
"
div = $("<div>").html(tmpl)
select = div.find("select")
select.chosen({search_contains: true})
container = div.find(".chosen-container")
container.trigger("mousedown") # open the drop
# Both options should be active
results = div.find(".active-result")
expect(results.length).toBe(2)
# Search for the html entity by name
search_field = div.find(".chosen-search input").first()
search_field.val("mp")
search_field.trigger("keyup")
results = div.find(".active-result")
expect(results.length).toBe(0)
it "renders options correctly when they contain characters that require HTML encoding", ->
div = $("<div>").html("""
<select>
<option value="A & B">A & B</option>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".active-result").length).toBe(1)
expect(div.find(".active-result").first().html()).toBe("A & B")
search_field = div.find(".chosen-search-input").first()
search_field.val("A")
search_field.trigger("keyup")
expect(div.find(".active-result").length).toBe(1)
expect(div.find(".active-result").first().html()).toBe("<em>A</em> & B")
it "renders optgroups correctly when they contain html encoded tags", ->
div = $("<div>").html("""
<select>
<optgroup label="A <b>hi</b> B">
<option value="Item">Item</option>
</optgroup>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".group-result").length).toBe(1)
expect(div.find(".group-result").first().html()).toBe("A <b>hi</b> B")
it "renders optgroups correctly when they contain characters that require HTML encoding when searching", ->
div = $("<div>").html("""
<select>
<optgroup label="A & B">
<option value="Item">Item</option>
</optgroup>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".group-result").length).toBe(1)
expect(div.find(".group-result").first().html()).toBe("A & B")
search_field = div.find(".chosen-search-input").first()
search_field.val("A")
search_field.trigger("keyup")
expect(div.find(".group-result").length).toBe(1)
expect(div.find(".group-result").first().html()).toBe("<em>A</em> & B")
it "renders no results message correctly when it contains characters that require HTML encoding", ->
div = $("<div>").html("""
<select>
<option value="Item">Item</option>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
search_field = div.find(".chosen-search-input").first()
search_field.val("&")
search_field.trigger("keyup")
expect(div.find(".no-results").length).toBe(1)
expect(div.find(".no-results").first().html().trim()).toBe("No results match <span>&</span>")
search_field.val("&")
search_field.trigger("keyup")
expect(div.find(".no-results").length).toBe(1)
expect(div.find(".no-results").first().html().trim()).toBe("No results match <span>&amp;</span>")
it "matches in non-ascii languages like Chinese when selecting a single item", ->
div = $("<div>").html("""
<select>
<option value="一">一</option>
<option value="二">二</option>
<option value="三">三</option>
<option value="四">四</option>
<option value="五">五</option>
<option value="六">六</option>
<option value="七">七</option>
<option value="八">八</option>
<option value="九">九</option>
<option value="十">十</option>
<option value="十一">十一</option>
<option value="十二">十二</option>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".active-result").length).toBe(12)
search_field = div.find(".chosen-search-input").first()
search_field.val("一")
search_field.trigger("keyup")
expect(div.find(".active-result").length).toBe(1)
expect(div.find(".active-result")[0].innerHTML).toBe("<em>一</em>")
it "matches in non-ascii languages like Chinese when selecting a single item with search_contains", ->
div = $("<div>").html("""
<select>
<option value="一">一</option>
<option value="二">二</option>
<option value="三">三</option>
<option value="四">四</option>
<option value="五">五</option>
<option value="六">六</option>
<option value="七">七</option>
<option value="八">八</option>
<option value="九">九</option>
<option value="十">十</option>
<option value="十一">十一</option>
<option value="十二">十二</option>
</select>
""")
div.find("select").chosen({search_contains: true})
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".active-result").length).toBe(12)
search_field = div.find(".chosen-search-input").first()
search_field.val("一")
search_field.trigger("keyup")
expect(div.find(".active-result").length).toBe(2)
expect(div.find(".active-result")[0].innerHTML).toBe("<em>一</em>")
expect(div.find(".active-result")[1].innerHTML).toBe("十<em>一</em>")
it "matches in non-ascii languages like Chinese when selecting multiple items", ->
div = $("<div>").html("""
<select multiple>
<option value="一">一</option>
<option value="二">二</option>
<option value="三">三</option>
<option value="四">四</option>
<option value="五">五</option>
<option value="六">六</option>
<option value="七">七</option>
<option value="八">八</option>
<option value="九">九</option>
<option value="十">十</option>
<option value="十一">十一</option>
<option value="十二">十二</option>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".active-result").length).toBe(12)
search_field = div.find(".chosen-search-input")
search_field.val("一")
search_field.trigger("keyup")
expect(div.find(".active-result").length).toBe(1)
expect(div.find(".active-result")[0].innerHTML).toBe("<em>一</em>")
it "matches in non-ascii languages like Chinese when selecting multiple items with search_contains", ->
div = $("<div>").html("""
<select multiple>
<option value="一">一</option>
<option value="二">二</option>
<option value="三">三</option>
<option value="四">四</option>
<option value="五">五</option>
<option value="六">六</option>
<option value="七">七</option>
<option value="八">八</option>
<option value="九">九</option>
<option value="十">十</option>
<option value="十一">十一</option>
<option value="十二">十二</option>
</select>
""")
div.find("select").chosen({search_contains: true})
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".active-result").length).toBe(12)
search_field = div.find(".chosen-search-input")
search_field.val("一")
search_field.trigger("keyup")
expect(div.find(".active-result").length).toBe(2)
expect(div.find(".active-result")[0].innerHTML).toBe("<em>一</em>")
expect(div.find(".active-result")[1].innerHTML).toBe("十<em>一</em>")
it "highlights results correctly when multiple words are present", ->
div = $("<div>").html("""
<select>
<option value="oh hello">oh hello</option>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".active-result").length).toBe(1)
search_field = div.find(".chosen-search-input")
search_field.val("h")
search_field.trigger("keyup")
expect(div.find(".active-result").length).toBe(1)
expect(div.find(".active-result")[0].innerHTML).toBe("oh <em>h</em>ello")
describe "respects word boundaries when not using search_contains", ->
div = $("<div>").html("""
<select>
<option value="(lparen">(lparen</option>
<option value="<langle"><langle</option>
<option value="[lbrace">[lbrace</option>
<option value="{lcurly">{lcurly</option>
<option value="¡upsidedownbang">¡upsidedownbang</option>
<option value="¿upsidedownqmark">¿upsidedownqmark</option>
<option value=".period">.period</option>
<option value="-dash">-dash</option>
<option value='"leftquote'>"leftquote</option>
<option value="'leftsinglequote">'leftsinglequote</option>
<option value="“angledleftquote">“angledleftquote</option>
<option value="‘angledleftsinglequote">‘angledleftsinglequote</option>
<option value="«guillemet">«guillemet</option>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
search_field = div.find(".chosen-search-input")
div.find("option").each () ->
boundary_thing = this.value.slice(1)
it "correctly finds words that start after a(n) #{boundary_thing}", ->
search_field.val(boundary_thing)
search_field.trigger("keyup")
expect(div.find(".active-result").length).toBe(1)
expect(div.find(".active-result")[0].innerText.slice(1)).toBe(boundary_thing)
| 172807 | describe "Searching", ->
it "should not match the actual text of HTML entities", ->
tmpl = "
<select data-placeholder='Choose an HTML Entity...'>
<option value=''></option>
<option value='This & That'>This & That</option>
<option value='This < That'>This < That</option>
</select>
"
div = $("<div>").html(tmpl)
select = div.find("select")
select.chosen({search_contains: true})
container = div.find(".chosen-container")
container.trigger("mousedown") # open the drop
# Both options should be active
results = div.find(".active-result")
expect(results.length).toBe(2)
# Search for the html entity by name
search_field = div.find(".chosen-search input").first()
search_field.val("mp")
search_field.trigger("keyup")
results = div.find(".active-result")
expect(results.length).toBe(0)
it "renders options correctly when they contain characters that require HTML encoding", ->
div = $("<div>").html("""
<select>
<option value="A & B">A & B</option>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".active-result").length).toBe(1)
expect(div.find(".active-result").first().html()).toBe("A & B")
search_field = div.find(".chosen-search-input").first()
search_field.val("A")
search_field.trigger("keyup")
expect(div.find(".active-result").length).toBe(1)
expect(div.find(".active-result").first().html()).toBe("<em>A</em> & B")
it "renders optgroups correctly when they contain html encoded tags", ->
div = $("<div>").html("""
<select>
<optgroup label="A <b>hi</b> B">
<option value="Item">Item</option>
</optgroup>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".group-result").length).toBe(1)
expect(div.find(".group-result").first().html()).toBe("A <b>hi</b> B")
it "renders optgroups correctly when they contain characters that require HTML encoding when searching", ->
div = $("<div>").html("""
<select>
<optgroup label="A & B">
<option value="Item">Item</option>
</optgroup>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".group-result").length).toBe(1)
expect(div.find(".group-result").first().html()).toBe("A & B")
search_field = div.find(".chosen-search-input").first()
search_field.val("A")
search_field.trigger("keyup")
expect(div.find(".group-result").length).toBe(1)
expect(div.find(".group-result").first().html()).toBe("<em>A</em> & B")
it "renders no results message correctly when it contains characters that require HTML encoding", ->
div = $("<div>").html("""
<select>
<option value="Item">Item</option>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
search_field = div.find(".chosen-search-input").first()
search_field.val("&")
search_field.trigger("keyup")
expect(div.find(".no-results").length).toBe(1)
expect(div.find(".no-results").first().html().trim()).toBe("No results match <span>&</span>")
search_field.val("&")
search_field.trigger("keyup")
expect(div.find(".no-results").length).toBe(1)
expect(div.find(".no-results").first().html().trim()).toBe("No results match <span>&amp;</span>")
it "matches in non-ascii languages like Chinese when selecting a single item", ->
div = $("<div>").html("""
<select>
<option value="一">一</option>
<option value="二">二</option>
<option value="三">三</option>
<option value="四">四</option>
<option value="五">五</option>
<option value="六">六</option>
<option value="七">七</option>
<option value="八">八</option>
<option value="九">九</option>
<option value="十">十</option>
<option value="十一">十一</option>
<option value="十二">十二</option>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".active-result").length).toBe(12)
search_field = div.find(".chosen-search-input").first()
search_field.val("一")
search_field.trigger("keyup")
expect(div.find(".active-result").length).toBe(1)
expect(div.find(".active-result")[0].innerHTML).toBe("<em>一</em>")
it "matches in non-ascii languages like Chinese when selecting a single item with search_contains", ->
div = $("<div>").html("""
<select>
<option value="一">一</option>
<option value="二">二</option>
<option value="三">三</option>
<option value="四">四</option>
<option value="五">五</option>
<option value="六">六</option>
<option value="七">七</option>
<option value="八">八</option>
<option value="九">九</option>
<option value="十">十</option>
<option value="十一">十一</option>
<option value="十二">十二</option>
</select>
""")
div.find("select").chosen({search_contains: true})
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".active-result").length).toBe(12)
search_field = div.find(".chosen-search-input").first()
search_field.val("<NAME>")
search_field.trigger("keyup")
expect(div.find(".active-result").length).toBe(2)
expect(div.find(".active-result")[0].innerHTML).toBe("<em>一</em>")
expect(div.find(".active-result")[1].innerHTML).toBe("十<em>一</em>")
it "matches in non-ascii languages like Chinese when selecting multiple items", ->
div = $("<div>").html("""
<select multiple>
<option value="一">一</option>
<option value="二">二</option>
<option value="三">三</option>
<option value="四">四</option>
<option value="五">五</option>
<option value="六">六</option>
<option value="七">七</option>
<option value="八">八</option>
<option value="九">九</option>
<option value="十">十</option>
<option value="十一">十一</option>
<option value="十二">十二</option>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".active-result").length).toBe(12)
search_field = div.find(".chosen-search-input")
search_field.val("一")
search_field.trigger("keyup")
expect(div.find(".active-result").length).toBe(1)
expect(div.find(".active-result")[0].innerHTML).toBe("<em>一</em>")
it "matches in non-ascii languages like Chinese when selecting multiple items with search_contains", ->
div = $("<div>").html("""
<select multiple>
<option value="一">一</option>
<option value="二">二</option>
<option value="三">三</option>
<option value="四">四</option>
<option value="五">五</option>
<option value="六">六</option>
<option value="七">七</option>
<option value="八">八</option>
<option value="九">九</option>
<option value="十">十</option>
<option value="十一">十一</option>
<option value="十二">十二</option>
</select>
""")
div.find("select").chosen({search_contains: true})
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".active-result").length).toBe(12)
search_field = div.find(".chosen-search-input")
search_field.val("一")
search_field.trigger("keyup")
expect(div.find(".active-result").length).toBe(2)
expect(div.find(".active-result")[0].innerHTML).toBe("<em>一</em>")
expect(div.find(".active-result")[1].innerHTML).toBe("十<em>一</em>")
it "highlights results correctly when multiple words are present", ->
div = $("<div>").html("""
<select>
<option value="oh hello">oh hello</option>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".active-result").length).toBe(1)
search_field = div.find(".chosen-search-input")
search_field.val("h")
search_field.trigger("keyup")
expect(div.find(".active-result").length).toBe(1)
expect(div.find(".active-result")[0].innerHTML).toBe("oh <em>h</em>ello")
describe "respects word boundaries when not using search_contains", ->
div = $("<div>").html("""
<select>
<option value="(lparen">(lparen</option>
<option value="<langle"><langle</option>
<option value="[lbrace">[lbrace</option>
<option value="{lcurly">{lcurly</option>
<option value="¡upsidedownbang">¡upsidedownbang</option>
<option value="¿upsidedownqmark">¿upsidedownqmark</option>
<option value=".period">.period</option>
<option value="-dash">-dash</option>
<option value='"leftquote'>"leftquote</option>
<option value="'leftsinglequote">'leftsinglequote</option>
<option value="“angledleftquote">“angledleftquote</option>
<option value="‘angledleftsinglequote">‘angledleftsinglequote</option>
<option value="«guillemet">«guillemet</option>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
search_field = div.find(".chosen-search-input")
div.find("option").each () ->
boundary_thing = this.value.slice(1)
it "correctly finds words that start after a(n) #{boundary_thing}", ->
search_field.val(boundary_thing)
search_field.trigger("keyup")
expect(div.find(".active-result").length).toBe(1)
expect(div.find(".active-result")[0].innerText.slice(1)).toBe(boundary_thing)
| true | describe "Searching", ->
it "should not match the actual text of HTML entities", ->
tmpl = "
<select data-placeholder='Choose an HTML Entity...'>
<option value=''></option>
<option value='This & That'>This & That</option>
<option value='This < That'>This < That</option>
</select>
"
div = $("<div>").html(tmpl)
select = div.find("select")
select.chosen({search_contains: true})
container = div.find(".chosen-container")
container.trigger("mousedown") # open the drop
# Both options should be active
results = div.find(".active-result")
expect(results.length).toBe(2)
# Search for the html entity by name
search_field = div.find(".chosen-search input").first()
search_field.val("mp")
search_field.trigger("keyup")
results = div.find(".active-result")
expect(results.length).toBe(0)
it "renders options correctly when they contain characters that require HTML encoding", ->
div = $("<div>").html("""
<select>
<option value="A & B">A & B</option>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".active-result").length).toBe(1)
expect(div.find(".active-result").first().html()).toBe("A & B")
search_field = div.find(".chosen-search-input").first()
search_field.val("A")
search_field.trigger("keyup")
expect(div.find(".active-result").length).toBe(1)
expect(div.find(".active-result").first().html()).toBe("<em>A</em> & B")
it "renders optgroups correctly when they contain html encoded tags", ->
div = $("<div>").html("""
<select>
<optgroup label="A <b>hi</b> B">
<option value="Item">Item</option>
</optgroup>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".group-result").length).toBe(1)
expect(div.find(".group-result").first().html()).toBe("A <b>hi</b> B")
it "renders optgroups correctly when they contain characters that require HTML encoding when searching", ->
div = $("<div>").html("""
<select>
<optgroup label="A & B">
<option value="Item">Item</option>
</optgroup>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".group-result").length).toBe(1)
expect(div.find(".group-result").first().html()).toBe("A & B")
search_field = div.find(".chosen-search-input").first()
search_field.val("A")
search_field.trigger("keyup")
expect(div.find(".group-result").length).toBe(1)
expect(div.find(".group-result").first().html()).toBe("<em>A</em> & B")
it "renders no results message correctly when it contains characters that require HTML encoding", ->
div = $("<div>").html("""
<select>
<option value="Item">Item</option>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
search_field = div.find(".chosen-search-input").first()
search_field.val("&")
search_field.trigger("keyup")
expect(div.find(".no-results").length).toBe(1)
expect(div.find(".no-results").first().html().trim()).toBe("No results match <span>&</span>")
search_field.val("&")
search_field.trigger("keyup")
expect(div.find(".no-results").length).toBe(1)
expect(div.find(".no-results").first().html().trim()).toBe("No results match <span>&amp;</span>")
it "matches in non-ascii languages like Chinese when selecting a single item", ->
div = $("<div>").html("""
<select>
<option value="一">一</option>
<option value="二">二</option>
<option value="三">三</option>
<option value="四">四</option>
<option value="五">五</option>
<option value="六">六</option>
<option value="七">七</option>
<option value="八">八</option>
<option value="九">九</option>
<option value="十">十</option>
<option value="十一">十一</option>
<option value="十二">十二</option>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".active-result").length).toBe(12)
search_field = div.find(".chosen-search-input").first()
search_field.val("一")
search_field.trigger("keyup")
expect(div.find(".active-result").length).toBe(1)
expect(div.find(".active-result")[0].innerHTML).toBe("<em>一</em>")
it "matches in non-ascii languages like Chinese when selecting a single item with search_contains", ->
div = $("<div>").html("""
<select>
<option value="一">一</option>
<option value="二">二</option>
<option value="三">三</option>
<option value="四">四</option>
<option value="五">五</option>
<option value="六">六</option>
<option value="七">七</option>
<option value="八">八</option>
<option value="九">九</option>
<option value="十">十</option>
<option value="十一">十一</option>
<option value="十二">十二</option>
</select>
""")
div.find("select").chosen({search_contains: true})
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".active-result").length).toBe(12)
search_field = div.find(".chosen-search-input").first()
search_field.val("PI:NAME:<NAME>END_PI")
search_field.trigger("keyup")
expect(div.find(".active-result").length).toBe(2)
expect(div.find(".active-result")[0].innerHTML).toBe("<em>一</em>")
expect(div.find(".active-result")[1].innerHTML).toBe("十<em>一</em>")
it "matches in non-ascii languages like Chinese when selecting multiple items", ->
div = $("<div>").html("""
<select multiple>
<option value="一">一</option>
<option value="二">二</option>
<option value="三">三</option>
<option value="四">四</option>
<option value="五">五</option>
<option value="六">六</option>
<option value="七">七</option>
<option value="八">八</option>
<option value="九">九</option>
<option value="十">十</option>
<option value="十一">十一</option>
<option value="十二">十二</option>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".active-result").length).toBe(12)
search_field = div.find(".chosen-search-input")
search_field.val("一")
search_field.trigger("keyup")
expect(div.find(".active-result").length).toBe(1)
expect(div.find(".active-result")[0].innerHTML).toBe("<em>一</em>")
it "matches in non-ascii languages like Chinese when selecting multiple items with search_contains", ->
div = $("<div>").html("""
<select multiple>
<option value="一">一</option>
<option value="二">二</option>
<option value="三">三</option>
<option value="四">四</option>
<option value="五">五</option>
<option value="六">六</option>
<option value="七">七</option>
<option value="八">八</option>
<option value="九">九</option>
<option value="十">十</option>
<option value="十一">十一</option>
<option value="十二">十二</option>
</select>
""")
div.find("select").chosen({search_contains: true})
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".active-result").length).toBe(12)
search_field = div.find(".chosen-search-input")
search_field.val("一")
search_field.trigger("keyup")
expect(div.find(".active-result").length).toBe(2)
expect(div.find(".active-result")[0].innerHTML).toBe("<em>一</em>")
expect(div.find(".active-result")[1].innerHTML).toBe("十<em>一</em>")
it "highlights results correctly when multiple words are present", ->
div = $("<div>").html("""
<select>
<option value="oh hello">oh hello</option>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
expect(div.find(".active-result").length).toBe(1)
search_field = div.find(".chosen-search-input")
search_field.val("h")
search_field.trigger("keyup")
expect(div.find(".active-result").length).toBe(1)
expect(div.find(".active-result")[0].innerHTML).toBe("oh <em>h</em>ello")
describe "respects word boundaries when not using search_contains", ->
div = $("<div>").html("""
<select>
<option value="(lparen">(lparen</option>
<option value="<langle"><langle</option>
<option value="[lbrace">[lbrace</option>
<option value="{lcurly">{lcurly</option>
<option value="¡upsidedownbang">¡upsidedownbang</option>
<option value="¿upsidedownqmark">¿upsidedownqmark</option>
<option value=".period">.period</option>
<option value="-dash">-dash</option>
<option value='"leftquote'>"leftquote</option>
<option value="'leftsinglequote">'leftsinglequote</option>
<option value="“angledleftquote">“angledleftquote</option>
<option value="‘angledleftsinglequote">‘angledleftsinglequote</option>
<option value="«guillemet">«guillemet</option>
</select>
""")
div.find("select").chosen()
div.find(".chosen-container").trigger("mousedown") # open the drop
search_field = div.find(".chosen-search-input")
div.find("option").each () ->
boundary_thing = this.value.slice(1)
it "correctly finds words that start after a(n) #{boundary_thing}", ->
search_field.val(boundary_thing)
search_field.trigger("keyup")
expect(div.find(".active-result").length).toBe(1)
expect(div.find(".active-result")[0].innerText.slice(1)).toBe(boundary_thing)
|
[
{
"context": " @resource = MyLibrary.Venue.build(token: 'abc123')\n expect(@resource.buildOwner().venueId).",
"end": 1025,
"score": 0.9928116202354431,
"start": 1019,
"tag": "KEY",
"value": "abc123"
},
{
"context": "n =>\n expect(@resource.token).toEqual('abc... | spec/base.coffee | nicklandgrebe/active-resource.js | 95 | describe 'ActiveResource', ->
beforeEach ->
moxios.install(MyLibrary.interface.axios)
window.onSuccess = jasmine.createSpy('onSuccess')
window.onFailure = jasmine.createSpy('onFailure')
window.onCompletion = jasmine.createSpy('onCompletion')
afterEach ->
moxios.uninstall()
describe '::Base', ->
describe '.links()', ->
it 'returns the correct links', ->
expect(MyLibrary.Product.links()).toEqual({ related: 'https://example.com/api/v1/products/' })
describe 'with a different primaryKey', ->
beforeEach ->
class MyLibrary.Venue extends MyLibrary.Base
this.className = 'Venue'
this.queryName = 'venues'
this.primaryKey = 'token'
@hasOne 'owner'
class MyLibrary.Owner extends MyLibrary.Base
this.className = 'Owner'
this.queryName = 'owners'
@belongsTo 'venue'
it 'constructs relationships with the primaryKey', ->
@resource = MyLibrary.Venue.build(token: 'abc123')
expect(@resource.buildOwner().venueId).toEqual('abc123')
describe 'when interfacing', ->
beforeEach ->
MyLibrary.Venue.find(1)
.then window.onSuccess
@promise = moxios.wait =>
moxios.requests.mostRecent().respondWith(JsonApiResponses.Venue.find.tokenized)
.then =>
@resource = window.onSuccess.calls.mostRecent().args[0]
it 'builds the primaryKey into the resource retrieved', ->
@promise.then =>
expect(@resource.token).toEqual('abc123')
it 'is rendered in a resource document with the primaryKey', ->
resourceDocument = JSON.stringify({
data: {
type: 'venues',
token: 'abc123'
}
})
@promise.then =>
@resource.save()
moxios.wait =>
expect(moxios.requests.mostRecent().data).toEqual(resourceDocument)
describe '.clone()', ->
beforeEach ->
MyLibrary.Order.includes('giftCard', 'orderItems').select('price').find(1)
.then window.onSuccess
@promise = moxios.wait =>
moxios.requests.mostRecent().respondWith(JsonApiResponses.Order.find.includes)
.then =>
@resource = window.onSuccess.calls.mostRecent().args[0]
@resource.assignAttributes({ paymentSource: MyLibrary.PaymentMethod.build() })
@resource.errors().add('attribute', 'invalid')
@clone = @resource.clone()
it 'returns a new resource', ->
@promise.then =>
expect(@clone).not.toBe(@resource)
it 'returns a klass of the same type as this', ->
@promise.then =>
expect(@clone.klass()).toBe(@resource.klass())
it 'clones attributes', ->
@promise.then =>
expect(_.omit(@clone.attributes(), 'productId', 'customerId', 'groupedOrderId')).toEqual(@resource.attributes())
it 'clones links', ->
@promise.then =>
expect(@clone.links()).toEqual(@resource.links())
it 'clones errors', ->
@promise.then =>
expect(@clone.errors().size()).toEqual(1)
it 'clones queryParams', ->
@promise.then =>
expect(@clone.queryParams()).toEqual({
fields: { orders: ['price'] },
include: ['giftCard', 'orderItems']
})
it 'sets relationships to clone', ->
@promise.then =>
@clone.klass().reflectOnAllAssociations().each (reflection) =>
name = reflection.name
expect(@clone.association(name).target).toEqual(@resource.association(name).target)
it 'sets loaded relationships to loaded', ->
@promise.then =>
@clone.klass().reflectOnAllAssociations().each (reflection) =>
name = reflection.name
expect(@clone.association(name).loaded()).toEqual(@resource.association(name).loaded())
it 'clones relationship resources attributes', ->
@promise.then =>
@clone.klass().reflectOnAllAssociations().each (reflection) =>
name = reflection.name
if reflection.collection()
i = 0
@clone.association(name).target.each (t) =>
expect(t.attributes()).toEqual(@resource.association(name).target.get(i).attributes())
i += 1
else if @resource.association(name).target?
expect(@clone.association(name).target.attributes()).toEqual(@resource.association(name).target.attributes())
it 'clones relationship links', ->
@promise.then =>
@clone.klass().reflectOnAllAssociations().each (reflection) =>
name = reflection.name
expect(@clone.association(name).links()).toEqual(@resource.association(name).links())
describe '.responseMeta()', ->
beforeEach ->
MyLibrary.Order.includes('giftCard', 'orderItems').select('price').find(1)
.then window.onSuccess
@promise = moxios.wait =>
moxios.requests.mostRecent().respondWith(JsonApiResponses.Order.find.includes)
.then =>
@resource = window.onSuccess.calls.mostRecent().args[0]
it 'adds meta attributes from response to responseMeta', ->
@promise.then =>
expect(@resource.responseMeta()).toEqual({ metaAttribute: 2 })
| 134369 | describe 'ActiveResource', ->
beforeEach ->
moxios.install(MyLibrary.interface.axios)
window.onSuccess = jasmine.createSpy('onSuccess')
window.onFailure = jasmine.createSpy('onFailure')
window.onCompletion = jasmine.createSpy('onCompletion')
afterEach ->
moxios.uninstall()
describe '::Base', ->
describe '.links()', ->
it 'returns the correct links', ->
expect(MyLibrary.Product.links()).toEqual({ related: 'https://example.com/api/v1/products/' })
describe 'with a different primaryKey', ->
beforeEach ->
class MyLibrary.Venue extends MyLibrary.Base
this.className = 'Venue'
this.queryName = 'venues'
this.primaryKey = 'token'
@hasOne 'owner'
class MyLibrary.Owner extends MyLibrary.Base
this.className = 'Owner'
this.queryName = 'owners'
@belongsTo 'venue'
it 'constructs relationships with the primaryKey', ->
@resource = MyLibrary.Venue.build(token: '<KEY>')
expect(@resource.buildOwner().venueId).toEqual('abc123')
describe 'when interfacing', ->
beforeEach ->
MyLibrary.Venue.find(1)
.then window.onSuccess
@promise = moxios.wait =>
moxios.requests.mostRecent().respondWith(JsonApiResponses.Venue.find.tokenized)
.then =>
@resource = window.onSuccess.calls.mostRecent().args[0]
it 'builds the primaryKey into the resource retrieved', ->
@promise.then =>
expect(@resource.token).toEqual('<KEY>')
it 'is rendered in a resource document with the primaryKey', ->
resourceDocument = JSON.stringify({
data: {
type: 'venues',
token: '<KEY>'
}
})
@promise.then =>
@resource.save()
moxios.wait =>
expect(moxios.requests.mostRecent().data).toEqual(resourceDocument)
describe '.clone()', ->
beforeEach ->
MyLibrary.Order.includes('giftCard', 'orderItems').select('price').find(1)
.then window.onSuccess
@promise = moxios.wait =>
moxios.requests.mostRecent().respondWith(JsonApiResponses.Order.find.includes)
.then =>
@resource = window.onSuccess.calls.mostRecent().args[0]
@resource.assignAttributes({ paymentSource: MyLibrary.PaymentMethod.build() })
@resource.errors().add('attribute', 'invalid')
@clone = @resource.clone()
it 'returns a new resource', ->
@promise.then =>
expect(@clone).not.toBe(@resource)
it 'returns a klass of the same type as this', ->
@promise.then =>
expect(@clone.klass()).toBe(@resource.klass())
it 'clones attributes', ->
@promise.then =>
expect(_.omit(@clone.attributes(), 'productId', 'customerId', 'groupedOrderId')).toEqual(@resource.attributes())
it 'clones links', ->
@promise.then =>
expect(@clone.links()).toEqual(@resource.links())
it 'clones errors', ->
@promise.then =>
expect(@clone.errors().size()).toEqual(1)
it 'clones queryParams', ->
@promise.then =>
expect(@clone.queryParams()).toEqual({
fields: { orders: ['price'] },
include: ['giftCard', 'orderItems']
})
it 'sets relationships to clone', ->
@promise.then =>
@clone.klass().reflectOnAllAssociations().each (reflection) =>
name = reflection.name
expect(@clone.association(name).target).toEqual(@resource.association(name).target)
it 'sets loaded relationships to loaded', ->
@promise.then =>
@clone.klass().reflectOnAllAssociations().each (reflection) =>
name = reflection.name
expect(@clone.association(name).loaded()).toEqual(@resource.association(name).loaded())
it 'clones relationship resources attributes', ->
@promise.then =>
@clone.klass().reflectOnAllAssociations().each (reflection) =>
name = reflection.name
if reflection.collection()
i = 0
@clone.association(name).target.each (t) =>
expect(t.attributes()).toEqual(@resource.association(name).target.get(i).attributes())
i += 1
else if @resource.association(name).target?
expect(@clone.association(name).target.attributes()).toEqual(@resource.association(name).target.attributes())
it 'clones relationship links', ->
@promise.then =>
@clone.klass().reflectOnAllAssociations().each (reflection) =>
name = reflection.name
expect(@clone.association(name).links()).toEqual(@resource.association(name).links())
describe '.responseMeta()', ->
beforeEach ->
MyLibrary.Order.includes('giftCard', 'orderItems').select('price').find(1)
.then window.onSuccess
@promise = moxios.wait =>
moxios.requests.mostRecent().respondWith(JsonApiResponses.Order.find.includes)
.then =>
@resource = window.onSuccess.calls.mostRecent().args[0]
it 'adds meta attributes from response to responseMeta', ->
@promise.then =>
expect(@resource.responseMeta()).toEqual({ metaAttribute: 2 })
| true | describe 'ActiveResource', ->
beforeEach ->
moxios.install(MyLibrary.interface.axios)
window.onSuccess = jasmine.createSpy('onSuccess')
window.onFailure = jasmine.createSpy('onFailure')
window.onCompletion = jasmine.createSpy('onCompletion')
afterEach ->
moxios.uninstall()
describe '::Base', ->
describe '.links()', ->
it 'returns the correct links', ->
expect(MyLibrary.Product.links()).toEqual({ related: 'https://example.com/api/v1/products/' })
describe 'with a different primaryKey', ->
beforeEach ->
class MyLibrary.Venue extends MyLibrary.Base
this.className = 'Venue'
this.queryName = 'venues'
this.primaryKey = 'token'
@hasOne 'owner'
class MyLibrary.Owner extends MyLibrary.Base
this.className = 'Owner'
this.queryName = 'owners'
@belongsTo 'venue'
it 'constructs relationships with the primaryKey', ->
@resource = MyLibrary.Venue.build(token: 'PI:KEY:<KEY>END_PI')
expect(@resource.buildOwner().venueId).toEqual('abc123')
describe 'when interfacing', ->
beforeEach ->
MyLibrary.Venue.find(1)
.then window.onSuccess
@promise = moxios.wait =>
moxios.requests.mostRecent().respondWith(JsonApiResponses.Venue.find.tokenized)
.then =>
@resource = window.onSuccess.calls.mostRecent().args[0]
it 'builds the primaryKey into the resource retrieved', ->
@promise.then =>
expect(@resource.token).toEqual('PI:KEY:<KEY>END_PI')
it 'is rendered in a resource document with the primaryKey', ->
resourceDocument = JSON.stringify({
data: {
type: 'venues',
token: 'PI:KEY:<KEY>END_PI'
}
})
@promise.then =>
@resource.save()
moxios.wait =>
expect(moxios.requests.mostRecent().data).toEqual(resourceDocument)
describe '.clone()', ->
beforeEach ->
MyLibrary.Order.includes('giftCard', 'orderItems').select('price').find(1)
.then window.onSuccess
@promise = moxios.wait =>
moxios.requests.mostRecent().respondWith(JsonApiResponses.Order.find.includes)
.then =>
@resource = window.onSuccess.calls.mostRecent().args[0]
@resource.assignAttributes({ paymentSource: MyLibrary.PaymentMethod.build() })
@resource.errors().add('attribute', 'invalid')
@clone = @resource.clone()
it 'returns a new resource', ->
@promise.then =>
expect(@clone).not.toBe(@resource)
it 'returns a klass of the same type as this', ->
@promise.then =>
expect(@clone.klass()).toBe(@resource.klass())
it 'clones attributes', ->
@promise.then =>
expect(_.omit(@clone.attributes(), 'productId', 'customerId', 'groupedOrderId')).toEqual(@resource.attributes())
it 'clones links', ->
@promise.then =>
expect(@clone.links()).toEqual(@resource.links())
it 'clones errors', ->
@promise.then =>
expect(@clone.errors().size()).toEqual(1)
it 'clones queryParams', ->
@promise.then =>
expect(@clone.queryParams()).toEqual({
fields: { orders: ['price'] },
include: ['giftCard', 'orderItems']
})
it 'sets relationships to clone', ->
@promise.then =>
@clone.klass().reflectOnAllAssociations().each (reflection) =>
name = reflection.name
expect(@clone.association(name).target).toEqual(@resource.association(name).target)
it 'sets loaded relationships to loaded', ->
@promise.then =>
@clone.klass().reflectOnAllAssociations().each (reflection) =>
name = reflection.name
expect(@clone.association(name).loaded()).toEqual(@resource.association(name).loaded())
it 'clones relationship resources attributes', ->
@promise.then =>
@clone.klass().reflectOnAllAssociations().each (reflection) =>
name = reflection.name
if reflection.collection()
i = 0
@clone.association(name).target.each (t) =>
expect(t.attributes()).toEqual(@resource.association(name).target.get(i).attributes())
i += 1
else if @resource.association(name).target?
expect(@clone.association(name).target.attributes()).toEqual(@resource.association(name).target.attributes())
it 'clones relationship links', ->
@promise.then =>
@clone.klass().reflectOnAllAssociations().each (reflection) =>
name = reflection.name
expect(@clone.association(name).links()).toEqual(@resource.association(name).links())
describe '.responseMeta()', ->
beforeEach ->
MyLibrary.Order.includes('giftCard', 'orderItems').select('price').find(1)
.then window.onSuccess
@promise = moxios.wait =>
moxios.requests.mostRecent().respondWith(JsonApiResponses.Order.find.includes)
.then =>
@resource = window.onSuccess.calls.mostRecent().args[0]
it 'adds meta attributes from response to responseMeta', ->
@promise.then =>
expect(@resource.responseMeta()).toEqual({ metaAttribute: 2 })
|
[
{
"context": "cope help\n#\n# Dependencies:\n#\tNone\n#\n# Author:\n# pini shlomi shlomi@hpe.com\n\nfileupload = require('fileupload'",
"end": 859,
"score": 0.9994122385978699,
"start": 848,
"tag": "NAME",
"value": "pini shlomi"
},
{
"context": "# Dependencies:\n#\tNone\n#\n# Autho... | src/general.coffee | moneymayur/ChatBot | 0 | ###
Copyright 2016 Hewlett-Packard Development Company, L.P.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
Software distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
###
# Description
# SiteScope Hubot by slack adapter
#
# Configuration:
# sitescope-instances.config
# sitescope-commands.help
#
# Description:
# SiteScope Hubot by slack adapter
#
# Commands:
# hubot: sitescope help
#
# Dependencies:
# None
#
# Author:
# pini shlomi shlomi@hpe.com
fileupload = require('fileupload').createFileUpload('./scripts')
supportCommandsResult = []
sitescopeSetting = null;
module.exports = (robot) ->
LoadSitescopeConfiguration robot,null,null
# Show or reload SiteScope instances configuration file
robot.respond /SiteScope reload config file/i, (msg) ->
reloadSiteScopeConfigFile robot,msg
robot.respond /SiteScope show config file/i, (msg) ->
showSiteScopeConfigFile robot,msg
# Generates SiteScope support comands.
robot.respond /SiteScope help/i, (msg) ->
getSiteScopeHelpSupport robot,msg
########################################################################################
# Load Sitescope configuration
########################################################################################
LoadSitescopeConfiguration = (robot,msg,show) ->
robot.logger.debug "load Sitescope setting config file"
fileupload.get 'sitescope-setting.config', (error, data) ->
if error
robot.logger.error error
return robot.emit 'error', error, msg
sitescopeSetting = JSON.parse(data)
process.env.SIS_CONFIGURATION = data
loadSitescopeSupportCommands robot
if show
showSiteScopeConfigFile robot,msg
########################################################################################
# Load Sitescope Support Commands
########################################################################################
loadSitescopeSupportCommands = (robot) ->
allHelpCommands = sitescopeSetting["help_commands"]
fieldsResult = []
supportCommandsResult = []
for key of allHelpCommands
commandObi = allHelpCommands[key]
Description = {}
Description['title'] = "Description :"
Description['value']=commandObi["Description"]
fieldsResult.push Description
Syntax = {}
Syntax['title'] = "Syntax :"
Syntax['value']=commandObi["Syntax"]
fieldsResult.push Syntax
Examples = {}
Examples['title'] = "Examples :"
Examples['value']=commandObi["Examples"]
fieldsResult.push Examples
attachment =
color:'#0000FF'
fields: fieldsResult
fieldsResult = []
supportCommandsResult.push(attachment)
########################################################################################
# reload SiteScope config file
########################################################################################
reloadSiteScopeConfigFile = (robot,msg) ->
LoadSitescopeConfiguration robot,msg,true
########################################################################################
# show SiteScope config file
########################################################################################
showSiteScopeConfigFile = (robot,msg) ->
fieldsResult = []
sisInstences = getSiteScopeInstances robot
defaultInstance = getDefaultSisInstance robot
for key of sisInstences
robot.logger.debug "instane : #{key}"
keyDefault = ""
if (defaultInstance == key)
keyDefault = "is Default"
instances =
value:key + " #{keyDefault}"
fieldsResult.push instances
instancesResult =
color:'#0000FF'
title:"Sitescope instances"
fields: fieldsResult
msgData =
channel: msg.message.room
attachments:instancesResult
robot.emit 'slack.attachment', msgData
########################################################################################
# get SiteScope Help Support
########################################################################################
getSiteScopeHelpSupport = (robot,msg) ->
msgData =
channel: msg.message.room
text:'*Support SiteScope Commands*'
attachments:supportCommandsResult
robot.emit 'slack.attachment', msgData
########################################################################################
# get default instances
########################################################################################
getDefaultSisInstance = (robot) ->
if sitescopeSetting != null
robot.logger.debug "default_sis: \n#{JSON.stringify(sitescopeSetting["variables"]["default_sis"])}"
sitescopeSetting["variables"]["default_sis"];
########################################################################################
# get instances
########################################################################################
getSiteScopeInstances = (robot) ->
if sitescopeSetting != null
robot.logger.debug "SIS_INSTANCES in getSiteScopeInstances: \n#{JSON.stringify(sitescopeSetting["instances"])}"
sitescopeSetting["instances"];
| 161518 | ###
Copyright 2016 Hewlett-Packard Development Company, L.P.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
Software distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
###
# Description
# SiteScope Hubot by slack adapter
#
# Configuration:
# sitescope-instances.config
# sitescope-commands.help
#
# Description:
# SiteScope Hubot by slack adapter
#
# Commands:
# hubot: sitescope help
#
# Dependencies:
# None
#
# Author:
# <NAME> <EMAIL>
fileupload = require('fileupload').createFileUpload('./scripts')
supportCommandsResult = []
sitescopeSetting = null;
module.exports = (robot) ->
LoadSitescopeConfiguration robot,null,null
# Show or reload SiteScope instances configuration file
robot.respond /SiteScope reload config file/i, (msg) ->
reloadSiteScopeConfigFile robot,msg
robot.respond /SiteScope show config file/i, (msg) ->
showSiteScopeConfigFile robot,msg
# Generates SiteScope support comands.
robot.respond /SiteScope help/i, (msg) ->
getSiteScopeHelpSupport robot,msg
########################################################################################
# Load Sitescope configuration
########################################################################################
LoadSitescopeConfiguration = (robot,msg,show) ->
robot.logger.debug "load Sitescope setting config file"
fileupload.get 'sitescope-setting.config', (error, data) ->
if error
robot.logger.error error
return robot.emit 'error', error, msg
sitescopeSetting = JSON.parse(data)
process.env.SIS_CONFIGURATION = data
loadSitescopeSupportCommands robot
if show
showSiteScopeConfigFile robot,msg
########################################################################################
# Load Sitescope Support Commands
########################################################################################
loadSitescopeSupportCommands = (robot) ->
allHelpCommands = sitescopeSetting["help_commands"]
fieldsResult = []
supportCommandsResult = []
for key of allHelpCommands
commandObi = allHelpCommands[key]
Description = {}
Description['title'] = "Description :"
Description['value']=commandObi["Description"]
fieldsResult.push Description
Syntax = {}
Syntax['title'] = "Syntax :"
Syntax['value']=commandObi["Syntax"]
fieldsResult.push Syntax
Examples = {}
Examples['title'] = "Examples :"
Examples['value']=commandObi["Examples"]
fieldsResult.push Examples
attachment =
color:'#0000FF'
fields: fieldsResult
fieldsResult = []
supportCommandsResult.push(attachment)
########################################################################################
# reload SiteScope config file
########################################################################################
reloadSiteScopeConfigFile = (robot,msg) ->
LoadSitescopeConfiguration robot,msg,true
########################################################################################
# show SiteScope config file
########################################################################################
showSiteScopeConfigFile = (robot,msg) ->
fieldsResult = []
sisInstences = getSiteScopeInstances robot
defaultInstance = getDefaultSisInstance robot
for key of sisInstences
robot.logger.debug "instane : #{key}"
keyDefault = ""
if (defaultInstance == key)
keyDefault = "is Default"
instances =
value:key + " #{keyDefault}"
fieldsResult.push instances
instancesResult =
color:'#0000FF'
title:"Sitescope instances"
fields: fieldsResult
msgData =
channel: msg.message.room
attachments:instancesResult
robot.emit 'slack.attachment', msgData
########################################################################################
# get SiteScope Help Support
########################################################################################
getSiteScopeHelpSupport = (robot,msg) ->
msgData =
channel: msg.message.room
text:'*Support SiteScope Commands*'
attachments:supportCommandsResult
robot.emit 'slack.attachment', msgData
########################################################################################
# get default instances
########################################################################################
getDefaultSisInstance = (robot) ->
if sitescopeSetting != null
robot.logger.debug "default_sis: \n#{JSON.stringify(sitescopeSetting["variables"]["default_sis"])}"
sitescopeSetting["variables"]["default_sis"];
########################################################################################
# get instances
########################################################################################
getSiteScopeInstances = (robot) ->
if sitescopeSetting != null
robot.logger.debug "SIS_INSTANCES in getSiteScopeInstances: \n#{JSON.stringify(sitescopeSetting["instances"])}"
sitescopeSetting["instances"];
| true | ###
Copyright 2016 Hewlett-Packard Development Company, L.P.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
Software distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
###
# Description
# SiteScope Hubot by slack adapter
#
# Configuration:
# sitescope-instances.config
# sitescope-commands.help
#
# Description:
# SiteScope Hubot by slack adapter
#
# Commands:
# hubot: sitescope help
#
# Dependencies:
# None
#
# Author:
# PI:NAME:<NAME>END_PI PI:EMAIL:<EMAIL>END_PI
fileupload = require('fileupload').createFileUpload('./scripts')
supportCommandsResult = []
sitescopeSetting = null;
module.exports = (robot) ->
LoadSitescopeConfiguration robot,null,null
# Show or reload SiteScope instances configuration file
robot.respond /SiteScope reload config file/i, (msg) ->
reloadSiteScopeConfigFile robot,msg
robot.respond /SiteScope show config file/i, (msg) ->
showSiteScopeConfigFile robot,msg
# Generates SiteScope support comands.
robot.respond /SiteScope help/i, (msg) ->
getSiteScopeHelpSupport robot,msg
########################################################################################
# Load Sitescope configuration
########################################################################################
LoadSitescopeConfiguration = (robot,msg,show) ->
robot.logger.debug "load Sitescope setting config file"
fileupload.get 'sitescope-setting.config', (error, data) ->
if error
robot.logger.error error
return robot.emit 'error', error, msg
sitescopeSetting = JSON.parse(data)
process.env.SIS_CONFIGURATION = data
loadSitescopeSupportCommands robot
if show
showSiteScopeConfigFile robot,msg
########################################################################################
# Load Sitescope Support Commands
########################################################################################
loadSitescopeSupportCommands = (robot) ->
allHelpCommands = sitescopeSetting["help_commands"]
fieldsResult = []
supportCommandsResult = []
for key of allHelpCommands
commandObi = allHelpCommands[key]
Description = {}
Description['title'] = "Description :"
Description['value']=commandObi["Description"]
fieldsResult.push Description
Syntax = {}
Syntax['title'] = "Syntax :"
Syntax['value']=commandObi["Syntax"]
fieldsResult.push Syntax
Examples = {}
Examples['title'] = "Examples :"
Examples['value']=commandObi["Examples"]
fieldsResult.push Examples
attachment =
color:'#0000FF'
fields: fieldsResult
fieldsResult = []
supportCommandsResult.push(attachment)
########################################################################################
# reload SiteScope config file
########################################################################################
reloadSiteScopeConfigFile = (robot,msg) ->
LoadSitescopeConfiguration robot,msg,true
########################################################################################
# show SiteScope config file
########################################################################################
showSiteScopeConfigFile = (robot,msg) ->
fieldsResult = []
sisInstences = getSiteScopeInstances robot
defaultInstance = getDefaultSisInstance robot
for key of sisInstences
robot.logger.debug "instane : #{key}"
keyDefault = ""
if (defaultInstance == key)
keyDefault = "is Default"
instances =
value:key + " #{keyDefault}"
fieldsResult.push instances
instancesResult =
color:'#0000FF'
title:"Sitescope instances"
fields: fieldsResult
msgData =
channel: msg.message.room
attachments:instancesResult
robot.emit 'slack.attachment', msgData
########################################################################################
# get SiteScope Help Support
########################################################################################
getSiteScopeHelpSupport = (robot,msg) ->
msgData =
channel: msg.message.room
text:'*Support SiteScope Commands*'
attachments:supportCommandsResult
robot.emit 'slack.attachment', msgData
########################################################################################
# get default instances
########################################################################################
getDefaultSisInstance = (robot) ->
if sitescopeSetting != null
robot.logger.debug "default_sis: \n#{JSON.stringify(sitescopeSetting["variables"]["default_sis"])}"
sitescopeSetting["variables"]["default_sis"];
########################################################################################
# get instances
########################################################################################
getSiteScopeInstances = (robot) ->
if sitescopeSetting != null
robot.logger.debug "SIS_INSTANCES in getSiteScopeInstances: \n#{JSON.stringify(sitescopeSetting["instances"])}"
sitescopeSetting["instances"];
|
[
{
"context": "### \nGET.js v0.2\nCopyright © 2012 Mickaël Raybaud-Roig, All rights reserved.\nLicensed under the BSD 3-cl",
"end": 54,
"score": 0.9998916387557983,
"start": 34,
"tag": "NAME",
"value": "Mickaël Raybaud-Roig"
}
] | GET.coffee | m-r-r/GET.js | 2 | ###
GET.js v0.2
Copyright © 2012 Mickaël Raybaud-Roig, All rights reserved.
Licensed under the BSD 3-clause license, see the COPYING file for details
###
parse_query = (url) ->
decoded = {}
return unless url?
query = ( url.split('?', 2) )[1]
return unless query?
args = query.split('&')
for arg in args
[key, value] = arg.split('=', 2)
if key? and key.length > 0
decoded[key.toLowerCase()] = value
return decoded
if window? and document?
window.$_GET = parse_query(document.location.href)
else if module?
module.exports = {'parse_query': parse_query}
| 171101 | ###
GET.js v0.2
Copyright © 2012 <NAME>, All rights reserved.
Licensed under the BSD 3-clause license, see the COPYING file for details
###
parse_query = (url) ->
decoded = {}
return unless url?
query = ( url.split('?', 2) )[1]
return unless query?
args = query.split('&')
for arg in args
[key, value] = arg.split('=', 2)
if key? and key.length > 0
decoded[key.toLowerCase()] = value
return decoded
if window? and document?
window.$_GET = parse_query(document.location.href)
else if module?
module.exports = {'parse_query': parse_query}
| true | ###
GET.js v0.2
Copyright © 2012 PI:NAME:<NAME>END_PI, All rights reserved.
Licensed under the BSD 3-clause license, see the COPYING file for details
###
parse_query = (url) ->
decoded = {}
return unless url?
query = ( url.split('?', 2) )[1]
return unless query?
args = query.split('&')
for arg in args
[key, value] = arg.split('=', 2)
if key? and key.length > 0
decoded[key.toLowerCase()] = value
return decoded
if window? and document?
window.$_GET = parse_query(document.location.href)
else if module?
module.exports = {'parse_query': parse_query}
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9962730407714844,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-http-server-multiheaders2.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# Verify that the HTTP server implementation handles multiple instances
# of the same header as per RFC2616: joining the handful of fields by ', '
# that support it, and dropping duplicates for other fields.
# GH-2750
# GH-715
# GH-1187
# GH-1083
# GH-4052
# GH-2764
# GH-2764
# GH-6660
# not a special case, just making sure it's parsed correctly
# make sure that unspecified headers is treated as multiple
# special case, tested differently
#'Content-Length',
makeHeader = (value) ->
(header) ->
[
header
value
]
common = require("../common")
assert = require("assert")
http = require("http")
multipleAllowed = [
"Accept"
"Accept-Charset"
"Accept-Encoding"
"Accept-Language"
"Connection"
"Cookie"
"DAV"
"Pragma"
"Link"
"WWW-Authenticate"
"Proxy-Authenticate"
"Sec-Websocket-Extensions"
"Sec-Websocket-Protocol"
"Via"
"X-Forwarded-For"
"Some-Random-Header"
"X-Some-Random-Header"
]
multipleForbidden = [
"Content-Type"
"User-Agent"
"Referer"
"Host"
"Authorization"
"Proxy-Authorization"
"If-Modified-Since"
"If-Unmodified-Since"
"From"
"Location"
"Max-Forwards"
]
srv = http.createServer((req, res) ->
multipleForbidden.forEach (header) ->
assert.equal req.headers[header.toLowerCase()], "foo", "header parsed incorrectly: " + header
return
multipleAllowed.forEach (header) ->
assert.equal req.headers[header.toLowerCase()], "foo, bar", "header parsed incorrectly: " + header
return
assert.equal req.headers["content-length"], 0
res.writeHead 200,
"Content-Type": "text/plain"
res.end "EOF"
srv.close()
return
)
# content-length is a special case since node.js
# is dropping connetions with non-numeric headers
headers = [].concat(multipleAllowed.map(makeHeader("foo"))).concat(multipleForbidden.map(makeHeader("foo"))).concat(multipleAllowed.map(makeHeader("bar"))).concat(multipleForbidden.map(makeHeader("bar"))).concat([
[
"content-length"
0
]
[
"content-length"
123
]
])
srv.listen common.PORT, ->
http.get
host: "localhost"
port: common.PORT
path: "/"
headers: headers
return
| 163073 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# Verify that the HTTP server implementation handles multiple instances
# of the same header as per RFC2616: joining the handful of fields by ', '
# that support it, and dropping duplicates for other fields.
# GH-2750
# GH-715
# GH-1187
# GH-1083
# GH-4052
# GH-2764
# GH-2764
# GH-6660
# not a special case, just making sure it's parsed correctly
# make sure that unspecified headers is treated as multiple
# special case, tested differently
#'Content-Length',
makeHeader = (value) ->
(header) ->
[
header
value
]
common = require("../common")
assert = require("assert")
http = require("http")
multipleAllowed = [
"Accept"
"Accept-Charset"
"Accept-Encoding"
"Accept-Language"
"Connection"
"Cookie"
"DAV"
"Pragma"
"Link"
"WWW-Authenticate"
"Proxy-Authenticate"
"Sec-Websocket-Extensions"
"Sec-Websocket-Protocol"
"Via"
"X-Forwarded-For"
"Some-Random-Header"
"X-Some-Random-Header"
]
multipleForbidden = [
"Content-Type"
"User-Agent"
"Referer"
"Host"
"Authorization"
"Proxy-Authorization"
"If-Modified-Since"
"If-Unmodified-Since"
"From"
"Location"
"Max-Forwards"
]
srv = http.createServer((req, res) ->
multipleForbidden.forEach (header) ->
assert.equal req.headers[header.toLowerCase()], "foo", "header parsed incorrectly: " + header
return
multipleAllowed.forEach (header) ->
assert.equal req.headers[header.toLowerCase()], "foo, bar", "header parsed incorrectly: " + header
return
assert.equal req.headers["content-length"], 0
res.writeHead 200,
"Content-Type": "text/plain"
res.end "EOF"
srv.close()
return
)
# content-length is a special case since node.js
# is dropping connetions with non-numeric headers
headers = [].concat(multipleAllowed.map(makeHeader("foo"))).concat(multipleForbidden.map(makeHeader("foo"))).concat(multipleAllowed.map(makeHeader("bar"))).concat(multipleForbidden.map(makeHeader("bar"))).concat([
[
"content-length"
0
]
[
"content-length"
123
]
])
srv.listen common.PORT, ->
http.get
host: "localhost"
port: common.PORT
path: "/"
headers: headers
return
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# Verify that the HTTP server implementation handles multiple instances
# of the same header as per RFC2616: joining the handful of fields by ', '
# that support it, and dropping duplicates for other fields.
# GH-2750
# GH-715
# GH-1187
# GH-1083
# GH-4052
# GH-2764
# GH-2764
# GH-6660
# not a special case, just making sure it's parsed correctly
# make sure that unspecified headers is treated as multiple
# special case, tested differently
#'Content-Length',
makeHeader = (value) ->
(header) ->
[
header
value
]
common = require("../common")
assert = require("assert")
http = require("http")
multipleAllowed = [
"Accept"
"Accept-Charset"
"Accept-Encoding"
"Accept-Language"
"Connection"
"Cookie"
"DAV"
"Pragma"
"Link"
"WWW-Authenticate"
"Proxy-Authenticate"
"Sec-Websocket-Extensions"
"Sec-Websocket-Protocol"
"Via"
"X-Forwarded-For"
"Some-Random-Header"
"X-Some-Random-Header"
]
multipleForbidden = [
"Content-Type"
"User-Agent"
"Referer"
"Host"
"Authorization"
"Proxy-Authorization"
"If-Modified-Since"
"If-Unmodified-Since"
"From"
"Location"
"Max-Forwards"
]
srv = http.createServer((req, res) ->
multipleForbidden.forEach (header) ->
assert.equal req.headers[header.toLowerCase()], "foo", "header parsed incorrectly: " + header
return
multipleAllowed.forEach (header) ->
assert.equal req.headers[header.toLowerCase()], "foo, bar", "header parsed incorrectly: " + header
return
assert.equal req.headers["content-length"], 0
res.writeHead 200,
"Content-Type": "text/plain"
res.end "EOF"
srv.close()
return
)
# content-length is a special case since node.js
# is dropping connetions with non-numeric headers
headers = [].concat(multipleAllowed.map(makeHeader("foo"))).concat(multipleForbidden.map(makeHeader("foo"))).concat(multipleAllowed.map(makeHeader("bar"))).concat(multipleForbidden.map(makeHeader("bar"))).concat([
[
"content-length"
0
]
[
"content-length"
123
]
])
srv.listen common.PORT, ->
http.get
host: "localhost"
port: common.PORT
path: "/"
headers: headers
return
|
[
{
"context": "Email.options = \n\t\tfacebook: 'http://facebook.com/whowouldyouratherapp'\n\t\ttwitter: 'http://twitter.com/whowouldyourather",
"end": 96,
"score": 0.999255359172821,
"start": 76,
"tag": "USERNAME",
"value": "whowouldyouratherapp"
},
{
"context": "wouldyouratherapp'\n\... | both/_config/emails.coffee | Innarticles/whowouldyourather-meteorjs-app | 0 | if Meteor.isServer
PrettyEmail.options =
facebook: 'http://facebook.com/whowouldyouratherapp'
twitter: 'http://twitter.com/whowouldyourather'
website: 'http://whowouldyourather.meteor.com'
siteName: 'Who Would You Rather'
companyAddress: 'Accra, Ghana'
companyName: 'MEST EITs Group C'
companyUrl: 'http://whowouldyourather.meteor.com' | 88771 | if Meteor.isServer
PrettyEmail.options =
facebook: 'http://facebook.com/whowouldyouratherapp'
twitter: 'http://twitter.com/whowouldyourather'
website: 'http://whowouldyourather.meteor.com'
siteName: '<NAME>'
companyAddress: 'Accra, Ghana'
companyName: 'MEST EITs Group C'
companyUrl: 'http://whowouldyourather.meteor.com' | true | if Meteor.isServer
PrettyEmail.options =
facebook: 'http://facebook.com/whowouldyouratherapp'
twitter: 'http://twitter.com/whowouldyourather'
website: 'http://whowouldyourather.meteor.com'
siteName: 'PI:NAME:<NAME>END_PI'
companyAddress: 'Accra, Ghana'
companyName: 'MEST EITs Group C'
companyUrl: 'http://whowouldyourather.meteor.com' |
[
{
"context": "n: 'mock-region'\n credentials:\n accessKeyId: 'akid'\n secretAccessKey: 'secret'\n sessionToken: ",
"end": 464,
"score": 0.9976419806480408,
"start": 460,
"tag": "KEY",
"value": "akid"
},
{
"context": "ls:\n accessKeyId: 'akid'\n secretAccessKey: 'secre... | test/helpers.coffee | selboo/scs-sdk-js | 28 | AWS = null
global = null
ignoreRequire = require
if typeof window == 'undefined'
AWS = ignoreRequire('../lib/aws')
global = GLOBAL
else
AWS = window.AWS
global = window
EventEmitter = require('events').EventEmitter
Buffer = AWS.util.Buffer
semver = require('semver')
require('util').print = (data) ->
process.stdout.write(data)
# Mock credentials
AWS.config.update
paramValidation: false
region: 'mock-region'
credentials:
accessKeyId: 'akid'
secretAccessKey: 'secret'
sessionToken: 'session'
spies = null
beforeEach ->
spies = []
afterEach ->
while spies.length > 0
spy = spies.pop()
spy.object[spy.methodName] = spy.origMethod
_createSpy = (name) ->
spy = ->
spy.calls.push
object: this
arguments: Array.prototype.slice.call(arguments)
if spy.callFn
return spy.callFn.apply(spy.object, arguments)
if spy.shouldReturn
return spy.returnValue
spy.object = this
spy.methodName = name
spy.callFn = null
spy.shouldReturn = false
spy.returnValue = null
spy.calls = []
spy.andReturn = (value) -> spy.shouldReturn = true; spy.returnValue = value; spy
spy.andCallFake = (fn) -> spy.callFn = fn; spy
spy
_spyOn = (obj, methodName) ->
spy = _createSpy.call(obj, methodName)
spy.origMethod = obj[methodName]
spy.andCallThrough = -> spy.callFn = spy.origMethod; spy
obj[methodName] = spy
spies.push(spy)
spy
# Disable setTimeout for tests
# Warning: this might cause unpredictable results
# TODO: refactor this out.
global.setTimeout = (fn) -> fn()
global.expect = require('chai').expect
matchXML = (xml1, xml2) ->
results = []
parser = new (require('xml2js').Parser)()
[xml1, xml2].forEach (xml) ->
parser.parseString xml, (e,r) ->
if e then throw e
results.push(r)
expect(results[0]).to.eql(results[1])
MockService = AWS.Service.defineService 'mockService',
serviceIdentifier: 'mock'
initialize: (config) ->
AWS.Service.prototype.initialize.call(this, config)
@config.credentials = accessKeyId: 'akid', secretAccessKey: 'secret'
@config.region = 'mock-region'
setupRequestListeners: (request) ->
request.on 'extractData', (resp) ->
resp.data = resp.httpResponse.body.toString()
request.on 'extractError', (resp) ->
resp.error =
code: resp.httpResponse.body.toString() || resp.httpResponse.statusCode
message: null
api: new AWS.Model.Api metadata:
endpointPrefix: 'mockservice'
signatureVersion: 'v4'
mockHttpSuccessfulResponse = (status, headers, data, cb) ->
if !Array.isArray(data)
data = [data]
httpResp = new EventEmitter()
httpResp.statusCode = status
httpResp.headers = headers
cb(httpResp)
httpResp.emit('headers', status, headers)
if AWS.util.isNode() && httpResp._events.readable
httpResp.read = ->
if data.length > 0
chunk = data.shift()
if chunk is null
null
else
new Buffer(chunk)
else
null
AWS.util.arrayEach data.slice(), (str) ->
if AWS.util.isNode() && (httpResp._events.readable || semver.gt(process.version, 'v0.11.3'))
httpResp.emit('readable')
else
httpResp.emit('data', new Buffer(str))
if httpResp._events['readable'] || httpResp._events['data']
httpResp.emit('end')
else
httpResp.emit('aborted')
mockHttpResponse = (status, headers, data) ->
stream = new EventEmitter()
stream.setMaxListeners(0)
_spyOn(AWS.HttpClient, 'getInstance')
AWS.HttpClient.getInstance.andReturn handleRequest: (req, opts, cb, errCb) ->
if typeof status == 'number'
mockHttpSuccessfulResponse status, headers, data, cb
else
errCb(status)
stream
return stream
mockIntermittentFailureResponse = (numFailures, status, headers, data) ->
retryCount = 0
_spyOn(AWS.HttpClient, 'getInstance')
AWS.HttpClient.getInstance.andReturn handleRequest: (req, opts, cb, errCb) ->
if retryCount < numFailures
retryCount += 1
errCb code: 'NetworkingError', message: 'FAIL!'
else
statusCode = retryCount < numFailures ? 500 : status
mockHttpSuccessfulResponse statusCode, headers, data, cb
new EventEmitter()
mockResponse = (svc, resp) ->
addAll = svc.addAllRequestListeners
_spyOn(svc, 'addAllRequestListeners').andCallFake (req) ->
req.response.httpResponse.statusCode = 200
addAll.call(svc, req)
req.removeAllListeners('send')
req.removeAllListeners('extractError')
req.removeAllListeners('extractData')
req.on 'validateResponse', ->
AWS.util.update req.response, resp
mockResponses = (svc, resps) ->
index = 0
addAll = svc.addAllRequestListeners
_spyOn(svc, 'addAllRequestListeners').andCallFake (req) ->
req.response.httpResponse.statusCode = 200
addAll.call(svc, req)
req.removeAllListeners('send')
req.removeAllListeners('extractError')
req.removeAllListeners('extractData')
req.on 'validateResponse', ->
resp = resps[index]
if resp
AWS.util.update req.response, resp
index += 1
module.exports =
AWS: AWS
util: AWS.util
spyOn: _spyOn
createSpy: _createSpy
matchXML: matchXML
mockHttpResponse: mockHttpResponse
mockIntermittentFailureResponse: mockIntermittentFailureResponse
mockHttpSuccessfulResponse: mockHttpSuccessfulResponse
mockResponse: mockResponse
mockResponses: mockResponses
MockService: MockService
| 193459 | AWS = null
global = null
ignoreRequire = require
if typeof window == 'undefined'
AWS = ignoreRequire('../lib/aws')
global = GLOBAL
else
AWS = window.AWS
global = window
EventEmitter = require('events').EventEmitter
Buffer = AWS.util.Buffer
semver = require('semver')
require('util').print = (data) ->
process.stdout.write(data)
# Mock credentials
AWS.config.update
paramValidation: false
region: 'mock-region'
credentials:
accessKeyId: '<KEY>'
secretAccessKey: '<KEY>'
sessionToken: '<KEY>'
spies = null
beforeEach ->
spies = []
afterEach ->
while spies.length > 0
spy = spies.pop()
spy.object[spy.methodName] = spy.origMethod
_createSpy = (name) ->
spy = ->
spy.calls.push
object: this
arguments: Array.prototype.slice.call(arguments)
if spy.callFn
return spy.callFn.apply(spy.object, arguments)
if spy.shouldReturn
return spy.returnValue
spy.object = this
spy.methodName = name
spy.callFn = null
spy.shouldReturn = false
spy.returnValue = null
spy.calls = []
spy.andReturn = (value) -> spy.shouldReturn = true; spy.returnValue = value; spy
spy.andCallFake = (fn) -> spy.callFn = fn; spy
spy
_spyOn = (obj, methodName) ->
spy = _createSpy.call(obj, methodName)
spy.origMethod = obj[methodName]
spy.andCallThrough = -> spy.callFn = spy.origMethod; spy
obj[methodName] = spy
spies.push(spy)
spy
# Disable setTimeout for tests
# Warning: this might cause unpredictable results
# TODO: refactor this out.
global.setTimeout = (fn) -> fn()
global.expect = require('chai').expect
matchXML = (xml1, xml2) ->
results = []
parser = new (require('xml2js').Parser)()
[xml1, xml2].forEach (xml) ->
parser.parseString xml, (e,r) ->
if e then throw e
results.push(r)
expect(results[0]).to.eql(results[1])
MockService = AWS.Service.defineService 'mockService',
serviceIdentifier: 'mock'
initialize: (config) ->
AWS.Service.prototype.initialize.call(this, config)
@config.credentials = accessKeyId: '<KEY>', secretAccessKey: '<KEY>'
@config.region = 'mock-region'
setupRequestListeners: (request) ->
request.on 'extractData', (resp) ->
resp.data = resp.httpResponse.body.toString()
request.on 'extractError', (resp) ->
resp.error =
code: resp.httpResponse.body.toString() || resp.httpResponse.statusCode
message: null
api: new AWS.Model.Api metadata:
endpointPrefix: 'mockservice'
signatureVersion: 'v4'
mockHttpSuccessfulResponse = (status, headers, data, cb) ->
if !Array.isArray(data)
data = [data]
httpResp = new EventEmitter()
httpResp.statusCode = status
httpResp.headers = headers
cb(httpResp)
httpResp.emit('headers', status, headers)
if AWS.util.isNode() && httpResp._events.readable
httpResp.read = ->
if data.length > 0
chunk = data.shift()
if chunk is null
null
else
new Buffer(chunk)
else
null
AWS.util.arrayEach data.slice(), (str) ->
if AWS.util.isNode() && (httpResp._events.readable || semver.gt(process.version, 'v0.11.3'))
httpResp.emit('readable')
else
httpResp.emit('data', new Buffer(str))
if httpResp._events['readable'] || httpResp._events['data']
httpResp.emit('end')
else
httpResp.emit('aborted')
mockHttpResponse = (status, headers, data) ->
stream = new EventEmitter()
stream.setMaxListeners(0)
_spyOn(AWS.HttpClient, 'getInstance')
AWS.HttpClient.getInstance.andReturn handleRequest: (req, opts, cb, errCb) ->
if typeof status == 'number'
mockHttpSuccessfulResponse status, headers, data, cb
else
errCb(status)
stream
return stream
mockIntermittentFailureResponse = (numFailures, status, headers, data) ->
retryCount = 0
_spyOn(AWS.HttpClient, 'getInstance')
AWS.HttpClient.getInstance.andReturn handleRequest: (req, opts, cb, errCb) ->
if retryCount < numFailures
retryCount += 1
errCb code: 'NetworkingError', message: 'FAIL!'
else
statusCode = retryCount < numFailures ? 500 : status
mockHttpSuccessfulResponse statusCode, headers, data, cb
new EventEmitter()
mockResponse = (svc, resp) ->
addAll = svc.addAllRequestListeners
_spyOn(svc, 'addAllRequestListeners').andCallFake (req) ->
req.response.httpResponse.statusCode = 200
addAll.call(svc, req)
req.removeAllListeners('send')
req.removeAllListeners('extractError')
req.removeAllListeners('extractData')
req.on 'validateResponse', ->
AWS.util.update req.response, resp
mockResponses = (svc, resps) ->
index = 0
addAll = svc.addAllRequestListeners
_spyOn(svc, 'addAllRequestListeners').andCallFake (req) ->
req.response.httpResponse.statusCode = 200
addAll.call(svc, req)
req.removeAllListeners('send')
req.removeAllListeners('extractError')
req.removeAllListeners('extractData')
req.on 'validateResponse', ->
resp = resps[index]
if resp
AWS.util.update req.response, resp
index += 1
module.exports =
AWS: AWS
util: AWS.util
spyOn: _spyOn
createSpy: _createSpy
matchXML: matchXML
mockHttpResponse: mockHttpResponse
mockIntermittentFailureResponse: mockIntermittentFailureResponse
mockHttpSuccessfulResponse: mockHttpSuccessfulResponse
mockResponse: mockResponse
mockResponses: mockResponses
MockService: MockService
| true | AWS = null
global = null
ignoreRequire = require
if typeof window == 'undefined'
AWS = ignoreRequire('../lib/aws')
global = GLOBAL
else
AWS = window.AWS
global = window
EventEmitter = require('events').EventEmitter
Buffer = AWS.util.Buffer
semver = require('semver')
require('util').print = (data) ->
process.stdout.write(data)
# Mock credentials
AWS.config.update
paramValidation: false
region: 'mock-region'
credentials:
accessKeyId: 'PI:KEY:<KEY>END_PI'
secretAccessKey: 'PI:KEY:<KEY>END_PI'
sessionToken: 'PI:KEY:<KEY>END_PI'
spies = null
beforeEach ->
spies = []
afterEach ->
while spies.length > 0
spy = spies.pop()
spy.object[spy.methodName] = spy.origMethod
_createSpy = (name) ->
spy = ->
spy.calls.push
object: this
arguments: Array.prototype.slice.call(arguments)
if spy.callFn
return spy.callFn.apply(spy.object, arguments)
if spy.shouldReturn
return spy.returnValue
spy.object = this
spy.methodName = name
spy.callFn = null
spy.shouldReturn = false
spy.returnValue = null
spy.calls = []
spy.andReturn = (value) -> spy.shouldReturn = true; spy.returnValue = value; spy
spy.andCallFake = (fn) -> spy.callFn = fn; spy
spy
_spyOn = (obj, methodName) ->
spy = _createSpy.call(obj, methodName)
spy.origMethod = obj[methodName]
spy.andCallThrough = -> spy.callFn = spy.origMethod; spy
obj[methodName] = spy
spies.push(spy)
spy
# Disable setTimeout for tests
# Warning: this might cause unpredictable results
# TODO: refactor this out.
global.setTimeout = (fn) -> fn()
global.expect = require('chai').expect
matchXML = (xml1, xml2) ->
results = []
parser = new (require('xml2js').Parser)()
[xml1, xml2].forEach (xml) ->
parser.parseString xml, (e,r) ->
if e then throw e
results.push(r)
expect(results[0]).to.eql(results[1])
MockService = AWS.Service.defineService 'mockService',
serviceIdentifier: 'mock'
initialize: (config) ->
AWS.Service.prototype.initialize.call(this, config)
@config.credentials = accessKeyId: 'PI:KEY:<KEY>END_PI', secretAccessKey: 'PI:KEY:<KEY>END_PI'
@config.region = 'mock-region'
setupRequestListeners: (request) ->
request.on 'extractData', (resp) ->
resp.data = resp.httpResponse.body.toString()
request.on 'extractError', (resp) ->
resp.error =
code: resp.httpResponse.body.toString() || resp.httpResponse.statusCode
message: null
api: new AWS.Model.Api metadata:
endpointPrefix: 'mockservice'
signatureVersion: 'v4'
mockHttpSuccessfulResponse = (status, headers, data, cb) ->
if !Array.isArray(data)
data = [data]
httpResp = new EventEmitter()
httpResp.statusCode = status
httpResp.headers = headers
cb(httpResp)
httpResp.emit('headers', status, headers)
if AWS.util.isNode() && httpResp._events.readable
httpResp.read = ->
if data.length > 0
chunk = data.shift()
if chunk is null
null
else
new Buffer(chunk)
else
null
AWS.util.arrayEach data.slice(), (str) ->
if AWS.util.isNode() && (httpResp._events.readable || semver.gt(process.version, 'v0.11.3'))
httpResp.emit('readable')
else
httpResp.emit('data', new Buffer(str))
if httpResp._events['readable'] || httpResp._events['data']
httpResp.emit('end')
else
httpResp.emit('aborted')
mockHttpResponse = (status, headers, data) ->
stream = new EventEmitter()
stream.setMaxListeners(0)
_spyOn(AWS.HttpClient, 'getInstance')
AWS.HttpClient.getInstance.andReturn handleRequest: (req, opts, cb, errCb) ->
if typeof status == 'number'
mockHttpSuccessfulResponse status, headers, data, cb
else
errCb(status)
stream
return stream
mockIntermittentFailureResponse = (numFailures, status, headers, data) ->
retryCount = 0
_spyOn(AWS.HttpClient, 'getInstance')
AWS.HttpClient.getInstance.andReturn handleRequest: (req, opts, cb, errCb) ->
if retryCount < numFailures
retryCount += 1
errCb code: 'NetworkingError', message: 'FAIL!'
else
statusCode = retryCount < numFailures ? 500 : status
mockHttpSuccessfulResponse statusCode, headers, data, cb
new EventEmitter()
mockResponse = (svc, resp) ->
addAll = svc.addAllRequestListeners
_spyOn(svc, 'addAllRequestListeners').andCallFake (req) ->
req.response.httpResponse.statusCode = 200
addAll.call(svc, req)
req.removeAllListeners('send')
req.removeAllListeners('extractError')
req.removeAllListeners('extractData')
req.on 'validateResponse', ->
AWS.util.update req.response, resp
mockResponses = (svc, resps) ->
index = 0
addAll = svc.addAllRequestListeners
_spyOn(svc, 'addAllRequestListeners').andCallFake (req) ->
req.response.httpResponse.statusCode = 200
addAll.call(svc, req)
req.removeAllListeners('send')
req.removeAllListeners('extractError')
req.removeAllListeners('extractData')
req.on 'validateResponse', ->
resp = resps[index]
if resp
AWS.util.update req.response, resp
index += 1
module.exports =
AWS: AWS
util: AWS.util
spyOn: _spyOn
createSpy: _createSpy
matchXML: matchXML
mockHttpResponse: mockHttpResponse
mockIntermittentFailureResponse: mockIntermittentFailureResponse
mockHttpSuccessfulResponse: mockHttpSuccessfulResponse
mockResponse: mockResponse
mockResponses: mockResponses
MockService: MockService
|
[
{
"context": "\n 'git remote rm origin'\n \"git remote add origin git@#{repoName}.github.com:#{repoUrl}.git\"\n \"ssh-keygen",
"end": 347,
"score": 0.6255354881286621,
"start": 344,
"tag": "EMAIL",
"value": "git"
},
{
"context": "ote rm origin'\n \"git remote add origin git@#{rep... | index.coffee | vassiliy/github-add-key | 0 | 'use strict'
scriptName = process.argv[3] or 'run'
repoUrl = process.argv[2]
repoName = repoUrl.split('/').join '-'
repoArray = repoName.split '-'
repoArray.shift() if repoArray[0] is repoArray[1]
repoName = repoArray.join '-'
script = [
"touch #{scriptName}"
"cat > #{scriptName} <<EOR"
'git remote rm origin'
"git remote add origin git@#{repoName}.github.com:#{repoUrl}.git"
"ssh-keygen -t rsa -f ~/.ssh/id_rsa-#{repoName} -C https://github.com/#{repoUrl}"
"ssh-add ~/.ssh/id_rsa-#{repoName}"
"pbcopy < ~/.ssh/id_rsa-#{repoName}.pub"
'touch ~/.ssh/config'
'cat >> ~/.ssh/config <<EOS'
"Host #{repoName}.github.com"
' Hostname github.com'
" IdentityFile ~/.ssh/id_rsa-#{repoName}"
''
'EOS'
"rm #{scriptName}"
'EOR'
]
script.forEach (line) ->
console.log line
| 175583 | 'use strict'
scriptName = process.argv[3] or 'run'
repoUrl = process.argv[2]
repoName = repoUrl.split('/').join '-'
repoArray = repoName.split '-'
repoArray.shift() if repoArray[0] is repoArray[1]
repoName = repoArray.join '-'
script = [
"touch #{scriptName}"
"cat > #{scriptName} <<EOR"
'git remote rm origin'
"git remote add origin <EMAIL>@#{repo<EMAIL>:#{repoUrl}.git"
"ssh-keygen -t rsa -f ~/.ssh/id_rsa-#{repoName} -C https://github.com/#{repoUrl}"
"ssh-add ~/.ssh/id_rsa-#{repoName}"
"pbcopy < ~/.ssh/id_rsa-#{repoName}.pub"
'touch ~/.ssh/config'
'cat >> ~/.ssh/config <<EOS'
"Host #{repoName}.github.com"
' Hostname github.com'
" IdentityFile ~/.ssh/id_rsa-#{repoName}"
''
'EOS'
"rm #{scriptName}"
'EOR'
]
script.forEach (line) ->
console.log line
| true | 'use strict'
scriptName = process.argv[3] or 'run'
repoUrl = process.argv[2]
repoName = repoUrl.split('/').join '-'
repoArray = repoName.split '-'
repoArray.shift() if repoArray[0] is repoArray[1]
repoName = repoArray.join '-'
script = [
"touch #{scriptName}"
"cat > #{scriptName} <<EOR"
'git remote rm origin'
"git remote add origin PI:EMAIL:<EMAIL>END_PI@#{repoPI:EMAIL:<EMAIL>END_PI:#{repoUrl}.git"
"ssh-keygen -t rsa -f ~/.ssh/id_rsa-#{repoName} -C https://github.com/#{repoUrl}"
"ssh-add ~/.ssh/id_rsa-#{repoName}"
"pbcopy < ~/.ssh/id_rsa-#{repoName}.pub"
'touch ~/.ssh/config'
'cat >> ~/.ssh/config <<EOS'
"Host #{repoName}.github.com"
' Hostname github.com'
" IdentityFile ~/.ssh/id_rsa-#{repoName}"
''
'EOS'
"rm #{scriptName}"
'EOR'
]
script.forEach (line) ->
console.log line
|
[
{
"context": " \"Lydian\",\n \"Ionian\",\n \"Mixolydian\",\n \"Dorian\",\n \"Aeolian\",\n \"Phrygian\",\n \"Locrian\"]\n\n",
"end": 122,
"score": 0.7015489339828491,
"start": 116,
"tag": "NAME",
"value": "Dorian"
},
{
"context": "\"Mixolydian\",\n \"Dorian\",\n... | coffee/app.coffee | ealang/pitchspace-vis-webmidi | 0 | S = "\u266F"
F = "\u266D"
INITMODE = 1
INITTONIC = 0
MODES = [
"Lydian",
"Ionian",
"Mixolydian",
"Dorian",
"Aeolian",
"Phrygian",
"Locrian"]
TONICS = ["C", "C" + S, "D", "D" + S, "E", "F", "F" + S, "G", "G" + S, "A", "A" + S, "B",
"C", "D" + F, "D", "E" + F, "E", "F", "G" + F, "G", "A" + F, "A", "B" + F, "B"]
class App
constructor: (view, midi) ->
activeNotes = []
selected =
mode: INITMODE
tonic: INITTONIC
device: 0
devices = midi.getDevicesList()
midi.selectDevice(0) if devices.length >= 1
drawApp = ->
view.drawAppPage(MODES[selected.mode], TONICS[selected.tonic], getScale(MODES[selected.mode], TONICS[selected.tonic]))
view.onClickSettings = (option, val) ->
switch (option)
when "mode" then selected.mode = val
when "tonic" then selected.tonic = val
when "device"
selected.device = val
midi.selectDevice(val)
activeNodes = []
drawApp()
view.drawSettingsPage(MODES, TONICS, devices, selected.mode, selected.tonic, selected.device)
drawApp()
midi.onKeyPress (event) ->
if event.noteOn == on and (event.noteNum in activeNotes) == no
activeNotes.push(event.noteNum)
view.drawNoteAttack(event.noteNum, event.velocity)
else if event.noteOn == off and (event.noteNum in activeNotes)
activeNotes.splice(activeNotes.indexOf(event.noteNum), 1)
view.drawNoteRelease(event.noteNum)
view.drawActiveNotes(activeNotes)
| 84184 | S = "\u266F"
F = "\u266D"
INITMODE = 1
INITTONIC = 0
MODES = [
"Lydian",
"Ionian",
"Mixolydian",
"<NAME>",
"Aeolian",
"Ph<NAME>gian",
"Locrian"]
TONICS = ["C", "C" + S, "D", "D" + S, "E", "F", "F" + S, "G", "G" + S, "A", "A" + S, "B",
"C", "D" + F, "D", "E" + F, "E", "F", "G" + F, "G", "A" + F, "A", "B" + F, "B"]
class App
constructor: (view, midi) ->
activeNotes = []
selected =
mode: INITMODE
tonic: INITTONIC
device: 0
devices = midi.getDevicesList()
midi.selectDevice(0) if devices.length >= 1
drawApp = ->
view.drawAppPage(MODES[selected.mode], TONICS[selected.tonic], getScale(MODES[selected.mode], TONICS[selected.tonic]))
view.onClickSettings = (option, val) ->
switch (option)
when "mode" then selected.mode = val
when "tonic" then selected.tonic = val
when "device"
selected.device = val
midi.selectDevice(val)
activeNodes = []
drawApp()
view.drawSettingsPage(MODES, TONICS, devices, selected.mode, selected.tonic, selected.device)
drawApp()
midi.onKeyPress (event) ->
if event.noteOn == on and (event.noteNum in activeNotes) == no
activeNotes.push(event.noteNum)
view.drawNoteAttack(event.noteNum, event.velocity)
else if event.noteOn == off and (event.noteNum in activeNotes)
activeNotes.splice(activeNotes.indexOf(event.noteNum), 1)
view.drawNoteRelease(event.noteNum)
view.drawActiveNotes(activeNotes)
| true | S = "\u266F"
F = "\u266D"
INITMODE = 1
INITTONIC = 0
MODES = [
"Lydian",
"Ionian",
"Mixolydian",
"PI:NAME:<NAME>END_PI",
"Aeolian",
"PhPI:NAME:<NAME>END_PIgian",
"Locrian"]
TONICS = ["C", "C" + S, "D", "D" + S, "E", "F", "F" + S, "G", "G" + S, "A", "A" + S, "B",
"C", "D" + F, "D", "E" + F, "E", "F", "G" + F, "G", "A" + F, "A", "B" + F, "B"]
class App
constructor: (view, midi) ->
activeNotes = []
selected =
mode: INITMODE
tonic: INITTONIC
device: 0
devices = midi.getDevicesList()
midi.selectDevice(0) if devices.length >= 1
drawApp = ->
view.drawAppPage(MODES[selected.mode], TONICS[selected.tonic], getScale(MODES[selected.mode], TONICS[selected.tonic]))
view.onClickSettings = (option, val) ->
switch (option)
when "mode" then selected.mode = val
when "tonic" then selected.tonic = val
when "device"
selected.device = val
midi.selectDevice(val)
activeNodes = []
drawApp()
view.drawSettingsPage(MODES, TONICS, devices, selected.mode, selected.tonic, selected.device)
drawApp()
midi.onKeyPress (event) ->
if event.noteOn == on and (event.noteNum in activeNotes) == no
activeNotes.push(event.noteNum)
view.drawNoteAttack(event.noteNum, event.velocity)
else if event.noteOn == off and (event.noteNum in activeNotes)
activeNotes.splice(activeNotes.indexOf(event.noteNum), 1)
view.drawNoteRelease(event.noteNum)
view.drawActiveNotes(activeNotes)
|
[
{
"context": "ount({\n # url: '${2:/api}',\n # id: 'user123',\n # cacheKey: 'myapp.session',\n # ",
"end": 175,
"score": 0.9962016344070435,
"start": 168,
"tag": "USERNAME",
"value": "user123"
},
{
"context": "\n # 'descriptionMoreURL' : 'https://github... | snippets/account-snippets.cson | distalx/hoodie-api-atom | 0 | '.source.js':
# 'account constructor':
# 'prefix': 'new Account'
# 'body': """
# var account = new Account({
# url: '${2:/api}',
# id: 'user123',
# cacheKey: 'myapp.session',
# validate: function (options) {
# if (options.username.length < 3) {
# throw new Error('Username must have at least 3 characters')
# }
# });
#
# """
# 'description': 'Account constructor'
# 'rightLabelHTML' : 'new Account(options)'
# 'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-client#constructor'
'account signUp':
'prefix': 'signUp'
'body': """
${1:hoodie}.account.signUp({
username: ${2:username},
password: ${3:password}
})
.then(function() {
return hoodie.account.signIn({
username: ${4:username},
password: ${5:password}
});
})
.then(function() {
${7:alert("Good Job! On to the next slide!");}
})
.catch(function(errror) {
${8:alert('Ooops, something went wrong: ' + error.message);}
})$9
"""
'description': 'Creates a new user account on the Hoodie server. Does not sign in the user automatically, account.signIn must be called separately.'
'rightLabelHTML' : 'account.signUp(accountProperties)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountsignup'
'account signIn':
'prefix': 'signIn'
'body': """
${1:hoodie}.account.signIn({
username: ${2:username},
password: ${3:password}
})
.then(function() {
})
.catch(function(errror) {
${7:alert('Ooops, something went wrong: ' + error.message);}
})$9
"""
'description': 'Creates a user session'
'rightLabelHTML' : 'account.signIn(options)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountsignin'
'account signOut':
'prefix': 'signOut'
'body': """
${1:hoodie}.account.signOut().then(function(sessionProperties) {
${2:alert('Bye, ' + sessionProperties.account.username)}
})
.catch(function(errror) {
${3:alert('Ooops, something went wrong: ' + error.message);}
})$3
"""
'description': 'Deletes the user’s session'
'rightLabelHTML' : 'account.signOut()'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountsignout'
'account destroy':
'prefix': 'destroy'
'body': """
${1:hoodie}.account.destroy().then(function(sessionProperties) {
${2:alert('Bye, ' + sessionProperties.account.username)}
})
.catch(function(errror) {
${3:alert('Ooops, something went wrong: ' + error.message);}
})$3
"""
'description': 'Destroys the account of the currently signed in user.'
'rightLabelHTML' : 'account.destroy()'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountdestroy'
'account get':
'prefix': 'get'
'body': '${1:hoodie.}account.get()$2'
'description': 'Returns account properties from local cache.'
'rightLabelHTML' : 'account.get(properties)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountget'
'account fetch':
'prefix': 'fetch'
'body': """
${1:hoodie.}account.fetch().then(function (properties) {
${2:alert('You signed up at ' + properties.createdAt)}
})$3
"""
'description': 'Fetches account properties from server.'
'rightLabelHTML' : 'account.fetch(properties)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountfetch'
'account update':
'prefix': 'update'
'body': """
${1:hoodie.}account.update({${2:username}: '${3:treetrunks}'}).then(function (properties) {
${4:alert('You are now known as ' + properties.username)}
})$5
"""
'description': 'Update account properties on server and local cache'
'rightLabelHTML' : 'account.update(changedProperties)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountupdate'
'account profile get':
'prefix': 'profile get'
'body': '${1:hoodie.}account.profile.get($2)$3'
'description': 'Returns profile properties from local cache.'
'rightLabelHTML' : 'account.profile.get(properties)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountprofileget'
'account profile fetch':
'prefix': 'profile fetch'
'body': """
${1:hoodie.}account.profile.fetch().then(function (properties) {
${2:alert('Hey there ' + properties.fullname)}
})$4
"""
'description': 'Fetches profile properties from server.'
'rightLabelHTML' : 'account.profile.fetch(options)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountprofilefetch'
'account profile update':
'prefix': 'profile update'
'body': """
${1:hoodie.}account.profile.update({fullname: '${2:Prof Pat Hook}'}).then(function (properties) {
${3:alert('Congratulations, ' + properties.fullname)}
})$4
"""
'description': 'Update profile properties on server and local cache'
'rightLabelHTML' : 'account.profile.update(changedProperties)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountprofileupdate'
'account profile update':
'prefix': 'profile update'
'body': """
${1:hoodie.}account.request({type: 'passwordreset', contact: '${2:pat@example.com}'}).then(function (properties) {
${3:alert('A password reset link was sent to ' + properties.contact)}
})$4
"""
'description': 'Sends a custom request to the server, for things like password resets, account upgrades, etc.'
'rightLabelHTML' : 'account.request(properties)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountrequest'
'account on':
'prefix': 'account on'
'body': """
${1:hoodie.}account.on('${2:event}', function () {
$3
})$4
"""
'description': 'Call function at given account event.'
'rightLabelHTML' : 'account.on(event, handler)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accounton'
'account one':
'prefix': 'account one'
'body': """
${1:hoodie.}account.one('${2:event}', function () {
$3
})$4
"""
'description': 'Call function once at given account event.'
'rightLabelHTML' : 'account.one(event, handler)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountone'
'account off':
'prefix': 'account off'
'body': """
${1:hoodie.}account.off('${2:event}', function () {
$3
})$4
"""
'description': 'Removes event handler that has been added before'
'rightLabelHTML' : 'account.off(event, handler)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountoff'
'account hooks before':
'prefix': 'hook before'
'body': """
${1:hoodie.}account.hook.before('signin', function (options) {
return localUserStore.clear()
})$4
"""
'description': 'Account Hook befor'
'rightLabelHTML' : 'account.hook.before(event, handler)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#hooks'
'account hooks after':
'prefix': 'hook after'
'body': """
${1:hoodie.}account.hook.after('signout', function (options) {
return localUserStore.clear()
})$4
"""
'description': 'Account Hook after'
'rightLabelHTML' : 'account.hook.after(event, handler)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#hooks'
| 51364 | '.source.js':
# 'account constructor':
# 'prefix': 'new Account'
# 'body': """
# var account = new Account({
# url: '${2:/api}',
# id: 'user123',
# cacheKey: 'myapp.session',
# validate: function (options) {
# if (options.username.length < 3) {
# throw new Error('Username must have at least 3 characters')
# }
# });
#
# """
# 'description': 'Account constructor'
# 'rightLabelHTML' : 'new Account(options)'
# 'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-client#constructor'
'account signUp':
'prefix': 'signUp'
'body': """
${1:hoodie}.account.signUp({
username: ${2:username},
password: ${<PASSWORD>:<PASSWORD>}
})
.then(function() {
return hoodie.account.signIn({
username: ${4:username},
password: ${<PASSWORD>}
});
})
.then(function() {
${7:alert("Good Job! On to the next slide!");}
})
.catch(function(errror) {
${8:alert('Ooops, something went wrong: ' + error.message);}
})$9
"""
'description': 'Creates a new user account on the Hoodie server. Does not sign in the user automatically, account.signIn must be called separately.'
'rightLabelHTML' : 'account.signUp(accountProperties)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountsignup'
'account signIn':
'prefix': 'signIn'
'body': """
${1:hoodie}.account.signIn({
username: ${2:username},
password: ${3:<PASSWORD>}
})
.then(function() {
})
.catch(function(errror) {
${7:alert('Ooops, something went wrong: ' + error.message);}
})$9
"""
'description': 'Creates a user session'
'rightLabelHTML' : 'account.signIn(options)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountsignin'
'account signOut':
'prefix': 'signOut'
'body': """
${1:hoodie}.account.signOut().then(function(sessionProperties) {
${2:alert('Bye, ' + sessionProperties.account.username)}
})
.catch(function(errror) {
${3:alert('Ooops, something went wrong: ' + error.message);}
})$3
"""
'description': 'Deletes the user’s session'
'rightLabelHTML' : 'account.signOut()'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountsignout'
'account destroy':
'prefix': 'destroy'
'body': """
${1:hoodie}.account.destroy().then(function(sessionProperties) {
${2:alert('Bye, ' + sessionProperties.account.username)}
})
.catch(function(errror) {
${3:alert('Ooops, something went wrong: ' + error.message);}
})$3
"""
'description': 'Destroys the account of the currently signed in user.'
'rightLabelHTML' : 'account.destroy()'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountdestroy'
'account get':
'prefix': 'get'
'body': '${1:hoodie.}account.get()$2'
'description': 'Returns account properties from local cache.'
'rightLabelHTML' : 'account.get(properties)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountget'
'account fetch':
'prefix': 'fetch'
'body': """
${1:hoodie.}account.fetch().then(function (properties) {
${2:alert('You signed up at ' + properties.createdAt)}
})$3
"""
'description': 'Fetches account properties from server.'
'rightLabelHTML' : 'account.fetch(properties)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountfetch'
'account update':
'prefix': 'update'
'body': """
${1:hoodie.}account.update({${2:username}: '${3:treetrunks}'}).then(function (properties) {
${4:alert('You are now known as ' + properties.username)}
})$5
"""
'description': 'Update account properties on server and local cache'
'rightLabelHTML' : 'account.update(changedProperties)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountupdate'
'account profile get':
'prefix': 'profile get'
'body': '${1:hoodie.}account.profile.get($2)$3'
'description': 'Returns profile properties from local cache.'
'rightLabelHTML' : 'account.profile.get(properties)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountprofileget'
'account profile fetch':
'prefix': 'profile fetch'
'body': """
${1:hoodie.}account.profile.fetch().then(function (properties) {
${2:alert('Hey there ' + properties.fullname)}
})$4
"""
'description': 'Fetches profile properties from server.'
'rightLabelHTML' : 'account.profile.fetch(options)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountprofilefetch'
'account profile update':
'prefix': 'profile update'
'body': """
${1:hoodie.}account.profile.update({fullname: '${2:<NAME>}'}).then(function (properties) {
${3:alert('Congratulations, ' + properties.fullname)}
})$4
"""
'description': 'Update profile properties on server and local cache'
'rightLabelHTML' : 'account.profile.update(changedProperties)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountprofileupdate'
'account profile update':
'prefix': 'profile update'
'body': """
${1:hoodie.}account.request({type: 'passwordreset', contact: '${2:<EMAIL>}'}).then(function (properties) {
${3:alert('A password reset link was sent to ' + properties.contact)}
})$4
"""
'description': 'Sends a custom request to the server, for things like password resets, account upgrades, etc.'
'rightLabelHTML' : 'account.request(properties)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountrequest'
'account on':
'prefix': 'account on'
'body': """
${1:hoodie.}account.on('${2:event}', function () {
$3
})$4
"""
'description': 'Call function at given account event.'
'rightLabelHTML' : 'account.on(event, handler)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accounton'
'account one':
'prefix': 'account one'
'body': """
${1:hoodie.}account.one('${2:event}', function () {
$3
})$4
"""
'description': 'Call function once at given account event.'
'rightLabelHTML' : 'account.one(event, handler)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountone'
'account off':
'prefix': 'account off'
'body': """
${1:hoodie.}account.off('${2:event}', function () {
$3
})$4
"""
'description': 'Removes event handler that has been added before'
'rightLabelHTML' : 'account.off(event, handler)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountoff'
'account hooks before':
'prefix': 'hook before'
'body': """
${1:hoodie.}account.hook.before('signin', function (options) {
return localUserStore.clear()
})$4
"""
'description': 'Account Hook befor'
'rightLabelHTML' : 'account.hook.before(event, handler)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#hooks'
'account hooks after':
'prefix': 'hook after'
'body': """
${1:hoodie.}account.hook.after('signout', function (options) {
return localUserStore.clear()
})$4
"""
'description': 'Account Hook after'
'rightLabelHTML' : 'account.hook.after(event, handler)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#hooks'
| true | '.source.js':
# 'account constructor':
# 'prefix': 'new Account'
# 'body': """
# var account = new Account({
# url: '${2:/api}',
# id: 'user123',
# cacheKey: 'myapp.session',
# validate: function (options) {
# if (options.username.length < 3) {
# throw new Error('Username must have at least 3 characters')
# }
# });
#
# """
# 'description': 'Account constructor'
# 'rightLabelHTML' : 'new Account(options)'
# 'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-client#constructor'
'account signUp':
'prefix': 'signUp'
'body': """
${1:hoodie}.account.signUp({
username: ${2:username},
password: ${PI:PASSWORD:<PASSWORD>END_PI:PI:PASSWORD:<PASSWORD>END_PI}
})
.then(function() {
return hoodie.account.signIn({
username: ${4:username},
password: ${PI:PASSWORD:<PASSWORD>END_PI}
});
})
.then(function() {
${7:alert("Good Job! On to the next slide!");}
})
.catch(function(errror) {
${8:alert('Ooops, something went wrong: ' + error.message);}
})$9
"""
'description': 'Creates a new user account on the Hoodie server. Does not sign in the user automatically, account.signIn must be called separately.'
'rightLabelHTML' : 'account.signUp(accountProperties)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountsignup'
'account signIn':
'prefix': 'signIn'
'body': """
${1:hoodie}.account.signIn({
username: ${2:username},
password: ${3:PI:PASSWORD:<PASSWORD>END_PI}
})
.then(function() {
})
.catch(function(errror) {
${7:alert('Ooops, something went wrong: ' + error.message);}
})$9
"""
'description': 'Creates a user session'
'rightLabelHTML' : 'account.signIn(options)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountsignin'
'account signOut':
'prefix': 'signOut'
'body': """
${1:hoodie}.account.signOut().then(function(sessionProperties) {
${2:alert('Bye, ' + sessionProperties.account.username)}
})
.catch(function(errror) {
${3:alert('Ooops, something went wrong: ' + error.message);}
})$3
"""
'description': 'Deletes the user’s session'
'rightLabelHTML' : 'account.signOut()'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountsignout'
'account destroy':
'prefix': 'destroy'
'body': """
${1:hoodie}.account.destroy().then(function(sessionProperties) {
${2:alert('Bye, ' + sessionProperties.account.username)}
})
.catch(function(errror) {
${3:alert('Ooops, something went wrong: ' + error.message);}
})$3
"""
'description': 'Destroys the account of the currently signed in user.'
'rightLabelHTML' : 'account.destroy()'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountdestroy'
'account get':
'prefix': 'get'
'body': '${1:hoodie.}account.get()$2'
'description': 'Returns account properties from local cache.'
'rightLabelHTML' : 'account.get(properties)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountget'
'account fetch':
'prefix': 'fetch'
'body': """
${1:hoodie.}account.fetch().then(function (properties) {
${2:alert('You signed up at ' + properties.createdAt)}
})$3
"""
'description': 'Fetches account properties from server.'
'rightLabelHTML' : 'account.fetch(properties)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountfetch'
'account update':
'prefix': 'update'
'body': """
${1:hoodie.}account.update({${2:username}: '${3:treetrunks}'}).then(function (properties) {
${4:alert('You are now known as ' + properties.username)}
})$5
"""
'description': 'Update account properties on server and local cache'
'rightLabelHTML' : 'account.update(changedProperties)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountupdate'
'account profile get':
'prefix': 'profile get'
'body': '${1:hoodie.}account.profile.get($2)$3'
'description': 'Returns profile properties from local cache.'
'rightLabelHTML' : 'account.profile.get(properties)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountprofileget'
'account profile fetch':
'prefix': 'profile fetch'
'body': """
${1:hoodie.}account.profile.fetch().then(function (properties) {
${2:alert('Hey there ' + properties.fullname)}
})$4
"""
'description': 'Fetches profile properties from server.'
'rightLabelHTML' : 'account.profile.fetch(options)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountprofilefetch'
'account profile update':
'prefix': 'profile update'
'body': """
${1:hoodie.}account.profile.update({fullname: '${2:PI:NAME:<NAME>END_PI}'}).then(function (properties) {
${3:alert('Congratulations, ' + properties.fullname)}
})$4
"""
'description': 'Update profile properties on server and local cache'
'rightLabelHTML' : 'account.profile.update(changedProperties)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountprofileupdate'
'account profile update':
'prefix': 'profile update'
'body': """
${1:hoodie.}account.request({type: 'passwordreset', contact: '${2:PI:EMAIL:<EMAIL>END_PI}'}).then(function (properties) {
${3:alert('A password reset link was sent to ' + properties.contact)}
})$4
"""
'description': 'Sends a custom request to the server, for things like password resets, account upgrades, etc.'
'rightLabelHTML' : 'account.request(properties)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountrequest'
'account on':
'prefix': 'account on'
'body': """
${1:hoodie.}account.on('${2:event}', function () {
$3
})$4
"""
'description': 'Call function at given account event.'
'rightLabelHTML' : 'account.on(event, handler)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accounton'
'account one':
'prefix': 'account one'
'body': """
${1:hoodie.}account.one('${2:event}', function () {
$3
})$4
"""
'description': 'Call function once at given account event.'
'rightLabelHTML' : 'account.one(event, handler)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountone'
'account off':
'prefix': 'account off'
'body': """
${1:hoodie.}account.off('${2:event}', function () {
$3
})$4
"""
'description': 'Removes event handler that has been added before'
'rightLabelHTML' : 'account.off(event, handler)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#accountoff'
'account hooks before':
'prefix': 'hook before'
'body': """
${1:hoodie.}account.hook.before('signin', function (options) {
return localUserStore.clear()
})$4
"""
'description': 'Account Hook befor'
'rightLabelHTML' : 'account.hook.before(event, handler)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#hooks'
'account hooks after':
'prefix': 'hook after'
'body': """
${1:hoodie.}account.hook.after('signout', function (options) {
return localUserStore.clear()
})$4
"""
'description': 'Account Hook after'
'rightLabelHTML' : 'account.hook.after(event, handler)'
'descriptionMoreURL' : 'https://github.com/hoodiehq/hoodie-account-client#hooks'
|
[
{
"context": "xtends LayerInfo\n @shouldParse: (key) -> key is 'lnk2'\n\n constructor: (layer, length) ->\n super(lay",
"end": 210,
"score": 0.9943045973777771,
"start": 206,
"tag": "KEY",
"value": "lnk2"
}
] | src/psd/layer_info/linked_layer.coffee | taofei-pro/psd.js | 0 | LayerInfo = require '../layer_info.coffee'
Descriptor = require '../descriptor.coffee'
Util = require '../util.coffee'
module.exports = class LinkedLayer extends LayerInfo
@shouldParse: (key) -> key is 'lnk2'
constructor: (layer, length) ->
super(layer, length)
parse: ->
end = @file.tell() + @length
@files = []
while @file.tell() < end
obj = {}
@file.seek 4, true
length = 1 + Util.pad4 this.file.readInt()
fileEnd = length + @file.tell()
kind = @file.readString(4)
version = @file.readInt()
obj.uuid = @file.readString(this.file.readByte())
obj.fileName = @file.readUnicodeString()
obj.fileType = @file.readString(4)
creator = @file.readString(4)
@file.seek 4, true
obj.datasize = @file.readInt()
obj.openFile = @file.readBoolean()
if obj.openFile is true
@file.seek 4, true
obj.openFile = new Descriptor(@file).parse()
if kind is 'liFD'
obj.fileData = @file.read(obj.datasize)
if version >= 5
obj.childId = @file.readUnicodeString()
if version >= 6
obj.modTime = @file.readDouble()
if version >= 7
obj.lockedState = @file.readBoolean()
@files.push obj
@file.seek fileEnd
@file.seek end
return @files | 201094 | LayerInfo = require '../layer_info.coffee'
Descriptor = require '../descriptor.coffee'
Util = require '../util.coffee'
module.exports = class LinkedLayer extends LayerInfo
@shouldParse: (key) -> key is '<KEY>'
constructor: (layer, length) ->
super(layer, length)
parse: ->
end = @file.tell() + @length
@files = []
while @file.tell() < end
obj = {}
@file.seek 4, true
length = 1 + Util.pad4 this.file.readInt()
fileEnd = length + @file.tell()
kind = @file.readString(4)
version = @file.readInt()
obj.uuid = @file.readString(this.file.readByte())
obj.fileName = @file.readUnicodeString()
obj.fileType = @file.readString(4)
creator = @file.readString(4)
@file.seek 4, true
obj.datasize = @file.readInt()
obj.openFile = @file.readBoolean()
if obj.openFile is true
@file.seek 4, true
obj.openFile = new Descriptor(@file).parse()
if kind is 'liFD'
obj.fileData = @file.read(obj.datasize)
if version >= 5
obj.childId = @file.readUnicodeString()
if version >= 6
obj.modTime = @file.readDouble()
if version >= 7
obj.lockedState = @file.readBoolean()
@files.push obj
@file.seek fileEnd
@file.seek end
return @files | true | LayerInfo = require '../layer_info.coffee'
Descriptor = require '../descriptor.coffee'
Util = require '../util.coffee'
module.exports = class LinkedLayer extends LayerInfo
@shouldParse: (key) -> key is 'PI:KEY:<KEY>END_PI'
constructor: (layer, length) ->
super(layer, length)
parse: ->
end = @file.tell() + @length
@files = []
while @file.tell() < end
obj = {}
@file.seek 4, true
length = 1 + Util.pad4 this.file.readInt()
fileEnd = length + @file.tell()
kind = @file.readString(4)
version = @file.readInt()
obj.uuid = @file.readString(this.file.readByte())
obj.fileName = @file.readUnicodeString()
obj.fileType = @file.readString(4)
creator = @file.readString(4)
@file.seek 4, true
obj.datasize = @file.readInt()
obj.openFile = @file.readBoolean()
if obj.openFile is true
@file.seek 4, true
obj.openFile = new Descriptor(@file).parse()
if kind is 'liFD'
obj.fileData = @file.read(obj.datasize)
if version >= 5
obj.childId = @file.readUnicodeString()
if version >= 6
obj.modTime = @file.readDouble()
if version >= 7
obj.lockedState = @file.readBoolean()
@files.push obj
@file.seek fileEnd
@file.seek end
return @files |
[
{
"context": "lection: new Luca.Collection([\n# author: \"George Orwell\"\n# title: \"Animal Farm\"\n# ,\n# ",
"end": 509,
"score": 0.999890923500061,
"start": 496,
"tag": "NAME",
"value": "George Orwell"
},
{
"context": "itle: \"Animal Farm\"\n# ,\n... | app/assets/javascripts/luca/components/collection_view.coffee | datapimp/luca | 4 | # The `Luca.CollectionView` renders models from a `Luca.Collection` into multiple
# elements, and provides methods for filtering, paginating, sorting the underlying
# collection and re-rendering the contents of its `@el` accordingly.
#
# #### Basic Example
# collectionView = Luca.register "App.views.Books"
# collectionView.extends "Luca.CollectionView"
#
# collectionView.defines
# itemProperty: "author"
# collection: new Luca.Collection([
# author: "George Orwell"
# title: "Animal Farm"
# ,
# author: "Noam Chomsky"
# title: "Manufacturing Consent"
# ])
#
# view = new App.views.Books()
# #### Extending it to make it Filterable and Paginatable
# filterable = Luca.register "App.views.FilterableBooks"
# filterable.extends "App.views.Books"
# filterable.defines
# collection: "books"
# paginatable: 12
# filterable:
# query:
# author: "George Orwell"
#
# view = new App.views.FilterableBooks()
# #### Filterable Collections
#
# The `Luca.CollectionView` will attempt to perform a local query against its
# collection which behaves like a `Backbone.QueryCollection`. It will do this
# by default without making a remote request to the API.
#
# If you do not want this behavior, you can configure the `Luca.CollectionView` to
# behave as if the filtering was happen remotely in your REST API.
#
# filterable:
# options:
# remote: true
collectionView = Luca.register "Luca.CollectionView"
collectionView.extends "Luca.Panel"
collectionView.replaces "Luca.components.CollectionView"
collectionView.mixesIn "QueryCollectionBindings",
"LoadMaskable",
"Filterable",
"Paginatable",
"Sortable"
collectionView.triggers "before:refresh",
"after:refresh",
"refresh",
"empty:results"
collectionView.publicConfiguration
# Specify which collection will be used to supply the models to be rendered.
# Accepts either a string alias for the Collection class, or an instance of
# any class which inherits from Backbone.Collection
collection: undefined
# By default the CollectionView will be rendered inside of an OL tag.
tagName: "ol"
# The CollectionView behaves as a Luca.Panel which means it has an area for
# top and bottom toolbars. The actual content that gets rendered from the
# collection will be rendered inside an element with the specified class.
bodyClassName: "collection-ui-panel"
# Each item from the collection will be rendered inside of an element specified by @itemTagName
itemTagName: 'li'
# Each item element will be assigned a CSS class specified by @itemClassName
itemClassName: 'collection-item'
# Specify which template should be used to render each item in the collection.
# Accepts a string which will be passed to Luca.template(@itemTemplate). Your template
# can expect to be passed an object with the `model` and `index` properties on it.
itemTemplate: undefined
# Accepts a reference to a function, which will be called with an object with the `model` and `index`
# properties on it. This function should return a String which will be injected into the item DOM element.
itemRenderer: undefined
# Plucks the specified property from the model and inserts it into the item DOM element.
itemProperty: undefined
# If @observeChanges is set to true, any change in an underlying model will automatically be re-rendered.
observeChanges: false
collectionView.publicMethods
initialize: (@options={})->
_.extend(@, @options)
_.bindAll @, "refresh"
unless @collection? or @options.collection
console.log "Error on initialize of collection view", @
throw "Collection Views must specify a collection"
unless @itemTemplate? || @itemRenderer? || @itemProperty?
throw "Collection Views must specify an item template or item renderer function"
if _.isString(@collection)
if Luca.CollectionManager.get()
@collection = Luca.CollectionManager.get().getOrCreate(@collection)
else
console.log "String Collection but no collection manager"
unless Luca.isBackboneCollection(@collection)
console.log "Missing Collection on #{ @name || @cid }", @, @collection
throw "Collection Views must have a valid backbone collection"
# INVESTIGATE THIS BEING DOUBLE WORK
@on "data:refresh", @refresh, @
@on "collection:reset", @refresh, @
@on "collection:remove", @refresh, @
@on "collection:add", @refresh, @
@on "collection:change", @refreshModel, @ if @observeChanges is true
Luca.Panel::initialize.apply(@, arguments)
view = @
if @getCollection()?.length > 0
@on "after:render", ()->
view.refresh()
view.unbind "after:render", @
# Given the id of a model, find the underlying DOM element which was rendered by this collection.
# Assumes that the data-model-id attribute is set, which it is by default by @attributesForItem.
locateItemElement: (id)->
@$(".#{ @itemClassName }[data-model-id='#{ id }']")
# Refresh is responsible for applying any filtering, pagination, or sorting options that may be set
# from the various Luca.concerns mixed in by `Luca.CollectionView` and making a query to the underlying
# collection. It will then take the set of models returned by `@getModels` and pass them through the
# item rendering pipeline.
refresh: ()->
query = @getLocalQuery()
options = @getQueryOptions()
models = @getModels(query, options)
@$bodyEl().empty()
@trigger("before:refresh", models, query, options)
if models.length is 0
@trigger("empty:results", query, options)
@renderModels(models, query, options)
@trigger("after:refresh", models, query, options)
@
collectionView.privateMethods
renderModels: (models, query, options)->
index = 0
for model in models
@$append @makeItem(model, index++)
# Determines which attributes should be set on the item DOM element.
attributesForItem: (item, model)->
_.extend {}, class: @itemClassName, "data-index": item.index, "data-model-id": item.model.get('id')
# Determines the content for the item DOM element. Will use the appropriate options
# specified by `@itemTemplate`, `@itemRenderer`, or `@itemProperty`
contentForItem: (item={})->
if @itemTemplate? and templateFn = Luca.template(@itemTemplate)
return content = templateFn.call(@, item)
if @itemRenderer? and _.isFunction( @itemRenderer )
return content = @itemRenderer.call(@, item, item.model, item.index)
if @itemProperty and item.model?
return content = item.model.read( @itemProperty )
""
# Uses the various options passed to the `CollectionView` to assemble a call to `Luca.View::make`.
makeItem: (model, index)->
item = if @prepareItem? then @prepareItem.call(@, model, index) else (model:model, index: index)
attributes = @attributesForItem(item, model)
content = @contentForItem(item)
try
Luca.View::make(@itemTagName, attributes, content)
catch e
console.log "Error generating DOM element for CollectionView", @, model, index
# Given a model, attempt to re-render the contents of its item in this view's DOM contents.
refreshModel: (model)->
index = @collection.indexOf( model )
@locateItemElement(model.get('id')).empty().append( @contentForItem({model,index}, model) )
@trigger("model:refreshed", index, model)
registerEvent: (domEvent, selector, handler)->
if !handler? and _.isFunction(selector)
handler = selector
selector = undefined
eventTrigger = _([domEvent,"#{ @itemTagName }.#{ @itemClassName }", selector]).compact().join(" ")
Luca.View::registerEvent(eventTrigger,handler)
collectionView.register()
| 174750 | # The `Luca.CollectionView` renders models from a `Luca.Collection` into multiple
# elements, and provides methods for filtering, paginating, sorting the underlying
# collection and re-rendering the contents of its `@el` accordingly.
#
# #### Basic Example
# collectionView = Luca.register "App.views.Books"
# collectionView.extends "Luca.CollectionView"
#
# collectionView.defines
# itemProperty: "author"
# collection: new Luca.Collection([
# author: "<NAME>"
# title: "Animal Farm"
# ,
# author: "<NAME>"
# title: "Manufacturing Consent"
# ])
#
# view = new App.views.Books()
# #### Extending it to make it Filterable and Paginatable
# filterable = Luca.register "App.views.FilterableBooks"
# filterable.extends "App.views.Books"
# filterable.defines
# collection: "books"
# paginatable: 12
# filterable:
# query:
# author: "<NAME>"
#
# view = new App.views.FilterableBooks()
# #### Filterable Collections
#
# The `Luca.CollectionView` will attempt to perform a local query against its
# collection which behaves like a `Backbone.QueryCollection`. It will do this
# by default without making a remote request to the API.
#
# If you do not want this behavior, you can configure the `Luca.CollectionView` to
# behave as if the filtering was happen remotely in your REST API.
#
# filterable:
# options:
# remote: true
collectionView = Luca.register "Luca.CollectionView"
collectionView.extends "Luca.Panel"
collectionView.replaces "Luca.components.CollectionView"
collectionView.mixesIn "QueryCollectionBindings",
"LoadMaskable",
"Filterable",
"Paginatable",
"Sortable"
collectionView.triggers "before:refresh",
"after:refresh",
"refresh",
"empty:results"
collectionView.publicConfiguration
# Specify which collection will be used to supply the models to be rendered.
# Accepts either a string alias for the Collection class, or an instance of
# any class which inherits from Backbone.Collection
collection: undefined
# By default the CollectionView will be rendered inside of an OL tag.
tagName: "ol"
# The CollectionView behaves as a Luca.Panel which means it has an area for
# top and bottom toolbars. The actual content that gets rendered from the
# collection will be rendered inside an element with the specified class.
bodyClassName: "collection-ui-panel"
# Each item from the collection will be rendered inside of an element specified by @itemTagName
itemTagName: 'li'
# Each item element will be assigned a CSS class specified by @itemClassName
itemClassName: 'collection-item'
# Specify which template should be used to render each item in the collection.
# Accepts a string which will be passed to Luca.template(@itemTemplate). Your template
# can expect to be passed an object with the `model` and `index` properties on it.
itemTemplate: undefined
# Accepts a reference to a function, which will be called with an object with the `model` and `index`
# properties on it. This function should return a String which will be injected into the item DOM element.
itemRenderer: undefined
# Plucks the specified property from the model and inserts it into the item DOM element.
itemProperty: undefined
# If @observeChanges is set to true, any change in an underlying model will automatically be re-rendered.
observeChanges: false
collectionView.publicMethods
initialize: (@options={})->
_.extend(@, @options)
_.bindAll @, "refresh"
unless @collection? or @options.collection
console.log "Error on initialize of collection view", @
throw "Collection Views must specify a collection"
unless @itemTemplate? || @itemRenderer? || @itemProperty?
throw "Collection Views must specify an item template or item renderer function"
if _.isString(@collection)
if Luca.CollectionManager.get()
@collection = Luca.CollectionManager.get().getOrCreate(@collection)
else
console.log "String Collection but no collection manager"
unless Luca.isBackboneCollection(@collection)
console.log "Missing Collection on #{ @name || @cid }", @, @collection
throw "Collection Views must have a valid backbone collection"
# INVESTIGATE THIS BEING DOUBLE WORK
@on "data:refresh", @refresh, @
@on "collection:reset", @refresh, @
@on "collection:remove", @refresh, @
@on "collection:add", @refresh, @
@on "collection:change", @refreshModel, @ if @observeChanges is true
Luca.Panel::initialize.apply(@, arguments)
view = @
if @getCollection()?.length > 0
@on "after:render", ()->
view.refresh()
view.unbind "after:render", @
# Given the id of a model, find the underlying DOM element which was rendered by this collection.
# Assumes that the data-model-id attribute is set, which it is by default by @attributesForItem.
locateItemElement: (id)->
@$(".#{ @itemClassName }[data-model-id='#{ id }']")
# Refresh is responsible for applying any filtering, pagination, or sorting options that may be set
# from the various Luca.concerns mixed in by `Luca.CollectionView` and making a query to the underlying
# collection. It will then take the set of models returned by `@getModels` and pass them through the
# item rendering pipeline.
refresh: ()->
query = @getLocalQuery()
options = @getQueryOptions()
models = @getModels(query, options)
@$bodyEl().empty()
@trigger("before:refresh", models, query, options)
if models.length is 0
@trigger("empty:results", query, options)
@renderModels(models, query, options)
@trigger("after:refresh", models, query, options)
@
collectionView.privateMethods
renderModels: (models, query, options)->
index = 0
for model in models
@$append @makeItem(model, index++)
# Determines which attributes should be set on the item DOM element.
attributesForItem: (item, model)->
_.extend {}, class: @itemClassName, "data-index": item.index, "data-model-id": item.model.get('id')
# Determines the content for the item DOM element. Will use the appropriate options
# specified by `@itemTemplate`, `@itemRenderer`, or `@itemProperty`
contentForItem: (item={})->
if @itemTemplate? and templateFn = Luca.template(@itemTemplate)
return content = templateFn.call(@, item)
if @itemRenderer? and _.isFunction( @itemRenderer )
return content = @itemRenderer.call(@, item, item.model, item.index)
if @itemProperty and item.model?
return content = item.model.read( @itemProperty )
""
# Uses the various options passed to the `CollectionView` to assemble a call to `Luca.View::make`.
makeItem: (model, index)->
item = if @prepareItem? then @prepareItem.call(@, model, index) else (model:model, index: index)
attributes = @attributesForItem(item, model)
content = @contentForItem(item)
try
Luca.View::make(@itemTagName, attributes, content)
catch e
console.log "Error generating DOM element for CollectionView", @, model, index
# Given a model, attempt to re-render the contents of its item in this view's DOM contents.
refreshModel: (model)->
index = @collection.indexOf( model )
@locateItemElement(model.get('id')).empty().append( @contentForItem({model,index}, model) )
@trigger("model:refreshed", index, model)
registerEvent: (domEvent, selector, handler)->
if !handler? and _.isFunction(selector)
handler = selector
selector = undefined
eventTrigger = _([domEvent,"#{ @itemTagName }.#{ @itemClassName }", selector]).compact().join(" ")
Luca.View::registerEvent(eventTrigger,handler)
collectionView.register()
| true | # The `Luca.CollectionView` renders models from a `Luca.Collection` into multiple
# elements, and provides methods for filtering, paginating, sorting the underlying
# collection and re-rendering the contents of its `@el` accordingly.
#
# #### Basic Example
# collectionView = Luca.register "App.views.Books"
# collectionView.extends "Luca.CollectionView"
#
# collectionView.defines
# itemProperty: "author"
# collection: new Luca.Collection([
# author: "PI:NAME:<NAME>END_PI"
# title: "Animal Farm"
# ,
# author: "PI:NAME:<NAME>END_PI"
# title: "Manufacturing Consent"
# ])
#
# view = new App.views.Books()
# #### Extending it to make it Filterable and Paginatable
# filterable = Luca.register "App.views.FilterableBooks"
# filterable.extends "App.views.Books"
# filterable.defines
# collection: "books"
# paginatable: 12
# filterable:
# query:
# author: "PI:NAME:<NAME>END_PI"
#
# view = new App.views.FilterableBooks()
# #### Filterable Collections
#
# The `Luca.CollectionView` will attempt to perform a local query against its
# collection which behaves like a `Backbone.QueryCollection`. It will do this
# by default without making a remote request to the API.
#
# If you do not want this behavior, you can configure the `Luca.CollectionView` to
# behave as if the filtering was happen remotely in your REST API.
#
# filterable:
# options:
# remote: true
collectionView = Luca.register "Luca.CollectionView"
collectionView.extends "Luca.Panel"
collectionView.replaces "Luca.components.CollectionView"
collectionView.mixesIn "QueryCollectionBindings",
"LoadMaskable",
"Filterable",
"Paginatable",
"Sortable"
collectionView.triggers "before:refresh",
"after:refresh",
"refresh",
"empty:results"
collectionView.publicConfiguration
# Specify which collection will be used to supply the models to be rendered.
# Accepts either a string alias for the Collection class, or an instance of
# any class which inherits from Backbone.Collection
collection: undefined
# By default the CollectionView will be rendered inside of an OL tag.
tagName: "ol"
# The CollectionView behaves as a Luca.Panel which means it has an area for
# top and bottom toolbars. The actual content that gets rendered from the
# collection will be rendered inside an element with the specified class.
bodyClassName: "collection-ui-panel"
# Each item from the collection will be rendered inside of an element specified by @itemTagName
itemTagName: 'li'
# Each item element will be assigned a CSS class specified by @itemClassName
itemClassName: 'collection-item'
# Specify which template should be used to render each item in the collection.
# Accepts a string which will be passed to Luca.template(@itemTemplate). Your template
# can expect to be passed an object with the `model` and `index` properties on it.
itemTemplate: undefined
# Accepts a reference to a function, which will be called with an object with the `model` and `index`
# properties on it. This function should return a String which will be injected into the item DOM element.
itemRenderer: undefined
# Plucks the specified property from the model and inserts it into the item DOM element.
itemProperty: undefined
# If @observeChanges is set to true, any change in an underlying model will automatically be re-rendered.
observeChanges: false
collectionView.publicMethods
initialize: (@options={})->
_.extend(@, @options)
_.bindAll @, "refresh"
unless @collection? or @options.collection
console.log "Error on initialize of collection view", @
throw "Collection Views must specify a collection"
unless @itemTemplate? || @itemRenderer? || @itemProperty?
throw "Collection Views must specify an item template or item renderer function"
if _.isString(@collection)
if Luca.CollectionManager.get()
@collection = Luca.CollectionManager.get().getOrCreate(@collection)
else
console.log "String Collection but no collection manager"
unless Luca.isBackboneCollection(@collection)
console.log "Missing Collection on #{ @name || @cid }", @, @collection
throw "Collection Views must have a valid backbone collection"
# INVESTIGATE THIS BEING DOUBLE WORK
@on "data:refresh", @refresh, @
@on "collection:reset", @refresh, @
@on "collection:remove", @refresh, @
@on "collection:add", @refresh, @
@on "collection:change", @refreshModel, @ if @observeChanges is true
Luca.Panel::initialize.apply(@, arguments)
view = @
if @getCollection()?.length > 0
@on "after:render", ()->
view.refresh()
view.unbind "after:render", @
# Given the id of a model, find the underlying DOM element which was rendered by this collection.
# Assumes that the data-model-id attribute is set, which it is by default by @attributesForItem.
locateItemElement: (id)->
@$(".#{ @itemClassName }[data-model-id='#{ id }']")
# Refresh is responsible for applying any filtering, pagination, or sorting options that may be set
# from the various Luca.concerns mixed in by `Luca.CollectionView` and making a query to the underlying
# collection. It will then take the set of models returned by `@getModels` and pass them through the
# item rendering pipeline.
refresh: ()->
query = @getLocalQuery()
options = @getQueryOptions()
models = @getModels(query, options)
@$bodyEl().empty()
@trigger("before:refresh", models, query, options)
if models.length is 0
@trigger("empty:results", query, options)
@renderModels(models, query, options)
@trigger("after:refresh", models, query, options)
@
collectionView.privateMethods
renderModels: (models, query, options)->
index = 0
for model in models
@$append @makeItem(model, index++)
# Determines which attributes should be set on the item DOM element.
attributesForItem: (item, model)->
_.extend {}, class: @itemClassName, "data-index": item.index, "data-model-id": item.model.get('id')
# Determines the content for the item DOM element. Will use the appropriate options
# specified by `@itemTemplate`, `@itemRenderer`, or `@itemProperty`
contentForItem: (item={})->
if @itemTemplate? and templateFn = Luca.template(@itemTemplate)
return content = templateFn.call(@, item)
if @itemRenderer? and _.isFunction( @itemRenderer )
return content = @itemRenderer.call(@, item, item.model, item.index)
if @itemProperty and item.model?
return content = item.model.read( @itemProperty )
""
# Uses the various options passed to the `CollectionView` to assemble a call to `Luca.View::make`.
makeItem: (model, index)->
item = if @prepareItem? then @prepareItem.call(@, model, index) else (model:model, index: index)
attributes = @attributesForItem(item, model)
content = @contentForItem(item)
try
Luca.View::make(@itemTagName, attributes, content)
catch e
console.log "Error generating DOM element for CollectionView", @, model, index
# Given a model, attempt to re-render the contents of its item in this view's DOM contents.
refreshModel: (model)->
index = @collection.indexOf( model )
@locateItemElement(model.get('id')).empty().append( @contentForItem({model,index}, model) )
@trigger("model:refreshed", index, model)
registerEvent: (domEvent, selector, handler)->
if !handler? and _.isFunction(selector)
handler = selector
selector = undefined
eventTrigger = _([domEvent,"#{ @itemTagName }.#{ @itemClassName }", selector]).compact().join(" ")
Luca.View::registerEvent(eventTrigger,handler)
collectionView.register()
|
[
{
"context": "source, URLS) ->\n\n token_defaults =\n token_id: '@id'\n\n ClientToken = $resource [URLS.api, 'clientt",
"end": 68,
"score": 0.6317550539970398,
"start": 66,
"tag": "KEY",
"value": "'@"
},
{
"context": "rce, URLS) ->\n\n token_defaults =\n token_id: '@id'\n\n ... | src/coffee/services/api/client_token.coffee | dadleyy/loftili.ui | 0 | _factory = ($resource, URLS) ->
token_defaults =
token_id: '@id'
ClientToken = $resource [URLS.api, 'clienttokens', ':token_id'].join('/'), token_defaults,
destroy:
method: 'DELETE'
_factory.$inject = ['$resource', 'URLS']
lft.service 'Api/ClientToken', _factory
| 21869 | _factory = ($resource, URLS) ->
token_defaults =
token_id: <KEY> <PASSWORD>'
ClientToken = $resource [URLS.api, 'clienttokens', ':token_id'].join('/'), token_defaults,
destroy:
method: 'DELETE'
_factory.$inject = ['$resource', 'URLS']
lft.service 'Api/ClientToken', _factory
| true | _factory = ($resource, URLS) ->
token_defaults =
token_id: PI:KEY:<KEY>END_PI PI:PASSWORD:<PASSWORD>END_PI'
ClientToken = $resource [URLS.api, 'clienttokens', ':token_id'].join('/'), token_defaults,
destroy:
method: 'DELETE'
_factory.$inject = ['$resource', 'URLS']
lft.service 'Api/ClientToken', _factory
|
[
{
"context": "trim(this.$(\"#email\").val())\n\t\tuserData.password = s.trim(this.$(\"#password\").val())\n\t\tuserData.requirePass",
"end": 888,
"score": 0.9927790760993958,
"start": 882,
"tag": "PASSWORD",
"value": "s.trim"
},
{
"context": "il\").val())\n\t\tuserData.password = s.t... | packages/rocketchat-ui-admin/admin/users/adminUserEdit.coffee | In4No/chatApp | 0 | Template.adminUserEdit.helpers
canEditOrAdd: ->
return (Session.get('adminSelectedUser') and RocketChat.authz.hasAtLeastOnePermission('edit-other-user-info')) or (not Session.get('adminSelectedUser') and RocketChat.authz.hasAtLeastOnePermission('add-user'))
user: ->
return Meteor.users.findOne(Session.get('adminSelectedUser'))
Template.adminUserEdit.events
'click .cancel': (e, t) ->
e.stopPropagation()
e.preventDefault()
t.cancel()
'click .save': (e, t) ->
e.stopPropagation()
e.preventDefault()
t.save()
Template.adminUserEdit.onCreated ->
@cancel = =>
RocketChat.TabBar.setTemplate 'adminUserInfo'
@getUserData = =>
userData = { _id: Session.get('adminSelectedUser') }
userData.name = s.trim(this.$("#name").val())
userData.username = s.trim(this.$("#username").val())
userData.email = s.trim(this.$("#email").val())
userData.password = s.trim(this.$("#password").val())
userData.requirePasswordChange = this.$("#changePassword:checked").length > 0
return userData
@validate = =>
userData = this.getUserData()
errors = []
unless userData.name
errors.push 'Name'
unless userData.username
errors.push 'Username'
unless userData.email
errors.push 'E-mail'
for error in errors
toastr.error(TAPi18n.__('The_field_is_required', TAPi18n.__(error)))
return errors.length is 0
@save = =>
if this.validate()
userData = this.getUserData()
Meteor.call 'insertOrUpdateUser', userData, (error, result) =>
if result
if userData._id
toastr.success t('User_updated_successfully')
else
toastr.success t('User_added_successfully')
Session.set('adminSelectedUser', result);
Session.set('showUserInfo', result);
Meteor.subscribe 'fullUserData', userData.username, 1
this.cancel()
if error
toastr.error error.reason
| 85994 | Template.adminUserEdit.helpers
canEditOrAdd: ->
return (Session.get('adminSelectedUser') and RocketChat.authz.hasAtLeastOnePermission('edit-other-user-info')) or (not Session.get('adminSelectedUser') and RocketChat.authz.hasAtLeastOnePermission('add-user'))
user: ->
return Meteor.users.findOne(Session.get('adminSelectedUser'))
Template.adminUserEdit.events
'click .cancel': (e, t) ->
e.stopPropagation()
e.preventDefault()
t.cancel()
'click .save': (e, t) ->
e.stopPropagation()
e.preventDefault()
t.save()
Template.adminUserEdit.onCreated ->
@cancel = =>
RocketChat.TabBar.setTemplate 'adminUserInfo'
@getUserData = =>
userData = { _id: Session.get('adminSelectedUser') }
userData.name = s.trim(this.$("#name").val())
userData.username = s.trim(this.$("#username").val())
userData.email = s.trim(this.$("#email").val())
userData.password = <PASSWORD>(this.$("#<PASSWORD>").val())
userData.requirePasswordChange = this.$("#changePassword:checked").length > 0
return userData
@validate = =>
userData = this.getUserData()
errors = []
unless userData.name
errors.push 'Name'
unless userData.username
errors.push 'Username'
unless userData.email
errors.push 'E-mail'
for error in errors
toastr.error(TAPi18n.__('The_field_is_required', TAPi18n.__(error)))
return errors.length is 0
@save = =>
if this.validate()
userData = this.getUserData()
Meteor.call 'insertOrUpdateUser', userData, (error, result) =>
if result
if userData._id
toastr.success t('User_updated_successfully')
else
toastr.success t('User_added_successfully')
Session.set('adminSelectedUser', result);
Session.set('showUserInfo', result);
Meteor.subscribe 'fullUserData', userData.username, 1
this.cancel()
if error
toastr.error error.reason
| true | Template.adminUserEdit.helpers
canEditOrAdd: ->
return (Session.get('adminSelectedUser') and RocketChat.authz.hasAtLeastOnePermission('edit-other-user-info')) or (not Session.get('adminSelectedUser') and RocketChat.authz.hasAtLeastOnePermission('add-user'))
user: ->
return Meteor.users.findOne(Session.get('adminSelectedUser'))
Template.adminUserEdit.events
'click .cancel': (e, t) ->
e.stopPropagation()
e.preventDefault()
t.cancel()
'click .save': (e, t) ->
e.stopPropagation()
e.preventDefault()
t.save()
Template.adminUserEdit.onCreated ->
@cancel = =>
RocketChat.TabBar.setTemplate 'adminUserInfo'
@getUserData = =>
userData = { _id: Session.get('adminSelectedUser') }
userData.name = s.trim(this.$("#name").val())
userData.username = s.trim(this.$("#username").val())
userData.email = s.trim(this.$("#email").val())
userData.password = PI:PASSWORD:<PASSWORD>END_PI(this.$("#PI:PASSWORD:<PASSWORD>END_PI").val())
userData.requirePasswordChange = this.$("#changePassword:checked").length > 0
return userData
@validate = =>
userData = this.getUserData()
errors = []
unless userData.name
errors.push 'Name'
unless userData.username
errors.push 'Username'
unless userData.email
errors.push 'E-mail'
for error in errors
toastr.error(TAPi18n.__('The_field_is_required', TAPi18n.__(error)))
return errors.length is 0
@save = =>
if this.validate()
userData = this.getUserData()
Meteor.call 'insertOrUpdateUser', userData, (error, result) =>
if result
if userData._id
toastr.success t('User_updated_successfully')
else
toastr.success t('User_added_successfully')
Session.set('adminSelectedUser', result);
Session.set('showUserInfo', result);
Meteor.subscribe 'fullUserData', userData.username, 1
this.cancel()
if error
toastr.error error.reason
|
[
{
"context": "\n React.createElement Lift, key: lift.id, lift: lift\n",
"end": 656,
"score": 0.9050098657608032,
"start": 654,
"tag": "KEY",
"value": "id"
}
] | app/assets/javascripts/components/lifts.js.coffee | tomFelder/fitness-app | 0 | @Lifts = React.createClass
getInitialState: ->
lifts: @props.data
getDefaultProps: ->
lifts: []
Render: ->
React.DOM.div
className: 'lifts'
React.DOM.h1
className: 'title'
'Lifts'
React.DOM.table
className: 'table table-bordered'
React.DOM.thead null
React.DOM.th null, 'Date'
React.DOM.th null, 'Lift Name'
React.DOM.th null, 'Weight Lifted'
React.DOM.th null, 'Reps Performed'
React.DOM.th null, '1 RM'
React.DOM.tbody null,
for lift in @state.lifts
React.createElement Lift, key: lift.id, lift: lift
| 123587 | @Lifts = React.createClass
getInitialState: ->
lifts: @props.data
getDefaultProps: ->
lifts: []
Render: ->
React.DOM.div
className: 'lifts'
React.DOM.h1
className: 'title'
'Lifts'
React.DOM.table
className: 'table table-bordered'
React.DOM.thead null
React.DOM.th null, 'Date'
React.DOM.th null, 'Lift Name'
React.DOM.th null, 'Weight Lifted'
React.DOM.th null, 'Reps Performed'
React.DOM.th null, '1 RM'
React.DOM.tbody null,
for lift in @state.lifts
React.createElement Lift, key: lift.<KEY>, lift: lift
| true | @Lifts = React.createClass
getInitialState: ->
lifts: @props.data
getDefaultProps: ->
lifts: []
Render: ->
React.DOM.div
className: 'lifts'
React.DOM.h1
className: 'title'
'Lifts'
React.DOM.table
className: 'table table-bordered'
React.DOM.thead null
React.DOM.th null, 'Date'
React.DOM.th null, 'Lift Name'
React.DOM.th null, 'Weight Lifted'
React.DOM.th null, 'Reps Performed'
React.DOM.th null, '1 RM'
React.DOM.tbody null,
for lift in @state.lifts
React.createElement Lift, key: lift.PI:KEY:<KEY>END_PI, lift: lift
|
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.9999128580093384,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/coffee/react/mp-history/event.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import * as React from 'react'
import { div, span, a, i } from 'react-dom-factories'
import TimeWithTooltip from 'time-with-tooltip'
el = React.createElement
export class Event extends React.Component
icons:
'player-left': ['fas fa-arrow-left', 'far fa-circle']
'player-joined': ['fas fa-arrow-right', 'far fa-circle']
'player-kicked': ['fas fa-arrow-left', 'fas fa-ban']
'match-created': ['fas fa-plus']
'match-disbanded': ['fas fa-times']
'host-changed': ['fas fa-exchange-alt']
render: ->
user = @props.users[@props.event.user_id]
event_type = @props.event.detail.type
if user? && event_type != 'match-disbanded'
userLink = osu.link laroute.route('users.show', user: user.id),
user.username
classNames: ['mp-history-event__username']
div className: 'mp-history-event',
div className: 'mp-history-event__time',
el TimeWithTooltip, dateTime: @props.event.timestamp, format: 'LTS'
div className: "mp-history-event__type mp-history-event__type--#{event_type}",
@icons[event_type].map (m) ->
i key: m, className: m
div
className: 'mp-history-event__text',
dangerouslySetInnerHTML:
__html: osu.trans "multiplayer.match.events.#{event_type}#{if user? then '' else '-no-user'}",
user: userLink
| 222372 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import * as React from 'react'
import { div, span, a, i } from 'react-dom-factories'
import TimeWithTooltip from 'time-with-tooltip'
el = React.createElement
export class Event extends React.Component
icons:
'player-left': ['fas fa-arrow-left', 'far fa-circle']
'player-joined': ['fas fa-arrow-right', 'far fa-circle']
'player-kicked': ['fas fa-arrow-left', 'fas fa-ban']
'match-created': ['fas fa-plus']
'match-disbanded': ['fas fa-times']
'host-changed': ['fas fa-exchange-alt']
render: ->
user = @props.users[@props.event.user_id]
event_type = @props.event.detail.type
if user? && event_type != 'match-disbanded'
userLink = osu.link laroute.route('users.show', user: user.id),
user.username
classNames: ['mp-history-event__username']
div className: 'mp-history-event',
div className: 'mp-history-event__time',
el TimeWithTooltip, dateTime: @props.event.timestamp, format: 'LTS'
div className: "mp-history-event__type mp-history-event__type--#{event_type}",
@icons[event_type].map (m) ->
i key: m, className: m
div
className: 'mp-history-event__text',
dangerouslySetInnerHTML:
__html: osu.trans "multiplayer.match.events.#{event_type}#{if user? then '' else '-no-user'}",
user: userLink
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import * as React from 'react'
import { div, span, a, i } from 'react-dom-factories'
import TimeWithTooltip from 'time-with-tooltip'
el = React.createElement
export class Event extends React.Component
icons:
'player-left': ['fas fa-arrow-left', 'far fa-circle']
'player-joined': ['fas fa-arrow-right', 'far fa-circle']
'player-kicked': ['fas fa-arrow-left', 'fas fa-ban']
'match-created': ['fas fa-plus']
'match-disbanded': ['fas fa-times']
'host-changed': ['fas fa-exchange-alt']
render: ->
user = @props.users[@props.event.user_id]
event_type = @props.event.detail.type
if user? && event_type != 'match-disbanded'
userLink = osu.link laroute.route('users.show', user: user.id),
user.username
classNames: ['mp-history-event__username']
div className: 'mp-history-event',
div className: 'mp-history-event__time',
el TimeWithTooltip, dateTime: @props.event.timestamp, format: 'LTS'
div className: "mp-history-event__type mp-history-event__type--#{event_type}",
@icons[event_type].map (m) ->
i key: m, className: m
div
className: 'mp-history-event__text',
dangerouslySetInnerHTML:
__html: osu.trans "multiplayer.match.events.#{event_type}#{if user? then '' else '-no-user'}",
user: userLink
|
[
{
"context": " v[ca.id].y\n\n\t# Завершающая линия\n#\tx = $(\"#tbl\"+@pa.id).outerWidth(true)\n#\ty = $(\"#tbl\"+@id).position().",
"end": 3018,
"score": 0.7318387031555176,
"start": 3013,
"tag": "EMAIL",
"value": "pa.id"
},
{
"context": "\n#\tas = \"M #{px} #{py} L #{x} #{y}\"\n#\t... | blueocean.coffee | agershun/minday | 0 | ################################################
# MINDAY 0.009 Ly
# gantt.coffee
# Процедуры для работы с диаграммой Гантта
################################################
keyp[BLUEOCEAN] = {}
keyp[OCEAN] = {}
keyp[CHAR] = {}
canvastype[BLUEOCEAN] =-> PASSIVE
childkind[BLUEOCEAN] =-> OCEAN
childkind[OCEAN] =-> IDEA
childkind[CHARS] =-> CHAR
childkind[CHAR] =-> IDEA
Idea::kindBlueOcean = ->
@kind = BLUEOCEAN
for ia,i in @data
ia.pa = @
ia.ix = i
ia.kind = OCEAN
@char.pa = @
@char.ix = "char"
@char.kind = CHARS
for ia,i in @char.data
ia.pa = @
ia.ix = i
ia.kind = CHAR
bocol = ["Red","Green","Blue","Yellow","Brown"] # TODO Сделать массив цветов
draw[BLUEOCEAN] = (level) ->
s = @startIdeaTxt(level)
if @notFrame(level)
s += "<table id='tbl"+@id+"' class='normaltable'><thead><th>"
if @char.length > 0
for ia in @char.data
if ia.kind is CHAR
s += "<th id='tbl"+ia.id+"'>"
s += ia.draw(level+1)
s += "<tbody>"
if @length > 0
s += "<tr>"
s += "<th id='tbl"+@[0].id+"' style='color:"+bocol[0]+";'>"+@[0].draw(level+1)
s += "<td id='cnv"+@id+"' rowspan="+@length+" colspan="+@char.length+" >"
if @length > 1
for i in [1..@length-1]
ia = @[i]
if ia.kind is OCEAN
s += "<tr><th id='tbl"+ia.id+"' style='color:"+bocol[i]+";'>"
s += ia.draw(level+1)
s += @endIdeaTxt(level)
# Теперь рисуем графику
cvv[BLUEOCEAN] =->
v[@id] = {}
v[@id].ww = $("#tbl"+@id).outerWidth(true)
v[@id].hh = $("#tbl"+@id).outerHeight(true)
v[@id].sw = $("#cnv"+@id).position().left
v[@id].sh = $("#cnv"+@id).position().top
v[@id].w = $("#cnv"+@id).outerWidth(true)
v[@id].h = $("#cnv"+@id).outerHeight(true)
$cvv = $("#cvv"+@id)
$cvv.attr "height",v[@id].hh
$cvv.attr "width",v[@id].ww
v[@id].ctx = $cvv[0].getContext("2d")
# console.log v[@id].ctx
for ia in @data
ia.paintOcean(v[@id].ctx)
Idea::paintOcean = (ctx) ->
bocolor = bocol[@ix] # TODO Сделать массив цветов
# console.log bocolor
px = $("#tbl"+@id).outerWidth(true)
py = $("#tbl"+@id).position().top+$("#tbl"+@id).outerHeight(true)/2
# px = 0
# py = 0
for ca in @pa.char
v[ca.id] = {}
v[ca.id].ctx = ctx
v[ca.id].x = $("#tbl"+ca.id).position().left+$("#tbl"+ca.id).outerWidth(true)/2
v[ca.id].y = v[@pa.id].sh + v[@pa.id].h / (ca.length) * (@[ca.id]+0.5) - 10 # Поправка на размер кружка
# TODO Сделать размер кружков константой
x = v[ca.id].x
y = v[ca.id].y
r = 10
ctx.beginPath()
ctx.arc x,y,r,0,2*Math.PI
ctx.closePath()
ctx.fillStyle = bocolor
ctx.fill()
if px > 0
# Теперь рисуем линию
ctx.beginPath()
ctx.moveTo(px,py)
ctx.lineTo(v[ca.id].x,v[ca.id].y)
ctx.strokeStyle = bocolor
ctx.lineWidth = 4
ctx.stroke()
# as = "M #{px} #{py} L #{x} #{y}"
# g[@id+"l"+ca.id] = ppr.path as
# g[@id+"l"+ca.id].attr
# "stroke": bocolor
# "stroke-opacity": 0.5
# "stroke-width":4
# "fill":"90-#FF9-#FFE"
# "fill-opacity": 1
px = v[ca.id].x
py = v[ca.id].y
# Завершающая линия
# x = $("#tbl"+@pa.id).outerWidth(true)
# y = $("#tbl"+@id).position().top+$("#tbl"+@id).outerHeight(true)/2 - 10 # Попровка на размер
# as = "M #{px} #{py} L #{x} #{y}"
# g[@id+"l"+ca.id] = ppr.path as
# g[@id+"l"+ca.id].attr
# "stroke": bocolor
# "stroke-opacity": 0.5
# "stroke-width":4
# "fill":"90-#FF9-#FFE"
# "fill-opacity": 1
draw[OCEAN] = (level) -> @simpleIdeaTxt(level)
draw[CHAR] = (level) -> @simpleIdeaTxt(level)
#keyp[BLUEOCEAN][KEY_DOWN] =->
# if @[0]? then return @[0].select()
# @
| 31092 | ################################################
# MINDAY 0.009 Ly
# gantt.coffee
# Процедуры для работы с диаграммой Гантта
################################################
keyp[BLUEOCEAN] = {}
keyp[OCEAN] = {}
keyp[CHAR] = {}
canvastype[BLUEOCEAN] =-> PASSIVE
childkind[BLUEOCEAN] =-> OCEAN
childkind[OCEAN] =-> IDEA
childkind[CHARS] =-> CHAR
childkind[CHAR] =-> IDEA
Idea::kindBlueOcean = ->
@kind = BLUEOCEAN
for ia,i in @data
ia.pa = @
ia.ix = i
ia.kind = OCEAN
@char.pa = @
@char.ix = "char"
@char.kind = CHARS
for ia,i in @char.data
ia.pa = @
ia.ix = i
ia.kind = CHAR
bocol = ["Red","Green","Blue","Yellow","Brown"] # TODO Сделать массив цветов
draw[BLUEOCEAN] = (level) ->
s = @startIdeaTxt(level)
if @notFrame(level)
s += "<table id='tbl"+@id+"' class='normaltable'><thead><th>"
if @char.length > 0
for ia in @char.data
if ia.kind is CHAR
s += "<th id='tbl"+ia.id+"'>"
s += ia.draw(level+1)
s += "<tbody>"
if @length > 0
s += "<tr>"
s += "<th id='tbl"+@[0].id+"' style='color:"+bocol[0]+";'>"+@[0].draw(level+1)
s += "<td id='cnv"+@id+"' rowspan="+@length+" colspan="+@char.length+" >"
if @length > 1
for i in [1..@length-1]
ia = @[i]
if ia.kind is OCEAN
s += "<tr><th id='tbl"+ia.id+"' style='color:"+bocol[i]+";'>"
s += ia.draw(level+1)
s += @endIdeaTxt(level)
# Теперь рисуем графику
cvv[BLUEOCEAN] =->
v[@id] = {}
v[@id].ww = $("#tbl"+@id).outerWidth(true)
v[@id].hh = $("#tbl"+@id).outerHeight(true)
v[@id].sw = $("#cnv"+@id).position().left
v[@id].sh = $("#cnv"+@id).position().top
v[@id].w = $("#cnv"+@id).outerWidth(true)
v[@id].h = $("#cnv"+@id).outerHeight(true)
$cvv = $("#cvv"+@id)
$cvv.attr "height",v[@id].hh
$cvv.attr "width",v[@id].ww
v[@id].ctx = $cvv[0].getContext("2d")
# console.log v[@id].ctx
for ia in @data
ia.paintOcean(v[@id].ctx)
Idea::paintOcean = (ctx) ->
bocolor = bocol[@ix] # TODO Сделать массив цветов
# console.log bocolor
px = $("#tbl"+@id).outerWidth(true)
py = $("#tbl"+@id).position().top+$("#tbl"+@id).outerHeight(true)/2
# px = 0
# py = 0
for ca in @pa.char
v[ca.id] = {}
v[ca.id].ctx = ctx
v[ca.id].x = $("#tbl"+ca.id).position().left+$("#tbl"+ca.id).outerWidth(true)/2
v[ca.id].y = v[@pa.id].sh + v[@pa.id].h / (ca.length) * (@[ca.id]+0.5) - 10 # Поправка на размер кружка
# TODO Сделать размер кружков константой
x = v[ca.id].x
y = v[ca.id].y
r = 10
ctx.beginPath()
ctx.arc x,y,r,0,2*Math.PI
ctx.closePath()
ctx.fillStyle = bocolor
ctx.fill()
if px > 0
# Теперь рисуем линию
ctx.beginPath()
ctx.moveTo(px,py)
ctx.lineTo(v[ca.id].x,v[ca.id].y)
ctx.strokeStyle = bocolor
ctx.lineWidth = 4
ctx.stroke()
# as = "M #{px} #{py} L #{x} #{y}"
# g[@id+"l"+ca.id] = ppr.path as
# g[@id+"l"+ca.id].attr
# "stroke": bocolor
# "stroke-opacity": 0.5
# "stroke-width":4
# "fill":"90-#FF9-#FFE"
# "fill-opacity": 1
px = v[ca.id].x
py = v[ca.id].y
# Завершающая линия
# x = $("#tbl"+@<EMAIL>).outerWidth(true)
# y = $("#tbl"+@id).position().top+$("#tbl"+@id).outerHeight(true)/2 - 10 # Попровка на размер
# as = "M #{px} #{py} L #{x} #{y}"
# g[@id+"l"+ca.<EMAIL>] = ppr.path as
# g[@id+"l"+ca<EMAIL>.id].attr
# "stroke": bocolor
# "stroke-opacity": 0.5
# "stroke-width":4
# "fill":"90-#FF9-#FFE"
# "fill-opacity": 1
draw[OCEAN] = (level) -> @simpleIdeaTxt(level)
draw[CHAR] = (level) -> @simpleIdeaTxt(level)
#keyp[BLUEOCEAN][KEY_DOWN] =->
# if @[0]? then return @[0].select()
# @
| true | ################################################
# MINDAY 0.009 Ly
# gantt.coffee
# Процедуры для работы с диаграммой Гантта
################################################
keyp[BLUEOCEAN] = {}
keyp[OCEAN] = {}
keyp[CHAR] = {}
canvastype[BLUEOCEAN] =-> PASSIVE
childkind[BLUEOCEAN] =-> OCEAN
childkind[OCEAN] =-> IDEA
childkind[CHARS] =-> CHAR
childkind[CHAR] =-> IDEA
Idea::kindBlueOcean = ->
@kind = BLUEOCEAN
for ia,i in @data
ia.pa = @
ia.ix = i
ia.kind = OCEAN
@char.pa = @
@char.ix = "char"
@char.kind = CHARS
for ia,i in @char.data
ia.pa = @
ia.ix = i
ia.kind = CHAR
bocol = ["Red","Green","Blue","Yellow","Brown"] # TODO Сделать массив цветов
draw[BLUEOCEAN] = (level) ->
s = @startIdeaTxt(level)
if @notFrame(level)
s += "<table id='tbl"+@id+"' class='normaltable'><thead><th>"
if @char.length > 0
for ia in @char.data
if ia.kind is CHAR
s += "<th id='tbl"+ia.id+"'>"
s += ia.draw(level+1)
s += "<tbody>"
if @length > 0
s += "<tr>"
s += "<th id='tbl"+@[0].id+"' style='color:"+bocol[0]+";'>"+@[0].draw(level+1)
s += "<td id='cnv"+@id+"' rowspan="+@length+" colspan="+@char.length+" >"
if @length > 1
for i in [1..@length-1]
ia = @[i]
if ia.kind is OCEAN
s += "<tr><th id='tbl"+ia.id+"' style='color:"+bocol[i]+";'>"
s += ia.draw(level+1)
s += @endIdeaTxt(level)
# Теперь рисуем графику
cvv[BLUEOCEAN] =->
v[@id] = {}
v[@id].ww = $("#tbl"+@id).outerWidth(true)
v[@id].hh = $("#tbl"+@id).outerHeight(true)
v[@id].sw = $("#cnv"+@id).position().left
v[@id].sh = $("#cnv"+@id).position().top
v[@id].w = $("#cnv"+@id).outerWidth(true)
v[@id].h = $("#cnv"+@id).outerHeight(true)
$cvv = $("#cvv"+@id)
$cvv.attr "height",v[@id].hh
$cvv.attr "width",v[@id].ww
v[@id].ctx = $cvv[0].getContext("2d")
# console.log v[@id].ctx
for ia in @data
ia.paintOcean(v[@id].ctx)
Idea::paintOcean = (ctx) ->
bocolor = bocol[@ix] # TODO Сделать массив цветов
# console.log bocolor
px = $("#tbl"+@id).outerWidth(true)
py = $("#tbl"+@id).position().top+$("#tbl"+@id).outerHeight(true)/2
# px = 0
# py = 0
for ca in @pa.char
v[ca.id] = {}
v[ca.id].ctx = ctx
v[ca.id].x = $("#tbl"+ca.id).position().left+$("#tbl"+ca.id).outerWidth(true)/2
v[ca.id].y = v[@pa.id].sh + v[@pa.id].h / (ca.length) * (@[ca.id]+0.5) - 10 # Поправка на размер кружка
# TODO Сделать размер кружков константой
x = v[ca.id].x
y = v[ca.id].y
r = 10
ctx.beginPath()
ctx.arc x,y,r,0,2*Math.PI
ctx.closePath()
ctx.fillStyle = bocolor
ctx.fill()
if px > 0
# Теперь рисуем линию
ctx.beginPath()
ctx.moveTo(px,py)
ctx.lineTo(v[ca.id].x,v[ca.id].y)
ctx.strokeStyle = bocolor
ctx.lineWidth = 4
ctx.stroke()
# as = "M #{px} #{py} L #{x} #{y}"
# g[@id+"l"+ca.id] = ppr.path as
# g[@id+"l"+ca.id].attr
# "stroke": bocolor
# "stroke-opacity": 0.5
# "stroke-width":4
# "fill":"90-#FF9-#FFE"
# "fill-opacity": 1
px = v[ca.id].x
py = v[ca.id].y
# Завершающая линия
# x = $("#tbl"+@PI:EMAIL:<EMAIL>END_PI).outerWidth(true)
# y = $("#tbl"+@id).position().top+$("#tbl"+@id).outerHeight(true)/2 - 10 # Попровка на размер
# as = "M #{px} #{py} L #{x} #{y}"
# g[@id+"l"+ca.PI:EMAIL:<EMAIL>END_PI] = ppr.path as
# g[@id+"l"+caPI:EMAIL:<EMAIL>END_PI.id].attr
# "stroke": bocolor
# "stroke-opacity": 0.5
# "stroke-width":4
# "fill":"90-#FF9-#FFE"
# "fill-opacity": 1
draw[OCEAN] = (level) -> @simpleIdeaTxt(level)
draw[CHAR] = (level) -> @simpleIdeaTxt(level)
#keyp[BLUEOCEAN][KEY_DOWN] =->
# if @[0]? then return @[0].select()
# @
|
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.9999114274978638,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
},
{
"context": ",\n el UserCard,\n user: user\n ... | resources/assets/coffee/react/beatmap-discussions/discussion.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import { NewReply } from './new-reply'
import { Post } from './post'
import { SystemPost } from './system-post'
import { UserCard } from './user-card'
import mapperGroup from 'beatmap-discussions/mapper-group'
import * as React from 'react'
import { button, div, i, span, a } from 'react-dom-factories'
import { UserAvatar } from 'user-avatar'
el = React.createElement
bn = 'beatmap-discussion'
export class Discussion extends React.PureComponent
constructor: (props) ->
super props
@eventId = "beatmap-discussion-entry-#{@props.discussion.id}"
@tooltips = {}
@state =
collapsed: false
highlighted: false
componentWillMount: =>
$.subscribe "beatmapDiscussionEntry:collapse.#{@eventId}", @setCollapse
$.subscribe "beatmapDiscussionEntry:highlight.#{@eventId}", @setHighlight
componentWillUnmount: =>
$.unsubscribe ".#{@eventId}"
@voteXhr?.abort()
componentDidUpdate: =>
_.each @tooltips, (tooltip, type) =>
@refreshTooltip(tooltip.qtip('api'), type)
render: =>
return null if !@isVisible(@props.discussion)
return null if !@props.discussion.starting_post && (!@props.discussion.posts || @props.discussion.posts.length == 0)
topClasses = "#{bn} js-beatmap-discussion-jump"
topClasses += " #{bn}--highlighted" if @state.highlighted
topClasses += " #{bn}--deleted" if @props.discussion.deleted_at?
topClasses += " #{bn}--timeline" if @props.discussion.timestamp?
topClasses += " #{bn}--preview" if @props.preview
topClasses += " #{bn}--review" if @props.discussion.message_type == 'review'
lineClasses = "#{bn}__line"
lineClasses += " #{bn}__line--resolved" if @props.discussion.resolved
lastResolvedState = false
@_resolvedSystemPostId = null
firstPost = @props.discussion.starting_post || @props.discussion.posts[0]
user = @props.users[@props.discussion.user_id]
badge = if user.id == @props.beatmapset.user_id then mapperGroup else user.group_badge
topClasses += " #{bn}--unread" unless _.includes(@props.readPostIds, firstPost.id) || @isOwner(firstPost) || @props.preview
div
className: topClasses
'data-id': @props.discussion.id
onClick: @emitSetHighlight
div className: "#{bn}__timestamp hidden-xs",
@timestamp()
div className: "#{bn}__compact",
div className: "#{bn}__discussion",
div
className: "#{bn}__top"
style:
color: osu.groupColour(badge)
div className: "#{bn}__discussion-header",
el UserCard,
user: user
badge: badge
hideStripe: true
@postButtons() if !@props.preview
div className: "#{bn}__review-wrapper",
@post firstPost, 'discussion', true
@postFooter() if !@props.preview
div className: lineClasses
div className: "#{bn}__full",
div className: "#{bn}__discussion",
div className: "#{bn}__top",
@post firstPost, 'discussion'
@postButtons() if !@props.preview
@postFooter() if !@props.preview
div className: lineClasses
postButtons: =>
div className: "#{bn}__actions-container",
div className: "#{bn}__actions",
if @props.parentDiscussion?
a
href: BeatmapDiscussionHelper.url({discussion: @props.parentDiscussion})
title: osu.trans('beatmap_discussions.review.go_to_parent')
className: "#{bn}__link-to-parent",
i className: 'fas fa-tasks'
['up', 'down'].map (type) =>
div
key: type
type: type
className: "#{bn}__action"
onMouseOver: @showVoters
onTouchStart: @showVoters
@displayVote type
@voterList type
button
className: "#{bn}__action #{bn}__action--with-line"
onClick: @toggleExpand
div
className: "beatmap-discussion-expand #{'beatmap-discussion-expand--expanded' if !@state.collapsed}"
i className: 'fas fa-chevron-down'
postFooter: =>
div
className: "#{bn}__expanded #{'hidden' if @state.collapsed}"
div
className: "#{bn}__replies"
for reply in @props.discussion.posts.slice(1)
continue unless @isVisible(reply)
if reply.system && reply.message.type == 'resolved'
currentResolvedState = reply.message.value
continue if lastResolvedState == currentResolvedState
lastResolvedState = currentResolvedState
@post reply, 'reply'
if @canBeRepliedTo()
el NewReply,
currentUser: @props.currentUser
beatmapset: @props.beatmapset
currentBeatmap: @props.currentBeatmap
discussion: @props.discussion
displayVote: (type) =>
vbn = 'beatmap-discussion-vote'
[baseScore, icon] = switch type
when 'up' then [1, 'thumbs-up']
when 'down' then [-1, 'thumbs-down']
return if !baseScore?
currentVote = @props.discussion.current_user_attributes?.vote_score
score = if currentVote == baseScore then 0 else baseScore
topClasses = "#{vbn} #{vbn}--#{type}"
topClasses += " #{vbn}--inactive" if score != 0
disabled = @isOwner() || (type == 'down' && !@canDownvote()) || !@canBeRepliedTo()
button
className: topClasses
'data-score': score
disabled: disabled
onClick: @doVote
i className: "fas fa-#{icon}"
span className: "#{vbn}__count",
@props.discussion.votes[type]
voterList: (type) =>
div
className: "user-list-popup user-list-popup__template js-user-list-popup--#{@props.discussion.id}-#{type}"
style:
display: 'none'
if @props.discussion.votes[type] < 1
osu.trans "beatmaps.discussions.votes.none.#{type}"
else
el React.Fragment, null,
div className: 'user-list-popup__title',
osu.trans("beatmaps.discussions.votes.latest.#{type}")
':'
@props.discussion.votes['voters'][type].map (userId) =>
a
href: laroute.route('users.show', user: userId)
className: 'js-usercard user-list-popup__user'
key: userId
'data-user-id': userId
el UserAvatar, user: @props.users[userId] ? [], modifiers: ['full']
if @props.discussion.votes[type] > @props.discussion.votes['voters'][type].length
div className: 'user-list-popup__remainder-count',
osu.transChoice 'common.count.plus_others', @props.discussion.votes[type] - @props.discussion.votes['voters'][type].length
getTooltipContent: (type) =>
$(".js-user-list-popup--#{@props.discussion.id}-#{type}").html()
refreshTooltip: (api, type) =>
return unless api
api.set('content.text', @getTooltipContent(type))
showVoters: (event) =>
target = event.currentTarget
if @props.favcount < 1 || target._tooltip
return
target._tooltip = true
type = target.getAttribute('type')
@tooltips[type] =
$(target).qtip
style:
classes: 'user-list-popup'
def: false
tip: false
content:
text: (event, api) => @getTooltipContent(type)
position:
at: 'top center'
my: 'bottom center'
viewport: $(window)
show:
delay: 100
ready: true
solo: true
effect: -> $(this).fadeTo(110, 1)
hide:
fixed: true
delay: 500
effect: -> $(this).fadeTo(250, 0)
  # Submit the clicked button's data-score to the vote endpoint, then
  # publish the updated beatmapset so the discussions page re-renders.
  doVote: (e) =>
    LoadingOverlay.show()

    # at most one vote request in flight at a time
    @voteXhr?.abort()

    @voteXhr = $.ajax laroute.route('beatmap-discussions.vote', beatmap_discussion: @props.discussion.id),
      method: 'PUT',
      data:
        beatmap_discussion_vote:
          score: e.currentTarget.dataset.score

    .done (data) =>
      $.publish 'beatmapsetDiscussions:update', beatmapset: data

    .fail osu.ajaxError

    .always LoadingOverlay.hide
emitSetHighlight: =>
$.publish 'beatmapDiscussionEntry:highlight', id: @props.discussion.id
isOwner: (object = @props.discussion) =>
@props.currentUser.id? && object.user_id == @props.currentUser.id
isVisible: (object) =>
object? && (@props.showDeleted || !object.deleted_at?)
canDownvote: =>
@props.currentUser.is_admin || @props.currentUser.is_moderator || @props.currentUser.is_bng
canBeRepliedTo: =>
(!@props.beatmapset.discussion_locked || BeatmapDiscussionHelper.canModeratePosts(@props.currentUser)) &&
(!@props.discussion.beatmap_id? || !@props.currentBeatmap.deleted_at?)
  # Render a single post of this discussion.
  # type: 'discussion' for the starting post, 'reply' for the rest.
  # hideUserCard: suppress the per-post user card (compact layout).
  post: (post, type, hideUserCard) =>
    return if !post.id?

    elementName = if post.system then SystemPost else Post

    canModeratePosts = BeatmapDiscussionHelper.canModeratePosts(@props.currentUser)
    # owners may only edit posts newer than the last "resolved" system post,
    # and never while the beatmapset's discussions are locked.
    canBeEdited = @isOwner(post) && post.id > @resolvedSystemPostId() && !@props.beatmapset.discussion_locked
    canBeDeleted =
      if type == 'discussion'
        @props.discussion.current_user_attributes?.can_destroy
      else
        canModeratePosts || canBeEdited

    el elementName,
      key: post.id
      beatmapset: @props.beatmapset
      beatmap: @props.currentBeatmap
      discussion: @props.discussion
      post: post
      type: type
      # own posts and preview mode always count as read.
      read: _.includes(@props.readPostIds, post.id) || @isOwner(post) || @props.preview
      users: @props.users
      user: @props.users[post.user_id]
      lastEditor: @props.users[post.last_editor_id]
      canBeEdited: @props.currentUser.is_admin || canBeEdited
      canBeDeleted: canBeDeleted
      canBeRestored: canModeratePosts
      currentUser: @props.currentUser
      hideUserCard: hideUserCard
resolvedSystemPostId: =>
if !@_resolvedSystemPostId?
systemPost = _.findLast(@props.discussion.posts, (post) -> post.system && post.message.type == 'resolved')
@_resolvedSystemPostId = systemPost?.id ? -1
return @_resolvedSystemPostId
setCollapse: (_e, {collapse}) =>
return unless @props.visible
newState = collapse == 'collapse'
return if @state.collapsed == newState
@setState collapsed: newState
setHighlight: (_e, {id}) =>
newState = id == @props.discussion.id
return if @state.highlighted == newState
@setState highlighted: newState
  # Render the timestamp column: optional timeline point, message-type
  # icon, resolved marker, and the formatted timestamp text.
  timestamp: =>
    tbn = 'beatmap-discussion-timestamp'

    div className: tbn,
      # the point marker only makes sense on the visible timeline view.
      div(className: "#{tbn}__point") if @props.discussion.timestamp? && @props.isTimelineVisible
      div className: "#{tbn}__icons-container",
        div className: "#{tbn}__icons",
          div className: "#{tbn}__icon",
            span
              className: "beatmap-discussion-message-type beatmap-discussion-message-type--#{_.kebabCase(@props.discussion.message_type)}"
              i className: BeatmapDiscussionHelper.messageType.icon[_.camelCase(@props.discussion.message_type)]

          if @props.discussion.resolved
            div className: "#{tbn}__icon #{tbn}__icon--resolved",
              i className: 'far fa-check-circle'

        div className: "#{tbn}__text",
          BeatmapDiscussionHelper.formatTimestamp @props.discussion.timestamp
toggleExpand: =>
@setState collapsed: !@state.collapsed
| 161651 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import { NewReply } from './new-reply'
import { Post } from './post'
import { SystemPost } from './system-post'
import { UserCard } from './user-card'
import mapperGroup from 'beatmap-discussions/mapper-group'
import * as React from 'react'
import { button, div, i, span, a } from 'react-dom-factories'
import { UserAvatar } from 'user-avatar'
el = React.createElement
bn = 'beatmap-discussion'
export class Discussion extends React.PureComponent
constructor: (props) ->
super props
@eventId = "beatmap-discussion-entry-#{@props.discussion.id}"
@tooltips = {}
@state =
collapsed: false
highlighted: false
componentWillMount: =>
$.subscribe "beatmapDiscussionEntry:collapse.#{@eventId}", @setCollapse
$.subscribe "beatmapDiscussionEntry:highlight.#{@eventId}", @setHighlight
componentWillUnmount: =>
$.unsubscribe ".#{@eventId}"
@voteXhr?.abort()
componentDidUpdate: =>
_.each @tooltips, (tooltip, type) =>
@refreshTooltip(tooltip.qtip('api'), type)
render: =>
return null if !@isVisible(@props.discussion)
return null if !@props.discussion.starting_post && (!@props.discussion.posts || @props.discussion.posts.length == 0)
topClasses = "#{bn} js-beatmap-discussion-jump"
topClasses += " #{bn}--highlighted" if @state.highlighted
topClasses += " #{bn}--deleted" if @props.discussion.deleted_at?
topClasses += " #{bn}--timeline" if @props.discussion.timestamp?
topClasses += " #{bn}--preview" if @props.preview
topClasses += " #{bn}--review" if @props.discussion.message_type == 'review'
lineClasses = "#{bn}__line"
lineClasses += " #{bn}__line--resolved" if @props.discussion.resolved
lastResolvedState = false
@_resolvedSystemPostId = null
firstPost = @props.discussion.starting_post || @props.discussion.posts[0]
user = @props.users[@props.discussion.user_id]
badge = if user.id == @props.beatmapset.user_id then mapperGroup else user.group_badge
topClasses += " #{bn}--unread" unless _.includes(@props.readPostIds, firstPost.id) || @isOwner(firstPost) || @props.preview
div
className: topClasses
'data-id': @props.discussion.id
onClick: @emitSetHighlight
div className: "#{bn}__timestamp hidden-xs",
@timestamp()
div className: "#{bn}__compact",
div className: "#{bn}__discussion",
div
className: "#{bn}__top"
style:
color: osu.groupColour(badge)
div className: "#{bn}__discussion-header",
el UserCard,
user: user
badge: badge
hideStripe: true
@postButtons() if !@props.preview
div className: "#{bn}__review-wrapper",
@post firstPost, 'discussion', true
@postFooter() if !@props.preview
div className: lineClasses
div className: "#{bn}__full",
div className: "#{bn}__discussion",
div className: "#{bn}__top",
@post firstPost, 'discussion'
@postButtons() if !@props.preview
@postFooter() if !@props.preview
div className: lineClasses
postButtons: =>
div className: "#{bn}__actions-container",
div className: "#{bn}__actions",
if @props.parentDiscussion?
a
href: BeatmapDiscussionHelper.url({discussion: @props.parentDiscussion})
title: osu.trans('beatmap_discussions.review.go_to_parent')
className: "#{bn}__link-to-parent",
i className: 'fas fa-tasks'
['up', 'down'].map (type) =>
div
key: type
type: type
className: "#{bn}__action"
onMouseOver: @showVoters
onTouchStart: @showVoters
@displayVote type
@voterList type
button
className: "#{bn}__action #{bn}__action--with-line"
onClick: @toggleExpand
div
className: "beatmap-discussion-expand #{'beatmap-discussion-expand--expanded' if !@state.collapsed}"
i className: 'fas fa-chevron-down'
postFooter: =>
div
className: "#{bn}__expanded #{'hidden' if @state.collapsed}"
div
className: "#{bn}__replies"
for reply in @props.discussion.posts.slice(1)
continue unless @isVisible(reply)
if reply.system && reply.message.type == 'resolved'
currentResolvedState = reply.message.value
continue if lastResolvedState == currentResolvedState
lastResolvedState = currentResolvedState
@post reply, 'reply'
if @canBeRepliedTo()
el NewReply,
currentUser: @props.currentUser
beatmapset: @props.beatmapset
currentBeatmap: @props.currentBeatmap
discussion: @props.discussion
displayVote: (type) =>
vbn = 'beatmap-discussion-vote'
[baseScore, icon] = switch type
when 'up' then [1, 'thumbs-up']
when 'down' then [-1, 'thumbs-down']
return if !baseScore?
currentVote = @props.discussion.current_user_attributes?.vote_score
score = if currentVote == baseScore then 0 else baseScore
topClasses = "#{vbn} #{vbn}--#{type}"
topClasses += " #{vbn}--inactive" if score != 0
disabled = @isOwner() || (type == 'down' && !@canDownvote()) || !@canBeRepliedTo()
button
className: topClasses
'data-score': score
disabled: disabled
onClick: @doVote
i className: "fas fa-#{icon}"
span className: "#{vbn}__count",
@props.discussion.votes[type]
voterList: (type) =>
div
className: "user-list-popup user-list-popup__template js-user-list-popup--#{@props.discussion.id}-#{type}"
style:
display: 'none'
if @props.discussion.votes[type] < 1
osu.trans "beatmaps.discussions.votes.none.#{type}"
else
el React.Fragment, null,
div className: 'user-list-popup__title',
osu.trans("beatmaps.discussions.votes.latest.#{type}")
':'
@props.discussion.votes['voters'][type].map (userId) =>
a
href: laroute.route('users.show', user: userId)
className: 'js-usercard user-list-popup__user'
key: userId
'data-user-id': userId
el UserAvatar, user: @props.users[userId] ? [], modifiers: ['full']
if @props.discussion.votes[type] > @props.discussion.votes['voters'][type].length
div className: 'user-list-popup__remainder-count',
osu.transChoice 'common.count.plus_others', @props.discussion.votes[type] - @props.discussion.votes['voters'][type].length
getTooltipContent: (type) =>
$(".js-user-list-popup--#{@props.discussion.id}-#{type}").html()
refreshTooltip: (api, type) =>
return unless api
api.set('content.text', @getTooltipContent(type))
showVoters: (event) =>
target = event.currentTarget
if @props.favcount < 1 || target._tooltip
return
target._tooltip = true
type = target.getAttribute('type')
@tooltips[type] =
$(target).qtip
style:
classes: 'user-list-popup'
def: false
tip: false
content:
text: (event, api) => @getTooltipContent(type)
position:
at: 'top center'
my: 'bottom center'
viewport: $(window)
show:
delay: 100
ready: true
solo: true
effect: -> $(this).fadeTo(110, 1)
hide:
fixed: true
delay: 500
effect: -> $(this).fadeTo(250, 0)
doVote: (e) =>
LoadingOverlay.show()
@voteXhr?.abort()
@voteXhr = $.ajax laroute.route('beatmap-discussions.vote', beatmap_discussion: @props.discussion.id),
method: 'PUT',
data:
beatmap_discussion_vote:
score: e.currentTarget.dataset.score
.done (data) =>
$.publish 'beatmapsetDiscussions:update', beatmapset: data
.fail osu.ajaxError
.always LoadingOverlay.hide
emitSetHighlight: =>
$.publish 'beatmapDiscussionEntry:highlight', id: @props.discussion.id
isOwner: (object = @props.discussion) =>
@props.currentUser.id? && object.user_id == @props.currentUser.id
isVisible: (object) =>
object? && (@props.showDeleted || !object.deleted_at?)
canDownvote: =>
@props.currentUser.is_admin || @props.currentUser.is_moderator || @props.currentUser.is_bng
canBeRepliedTo: =>
(!@props.beatmapset.discussion_locked || BeatmapDiscussionHelper.canModeratePosts(@props.currentUser)) &&
(!@props.discussion.beatmap_id? || !@props.currentBeatmap.deleted_at?)
post: (post, type, hideUserCard) =>
return if !post.id?
elementName = if post.system then SystemPost else Post
canModeratePosts = BeatmapDiscussionHelper.canModeratePosts(@props.currentUser)
canBeEdited = @isOwner(post) && post.id > @resolvedSystemPostId() && !@props.beatmapset.discussion_locked
canBeDeleted =
if type == 'discussion'
@props.discussion.current_user_attributes?.can_destroy
else
canModeratePosts || canBeEdited
el elementName,
key: post.id
beatmapset: @props.beatmapset
beatmap: @props.currentBeatmap
discussion: @props.discussion
post: post
type: type
read: _.includes(@props.readPostIds, post.id) || @isOwner(post) || @props.preview
users: @props.users
user: @props.users[post.user_id]
lastEditor: @props.users[post.last_editor_id]
canBeEdited: @props.currentUser.is_admin || canBeEdited
canBeDeleted: canBeDeleted
canBeRestored: canModeratePosts
currentUser: @props.currentUser
hideUserCard: hideUserCard
resolvedSystemPostId: =>
if !@_resolvedSystemPostId?
systemPost = _.findLast(@props.discussion.posts, (post) -> post.system && post.message.type == 'resolved')
@_resolvedSystemPostId = systemPost?.id ? -1
return @_resolvedSystemPostId
setCollapse: (_e, {collapse}) =>
return unless @props.visible
newState = collapse == 'collapse'
return if @state.collapsed == newState
@setState collapsed: newState
setHighlight: (_e, {id}) =>
newState = id == @props.discussion.id
return if @state.highlighted == newState
@setState highlighted: newState
timestamp: =>
tbn = 'beatmap-discussion-timestamp'
div className: tbn,
div(className: "#{tbn}__point") if @props.discussion.timestamp? && @props.isTimelineVisible
div className: "#{tbn}__icons-container",
div className: "#{tbn}__icons",
div className: "#{tbn}__icon",
span
className: "beatmap-discussion-message-type beatmap-discussion-message-type--#{_.kebabCase(@props.discussion.message_type)}"
i className: BeatmapDiscussionHelper.messageType.icon[_.camelCase(@props.discussion.message_type)]
if @props.discussion.resolved
div className: "#{tbn}__icon #{tbn}__icon--resolved",
i className: 'far fa-check-circle'
div className: "#{tbn}__text",
BeatmapDiscussionHelper.formatTimestamp @props.discussion.timestamp
toggleExpand: =>
@setState collapsed: !@state.collapsed
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import { NewReply } from './new-reply'
import { Post } from './post'
import { SystemPost } from './system-post'
import { UserCard } from './user-card'
import mapperGroup from 'beatmap-discussions/mapper-group'
import * as React from 'react'
import { button, div, i, span, a } from 'react-dom-factories'
import { UserAvatar } from 'user-avatar'
el = React.createElement
bn = 'beatmap-discussion'
export class Discussion extends React.PureComponent
constructor: (props) ->
super props
@eventId = "beatmap-discussion-entry-#{@props.discussion.id}"
@tooltips = {}
@state =
collapsed: false
highlighted: false
componentWillMount: =>
$.subscribe "beatmapDiscussionEntry:collapse.#{@eventId}", @setCollapse
$.subscribe "beatmapDiscussionEntry:highlight.#{@eventId}", @setHighlight
componentWillUnmount: =>
$.unsubscribe ".#{@eventId}"
@voteXhr?.abort()
componentDidUpdate: =>
_.each @tooltips, (tooltip, type) =>
@refreshTooltip(tooltip.qtip('api'), type)
render: =>
return null if !@isVisible(@props.discussion)
return null if !@props.discussion.starting_post && (!@props.discussion.posts || @props.discussion.posts.length == 0)
topClasses = "#{bn} js-beatmap-discussion-jump"
topClasses += " #{bn}--highlighted" if @state.highlighted
topClasses += " #{bn}--deleted" if @props.discussion.deleted_at?
topClasses += " #{bn}--timeline" if @props.discussion.timestamp?
topClasses += " #{bn}--preview" if @props.preview
topClasses += " #{bn}--review" if @props.discussion.message_type == 'review'
lineClasses = "#{bn}__line"
lineClasses += " #{bn}__line--resolved" if @props.discussion.resolved
lastResolvedState = false
@_resolvedSystemPostId = null
firstPost = @props.discussion.starting_post || @props.discussion.posts[0]
user = @props.users[@props.discussion.user_id]
badge = if user.id == @props.beatmapset.user_id then mapperGroup else user.group_badge
topClasses += " #{bn}--unread" unless _.includes(@props.readPostIds, firstPost.id) || @isOwner(firstPost) || @props.preview
div
className: topClasses
'data-id': @props.discussion.id
onClick: @emitSetHighlight
div className: "#{bn}__timestamp hidden-xs",
@timestamp()
div className: "#{bn}__compact",
div className: "#{bn}__discussion",
div
className: "#{bn}__top"
style:
color: osu.groupColour(badge)
div className: "#{bn}__discussion-header",
el UserCard,
user: user
badge: badge
hideStripe: true
@postButtons() if !@props.preview
div className: "#{bn}__review-wrapper",
@post firstPost, 'discussion', true
@postFooter() if !@props.preview
div className: lineClasses
div className: "#{bn}__full",
div className: "#{bn}__discussion",
div className: "#{bn}__top",
@post firstPost, 'discussion'
@postButtons() if !@props.preview
@postFooter() if !@props.preview
div className: lineClasses
postButtons: =>
div className: "#{bn}__actions-container",
div className: "#{bn}__actions",
if @props.parentDiscussion?
a
href: BeatmapDiscussionHelper.url({discussion: @props.parentDiscussion})
title: osu.trans('beatmap_discussions.review.go_to_parent')
className: "#{bn}__link-to-parent",
i className: 'fas fa-tasks'
['up', 'down'].map (type) =>
div
key: type
type: type
className: "#{bn}__action"
onMouseOver: @showVoters
onTouchStart: @showVoters
@displayVote type
@voterList type
button
className: "#{bn}__action #{bn}__action--with-line"
onClick: @toggleExpand
div
className: "beatmap-discussion-expand #{'beatmap-discussion-expand--expanded' if !@state.collapsed}"
i className: 'fas fa-chevron-down'
postFooter: =>
div
className: "#{bn}__expanded #{'hidden' if @state.collapsed}"
div
className: "#{bn}__replies"
for reply in @props.discussion.posts.slice(1)
continue unless @isVisible(reply)
if reply.system && reply.message.type == 'resolved'
currentResolvedState = reply.message.value
continue if lastResolvedState == currentResolvedState
lastResolvedState = currentResolvedState
@post reply, 'reply'
if @canBeRepliedTo()
el NewReply,
currentUser: @props.currentUser
beatmapset: @props.beatmapset
currentBeatmap: @props.currentBeatmap
discussion: @props.discussion
displayVote: (type) =>
vbn = 'beatmap-discussion-vote'
[baseScore, icon] = switch type
when 'up' then [1, 'thumbs-up']
when 'down' then [-1, 'thumbs-down']
return if !baseScore?
currentVote = @props.discussion.current_user_attributes?.vote_score
score = if currentVote == baseScore then 0 else baseScore
topClasses = "#{vbn} #{vbn}--#{type}"
topClasses += " #{vbn}--inactive" if score != 0
disabled = @isOwner() || (type == 'down' && !@canDownvote()) || !@canBeRepliedTo()
button
className: topClasses
'data-score': score
disabled: disabled
onClick: @doVote
i className: "fas fa-#{icon}"
span className: "#{vbn}__count",
@props.discussion.votes[type]
voterList: (type) =>
div
className: "user-list-popup user-list-popup__template js-user-list-popup--#{@props.discussion.id}-#{type}"
style:
display: 'none'
if @props.discussion.votes[type] < 1
osu.trans "beatmaps.discussions.votes.none.#{type}"
else
el React.Fragment, null,
div className: 'user-list-popup__title',
osu.trans("beatmaps.discussions.votes.latest.#{type}")
':'
@props.discussion.votes['voters'][type].map (userId) =>
a
href: laroute.route('users.show', user: userId)
className: 'js-usercard user-list-popup__user'
key: userId
'data-user-id': userId
el UserAvatar, user: @props.users[userId] ? [], modifiers: ['full']
if @props.discussion.votes[type] > @props.discussion.votes['voters'][type].length
div className: 'user-list-popup__remainder-count',
osu.transChoice 'common.count.plus_others', @props.discussion.votes[type] - @props.discussion.votes['voters'][type].length
getTooltipContent: (type) =>
$(".js-user-list-popup--#{@props.discussion.id}-#{type}").html()
refreshTooltip: (api, type) =>
return unless api
api.set('content.text', @getTooltipContent(type))
showVoters: (event) =>
target = event.currentTarget
if @props.favcount < 1 || target._tooltip
return
target._tooltip = true
type = target.getAttribute('type')
@tooltips[type] =
$(target).qtip
style:
classes: 'user-list-popup'
def: false
tip: false
content:
text: (event, api) => @getTooltipContent(type)
position:
at: 'top center'
my: 'bottom center'
viewport: $(window)
show:
delay: 100
ready: true
solo: true
effect: -> $(this).fadeTo(110, 1)
hide:
fixed: true
delay: 500
effect: -> $(this).fadeTo(250, 0)
doVote: (e) =>
LoadingOverlay.show()
@voteXhr?.abort()
@voteXhr = $.ajax laroute.route('beatmap-discussions.vote', beatmap_discussion: @props.discussion.id),
method: 'PUT',
data:
beatmap_discussion_vote:
score: e.currentTarget.dataset.score
.done (data) =>
$.publish 'beatmapsetDiscussions:update', beatmapset: data
.fail osu.ajaxError
.always LoadingOverlay.hide
emitSetHighlight: =>
$.publish 'beatmapDiscussionEntry:highlight', id: @props.discussion.id
isOwner: (object = @props.discussion) =>
@props.currentUser.id? && object.user_id == @props.currentUser.id
isVisible: (object) =>
object? && (@props.showDeleted || !object.deleted_at?)
canDownvote: =>
@props.currentUser.is_admin || @props.currentUser.is_moderator || @props.currentUser.is_bng
canBeRepliedTo: =>
(!@props.beatmapset.discussion_locked || BeatmapDiscussionHelper.canModeratePosts(@props.currentUser)) &&
(!@props.discussion.beatmap_id? || !@props.currentBeatmap.deleted_at?)
post: (post, type, hideUserCard) =>
return if !post.id?
elementName = if post.system then SystemPost else Post
canModeratePosts = BeatmapDiscussionHelper.canModeratePosts(@props.currentUser)
canBeEdited = @isOwner(post) && post.id > @resolvedSystemPostId() && !@props.beatmapset.discussion_locked
canBeDeleted =
if type == 'discussion'
@props.discussion.current_user_attributes?.can_destroy
else
canModeratePosts || canBeEdited
el elementName,
key: post.id
beatmapset: @props.beatmapset
beatmap: @props.currentBeatmap
discussion: @props.discussion
post: post
type: type
read: _.includes(@props.readPostIds, post.id) || @isOwner(post) || @props.preview
users: @props.users
user: @props.users[post.user_id]
lastEditor: @props.users[post.last_editor_id]
canBeEdited: @props.currentUser.is_admin || canBeEdited
canBeDeleted: canBeDeleted
canBeRestored: canModeratePosts
currentUser: @props.currentUser
hideUserCard: hideUserCard
resolvedSystemPostId: =>
if !@_resolvedSystemPostId?
systemPost = _.findLast(@props.discussion.posts, (post) -> post.system && post.message.type == 'resolved')
@_resolvedSystemPostId = systemPost?.id ? -1
return @_resolvedSystemPostId
setCollapse: (_e, {collapse}) =>
return unless @props.visible
newState = collapse == 'collapse'
return if @state.collapsed == newState
@setState collapsed: newState
setHighlight: (_e, {id}) =>
newState = id == @props.discussion.id
return if @state.highlighted == newState
@setState highlighted: newState
timestamp: =>
tbn = 'beatmap-discussion-timestamp'
div className: tbn,
div(className: "#{tbn}__point") if @props.discussion.timestamp? && @props.isTimelineVisible
div className: "#{tbn}__icons-container",
div className: "#{tbn}__icons",
div className: "#{tbn}__icon",
span
className: "beatmap-discussion-message-type beatmap-discussion-message-type--#{_.kebabCase(@props.discussion.message_type)}"
i className: BeatmapDiscussionHelper.messageType.icon[_.camelCase(@props.discussion.message_type)]
if @props.discussion.resolved
div className: "#{tbn}__icon #{tbn}__icon--resolved",
i className: 'far fa-check-circle'
div className: "#{tbn}__text",
BeatmapDiscussionHelper.formatTimestamp @props.discussion.timestamp
toggleExpand: =>
@setState collapsed: !@state.collapsed
|
[
{
"context": " so that it's easy to test non-sandbox mode (use \"127.0.0.1\" instead).\n#\n# You can optionally pass in a hostn",
"end": 9170,
"score": 0.9997642040252686,
"start": 9161,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": "ndbox/icon.png'\nTabCAT.Console.sandbo... | console/js/tabcat-console.coffee | jhowe-uw/tabcat | 0 | ###
Copyright (c) 2014, Regents of the University of California
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###
# Functions only used by the TabCAT console, and only available from
# there. Used to be in TabCAT.UI.
@TabCAT ?= {}
TabCAT.Console = {}
# INITIALIZATION
# default fallback language, for i18n
DEFAULT_FALLBACK_LNG = 'en'
# call this first. Analogous to TabCAT.Task.start()
#
# this sets up i18n, starts sync of spilled docs, and updates
# the status bar (once the page is ready)
TabCAT.Console.start = _.once((options) ->
# set up i18n
i18n_options = _.extend(
{fallbackLng: DEFAULT_FALLBACK_LNG, resStore: {}},
options?.i18n)
$.i18n.init(i18n_options)
# sync spilled docs
TabCAT.DB.startSpilledDocSync()
# update status bar
$(TabCAT.Console.updateStatusBar)
)
# STATUS BAR
# warn when local storage is more than 75% full
# typical tasks use 0.5% of browser storage
LOCAL_STORAGE_WARNING_THRESHOLD = 75
# keep status messages for at least a second
OFFLINE_STATUS_MIN_CHANGE_TIME = 2000
# DB where design docs and task content is stored
TABCAT_DB = 'tabcat'
keepOfflineStatusUntil = null
lastOfflineStatusType = 0
# update the statusBar div, populating it if necessary
#
# this also implicitly unsets patientHasDevice
#
# this is also responsible for swapping in updated versions of the
# application cache (Android browser seems to need this)
TabCAT.Console.updateStatusBar = ->
TabCAT.Task.patientHasDevice(false)
$statusBar = $('#statusBar')
if TabCAT.Console.inSandbox()
$statusBar.addClass('sandbox')
# populate with new HTML if we didn't already
if $statusBar.find('div.left').length is 0
$statusBar.html(
"""
<div class="left">
<span class="banner"></span>
<span class="version"></span>
<p class="offline"></p>
</div>
<div class="right">
<p class="email"> </p>
<button class="login" style="display:none"></button>
<button class="languageButton" id="englishButton">English</button>
<button class="languageButton" id="spanishButton">Spanish</button>
</div>
<div class="center">
<p class="encounter"></p>
<p class="clock"></p>
</div>
"""
)
#temporary way to switch between english and spanish
$('#englishButton').touchdown =>
window.localStorage.currentLanguage = 'en'
$('#spanishButton').touchdown =>
window.localStorage.currentLanguage = 'es'
$statusBar.find('.version').text(TabCAT.version)
$statusBar.find('button.login').on('click', (event) ->
button = $(event.target)
if button.text() == 'Log Out'
$emailP = $statusBar.find('p.email')
# do something even if logout is slow
oldContents = $emailP.html()
$statusBar.find('p.email').text('Logging out...')
TabCAT.UI.logout()
# this only happens if user decides not to log out
$emailP.html(oldContents)
else
TabCAT.UI.requestLogin()
)
$emailP = $statusBar.find('p.email')
$button = $statusBar.find('button.login')
$encounterP = $statusBar.find('p.encounter')
user = TabCAT.User.get()
if user?
$emailP.text(user)
$button.text('Log Out')
else
$emailP.text('not logged in')
$button.text('Log In')
$button.show()
# only check offline status occasionally
updateOfflineStatus()
TabCAT.Console.updateStatusBar.offlineInterval = window.setInterval(
updateOfflineStatus, 500)
# don't show encounter info unless patient is logged in
patientCode = TabCAT.Encounter.getPatientCode()
if patientCode? and user?
encounterNum = TabCAT.Encounter.getNum()
encounterNumText = if encounterNum? then ' #' + encounterNum else ''
$encounterP.text(
'Encounter' + encounterNumText + ' with Patient ' + patientCode)
if not TabCAT.Console.updateStatusBar.clockInterval?
TabCAT.Console.updateStatusBar.clockInterval = window.setInterval(
updateEncounterClock, 50)
else
$encounterP.empty()
if TabCAT.Console.updateStatusBar.clockInterval?
window.clearInterval(TabCAT.Console.updateStatusBar.clockInterval)
$statusBar.find('p.clock').empty()
# update the encounter clock on the statusBar
updateEncounterClock = ->
# handle end of encounter gracefully
if TabCAT.Encounter.isOpen()
now = TabCAT.Clock.now()
seconds = Math.floor(now / 1000) % 60
if seconds < 10
seconds = '0' + seconds
minutes = Math.floor(now / 60000) % 60
if minutes < 10
minutes = '0' + minutes
hours = Math.floor(now / 3600000)
time = hours + ':' + minutes + ':' + seconds
$('#statusBar p.clock').text(time)
else
$('#statusBar p.clock').empty()
# update the offline status on the statusBar, while attempting not
# to flicker status messages so quickly that we can't read them
updateOfflineStatus = ->
now = $.now()
[statusType, statusHtml] = offlineStatusTypeAndHtml()
if (keepOfflineStatusUntil? and now < keepOfflineStatusUntil \
and statusType isnt lastOfflineStatusType)
return
# don't bother holding blank message for a second
if statusHtml
lastOfflineStatusType = statusType
keepOfflineStatusUntil = now + OFFLINE_STATUS_MIN_CHANGE_TIME
$('#statusBar').find('p.offline').html(statusHtml)
# return the type of offline status and html to display.
#
# This also swaps in an updated application cache, if necessary
offlineStatusTypeAndHtml = ->
now = $.now()
appcache = window.applicationCache
# if there's an updated version of the cache ready, swap it in
if appcache.status is appcache.UPDATEREADY
appcache.swapCache()
if navigator.onLine is false
if (appcache.status is appcache.UNCACHED or \
appcache.status >= appcache.OBSOLETE)
return [1, '<span class="warning">PLEASE CONNECT TO NETWORK</span>']
else
percentFullHtml = offlineStatusStoragePercentFullHtml()
if percentFullHtml
return [2, 'OFFLINE MODE (storage ' + percentFullHtml + ')']
else
return [2, 'OFFLINE MODE']
if appcache.status is appcache.DOWNLOADING
return [3, 'loading content for offline mode']
if (appcache.status is appcache.UNCACHED or \
appcache.status >= appcache.OBSOLETE)
return [4, '<span class="warning">offline mode unavailable</span>']
# not exactly offline, but can't sync (maybe wrong network?)
percentFullHtml = offlineStatusStoragePercentFullHtml()
if percentFullHtml
return [5, 'offline storage ' + percentFullHtml]
return [0, '']
# helper for offlineStatusHtml(). returns "#.#% full" plus markup
offlineStatusStoragePercentFullHtml = ->
if not TabCAT.DB.spilledDocsRemain()
return ''
percentFull = TabCAT.DB.percentOfLocalStorageUsed()
percentFullHtml = Math.min(percentFull, 100).toFixed(1) + '% full'
if percentFull >= LOCAL_STORAGE_WARNING_THRESHOLD
percentFullHtml = '<span class="warning">' + percentFullHtml + '</span>'
return percentFullHtml
TabCAT.Console.DEFAULT_TASK_ICON_URL = (
"/#{TABCAT_DB}/_design/console/img/icon.png")
# extract icon URL from task info (from TabCAT.Task.getTaskInfo())
TabCAT.Console.getTaskIconUrl = (task) ->
if task.designDocId? and task.icon?
"/#{TABCAT_DB}/#{task.designDocId}/#{task.icon}"
else
DEFAULT_TASK_ICON_URL
# extract start URL from task info (from TabCAT.Task.getTaskInfo())
TabCAT.Console.getTaskStartUrl = (task) ->
if task.designDocId? and task.start?
"/#{TABCAT_DB}/#{task.designDocId}/#{task.start}"
# used by inSandbox()
SANDBOX_REGEX = \
/(sandbox|^\d+\.\d+\.\d+\.\d+$)/i
# Infer from the hostname whether we're in sandbox mode. This happens if it
# contains "sandbox" or is an IP address.
#
# Sandbox mode is meant to only affect the UI: different warning messages,
# pre-filled form inputs, etc.
#
# We intentially don't do anything for the hostname "localhost"
# so that it's easy to test non-sandbox mode (use "127.0.0.1" instead).
#
# You can optionally pass in a hostname (by default we use
# window.location.hostname).
TabCAT.Console.inSandbox = (hostname) ->
SANDBOX_REGEX.test(hostname ? window.location.hostname)
# constants for sandbox mode
TabCAT.Console.sandboxIcon = 'img/sandbox/icon.png'
TabCAT.Console.sandboxPassword = 's@ndbox'
TabCAT.Console.sandboxTitle = 'TabCAT Sandbox'
TabCAT.Console.sandboxUser = 's@ndbox'
# displaying scores
SCORE_HTML = '''
<div class="score">
<div class="scoreHeader">
<span class="description"></span>
</div>
<div class="scoreBody">
<div class="rawScore">
<p class="description">Raw Score</p>
<p class="value"></p>
</div>
<div class="catchTrialScore">
<p class="description">Catch Trial Score</p>
<p class="value"></p>
</div>
<div class="norms">
<table class="norm">
<thead>
<tr>
<th class="age">Age</th>
<th class="mean">Mean</th>
<th class="stddev">Std. Dev.</th>
<th class="percentile">Percentile</th>
</tr>
</thead>
<tbody>
</tbody>
</table>
</div>
</div>
</div>
'''
NORM_HTML = '''
<tr>
<td class="age"></td>
<td class="mean"></td>
<td class="stddev"></td>
<td class="percentile"></td>
</tr>
'''
TabCAT.Console.populateWithScores = ($scoresDiv, scores) ->
$scoresDiv.empty()
for score in scores
$score = $(SCORE_HTML)
$score.find('.scoreHeader .description').text(
score.description)
scoreValue = score.value
if typeof scoreValue == "number"
scoreValue = scoreValue.toFixed(1)
$score.find('.scoreBody .rawScore .value').text(
scoreValue)
if score.norms?
for norm in score.norms
$norm = $(NORM_HTML)
minAge = norm.cohort?.minAge ? 0
if norm.cohort?.maxAge?
age = minAge + '-' + norm.cohort.maxAge
else
age = minAge + '+'
$norm.find('.age').text(age)
$norm.find('.mean').text(norm.mean ? '-')
$norm.find('.stddev').text(norm.stddev ? '-')
percentile = norm.percentile
# infer percentile
if (not percentile?) and norm.mean? and norm.stddev
g = gaussian(norm.mean, norm.stddev * norm.stddev)
percentile = 100 * g.cdf(score.value)
if score.lessIsMore
percentile = 100 - percentile
if percentile?
percentile = Math.floor(percentile) + '%'
$norm.find('.percentile').text(percentile ? '-')
$score.find('.scoreBody .norms tbody').append($norm)
catchTrialScore = "N/A"
if score.catchTrialsScore?
catchTrialScore = parseInt(score.catchTrialsScore) + '%'
$score.find('.scoreBody .catchTrialScore .value').text( \
catchTrialScore)
$scoresDiv.append($score)
| 120864 | ###
Copyright (c) 2014, Regents of the University of California
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###
# Functions only used by the TabCAT console, and only available from
# there. Used to be in TabCAT.UI.
@TabCAT ?= {}
TabCAT.Console = {}
# INITIALIZATION
# default fallback language, for i18n
DEFAULT_FALLBACK_LNG = 'en'
# call this first. Analogous to TabCAT.Task.start()
#
# this sets up i18n, starts sync of spilled docs, and updates
# the status bar (once the page is ready)
TabCAT.Console.start = _.once((options) ->
# set up i18n
i18n_options = _.extend(
{fallbackLng: DEFAULT_FALLBACK_LNG, resStore: {}},
options?.i18n)
$.i18n.init(i18n_options)
# sync spilled docs
TabCAT.DB.startSpilledDocSync()
# update status bar
$(TabCAT.Console.updateStatusBar)
)
# STATUS BAR
# warn when local storage is more than 75% full
# typical tasks use 0.5% of browser storage
LOCAL_STORAGE_WARNING_THRESHOLD = 75
# keep status messages for at least a second
OFFLINE_STATUS_MIN_CHANGE_TIME = 2000
# DB where design docs and task content is stored
TABCAT_DB = 'tabcat'
keepOfflineStatusUntil = null
lastOfflineStatusType = 0
# update the statusBar div, populating it if necessary
#
# this also implicitly unsets patientHasDevice
#
# this is also responsible for swapping in updated versions of the
# application cache (Android browser seems to need this)
TabCAT.Console.updateStatusBar = ->
TabCAT.Task.patientHasDevice(false)
$statusBar = $('#statusBar')
if TabCAT.Console.inSandbox()
$statusBar.addClass('sandbox')
# populate with new HTML if we didn't already
if $statusBar.find('div.left').length is 0
$statusBar.html(
"""
<div class="left">
<span class="banner"></span>
<span class="version"></span>
<p class="offline"></p>
</div>
<div class="right">
<p class="email"> </p>
<button class="login" style="display:none"></button>
<button class="languageButton" id="englishButton">English</button>
<button class="languageButton" id="spanishButton">Spanish</button>
</div>
<div class="center">
<p class="encounter"></p>
<p class="clock"></p>
</div>
"""
)
#temporary way to switch between english and spanish
$('#englishButton').touchdown =>
window.localStorage.currentLanguage = 'en'
$('#spanishButton').touchdown =>
window.localStorage.currentLanguage = 'es'
$statusBar.find('.version').text(TabCAT.version)
$statusBar.find('button.login').on('click', (event) ->
button = $(event.target)
if button.text() == 'Log Out'
$emailP = $statusBar.find('p.email')
# do something even if logout is slow
oldContents = $emailP.html()
$statusBar.find('p.email').text('Logging out...')
TabCAT.UI.logout()
# this only happens if user decides not to log out
$emailP.html(oldContents)
else
TabCAT.UI.requestLogin()
)
$emailP = $statusBar.find('p.email')
$button = $statusBar.find('button.login')
$encounterP = $statusBar.find('p.encounter')
user = TabCAT.User.get()
if user?
$emailP.text(user)
$button.text('Log Out')
else
$emailP.text('not logged in')
$button.text('Log In')
$button.show()
# only check offline status occasionally
updateOfflineStatus()
TabCAT.Console.updateStatusBar.offlineInterval = window.setInterval(
updateOfflineStatus, 500)
# don't show encounter info unless patient is logged in
patientCode = TabCAT.Encounter.getPatientCode()
if patientCode? and user?
encounterNum = TabCAT.Encounter.getNum()
encounterNumText = if encounterNum? then ' #' + encounterNum else ''
$encounterP.text(
'Encounter' + encounterNumText + ' with Patient ' + patientCode)
if not TabCAT.Console.updateStatusBar.clockInterval?
TabCAT.Console.updateStatusBar.clockInterval = window.setInterval(
updateEncounterClock, 50)
else
$encounterP.empty()
if TabCAT.Console.updateStatusBar.clockInterval?
window.clearInterval(TabCAT.Console.updateStatusBar.clockInterval)
$statusBar.find('p.clock').empty()
# update the encounter clock on the statusBar
updateEncounterClock = ->
# handle end of encounter gracefully
if TabCAT.Encounter.isOpen()
now = TabCAT.Clock.now()
seconds = Math.floor(now / 1000) % 60
if seconds < 10
seconds = '0' + seconds
minutes = Math.floor(now / 60000) % 60
if minutes < 10
minutes = '0' + minutes
hours = Math.floor(now / 3600000)
time = hours + ':' + minutes + ':' + seconds
$('#statusBar p.clock').text(time)
else
$('#statusBar p.clock').empty()
# update the offline status on the statusBar, while attempting not
# to flicker status messages so quickly that we can't read them
updateOfflineStatus = ->
now = $.now()
[statusType, statusHtml] = offlineStatusTypeAndHtml()
if (keepOfflineStatusUntil? and now < keepOfflineStatusUntil \
and statusType isnt lastOfflineStatusType)
return
# don't bother holding blank message for a second
if statusHtml
lastOfflineStatusType = statusType
keepOfflineStatusUntil = now + OFFLINE_STATUS_MIN_CHANGE_TIME
$('#statusBar').find('p.offline').html(statusHtml)
# return the type of offline status and html to display.
#
# This also swaps in an updated application cache, if necessary
offlineStatusTypeAndHtml = ->
now = $.now()
appcache = window.applicationCache
# if there's an updated version of the cache ready, swap it in
if appcache.status is appcache.UPDATEREADY
appcache.swapCache()
if navigator.onLine is false
if (appcache.status is appcache.UNCACHED or \
appcache.status >= appcache.OBSOLETE)
return [1, '<span class="warning">PLEASE CONNECT TO NETWORK</span>']
else
percentFullHtml = offlineStatusStoragePercentFullHtml()
if percentFullHtml
return [2, 'OFFLINE MODE (storage ' + percentFullHtml + ')']
else
return [2, 'OFFLINE MODE']
if appcache.status is appcache.DOWNLOADING
return [3, 'loading content for offline mode']
if (appcache.status is appcache.UNCACHED or \
appcache.status >= appcache.OBSOLETE)
return [4, '<span class="warning">offline mode unavailable</span>']
# not exactly offline, but can't sync (maybe wrong network?)
percentFullHtml = offlineStatusStoragePercentFullHtml()
if percentFullHtml
return [5, 'offline storage ' + percentFullHtml]
return [0, '']
# helper for offlineStatusHtml(). returns "#.#% full" plus markup
offlineStatusStoragePercentFullHtml = ->
if not TabCAT.DB.spilledDocsRemain()
return ''
percentFull = TabCAT.DB.percentOfLocalStorageUsed()
percentFullHtml = Math.min(percentFull, 100).toFixed(1) + '% full'
if percentFull >= LOCAL_STORAGE_WARNING_THRESHOLD
percentFullHtml = '<span class="warning">' + percentFullHtml + '</span>'
return percentFullHtml
TabCAT.Console.DEFAULT_TASK_ICON_URL = (
"/#{TABCAT_DB}/_design/console/img/icon.png")
# extract icon URL from task info (from TabCAT.Task.getTaskInfo())
TabCAT.Console.getTaskIconUrl = (task) ->
if task.designDocId? and task.icon?
"/#{TABCAT_DB}/#{task.designDocId}/#{task.icon}"
else
DEFAULT_TASK_ICON_URL
# extract start URL from task info (from TabCAT.Task.getTaskInfo())
TabCAT.Console.getTaskStartUrl = (task) ->
if task.designDocId? and task.start?
"/#{TABCAT_DB}/#{task.designDocId}/#{task.start}"
# used by inSandbox()
SANDBOX_REGEX = \
/(sandbox|^\d+\.\d+\.\d+\.\d+$)/i
# Infer from the hostname whether we're in sandbox mode. This happens if it
# contains "sandbox" or is an IP address.
#
# Sandbox mode is meant to only affect the UI: different warning messages,
# pre-filled form inputs, etc.
#
# We intentially don't do anything for the hostname "localhost"
# so that it's easy to test non-sandbox mode (use "127.0.0.1" instead).
#
# You can optionally pass in a hostname (by default we use
# window.location.hostname).
TabCAT.Console.inSandbox = (hostname) ->
SANDBOX_REGEX.test(hostname ? window.location.hostname)
# constants for sandbox mode
TabCAT.Console.sandboxIcon = 'img/sandbox/icon.png'
TabCAT.Console.sandboxPassword = '<PASSWORD>'
TabCAT.Console.sandboxTitle = 'TabCAT Sandbox'
TabCAT.Console.sandboxUser = 's@ndbox'
# displaying scores
SCORE_HTML = '''
<div class="score">
<div class="scoreHeader">
<span class="description"></span>
</div>
<div class="scoreBody">
<div class="rawScore">
<p class="description">Raw Score</p>
<p class="value"></p>
</div>
<div class="catchTrialScore">
<p class="description">Catch Trial Score</p>
<p class="value"></p>
</div>
<div class="norms">
<table class="norm">
<thead>
<tr>
<th class="age">Age</th>
<th class="mean">Mean</th>
<th class="stddev">Std. Dev.</th>
<th class="percentile">Percentile</th>
</tr>
</thead>
<tbody>
</tbody>
</table>
</div>
</div>
</div>
'''
NORM_HTML = '''
<tr>
<td class="age"></td>
<td class="mean"></td>
<td class="stddev"></td>
<td class="percentile"></td>
</tr>
'''
TabCAT.Console.populateWithScores = ($scoresDiv, scores) ->
$scoresDiv.empty()
for score in scores
$score = $(SCORE_HTML)
$score.find('.scoreHeader .description').text(
score.description)
scoreValue = score.value
if typeof scoreValue == "number"
scoreValue = scoreValue.toFixed(1)
$score.find('.scoreBody .rawScore .value').text(
scoreValue)
if score.norms?
for norm in score.norms
$norm = $(NORM_HTML)
minAge = norm.cohort?.minAge ? 0
if norm.cohort?.maxAge?
age = minAge + '-' + norm.cohort.maxAge
else
age = minAge + '+'
$norm.find('.age').text(age)
$norm.find('.mean').text(norm.mean ? '-')
$norm.find('.stddev').text(norm.stddev ? '-')
percentile = norm.percentile
# infer percentile
if (not percentile?) and norm.mean? and norm.stddev
g = gaussian(norm.mean, norm.stddev * norm.stddev)
percentile = 100 * g.cdf(score.value)
if score.lessIsMore
percentile = 100 - percentile
if percentile?
percentile = Math.floor(percentile) + '%'
$norm.find('.percentile').text(percentile ? '-')
$score.find('.scoreBody .norms tbody').append($norm)
catchTrialScore = "N/A"
if score.catchTrialsScore?
catchTrialScore = parseInt(score.catchTrialsScore) + '%'
$score.find('.scoreBody .catchTrialScore .value').text( \
catchTrialScore)
$scoresDiv.append($score)
| true | ###
Copyright (c) 2014, Regents of the University of California
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###
# Functions only used by the TabCAT console, and only available from
# there. Used to be in TabCAT.UI.
@TabCAT ?= {}
TabCAT.Console = {}
# INITIALIZATION
# default fallback language, for i18n
DEFAULT_FALLBACK_LNG = 'en'
# call this first. Analogous to TabCAT.Task.start()
#
# this sets up i18n, starts sync of spilled docs, and updates
# the status bar (once the page is ready)
TabCAT.Console.start = _.once((options) ->
# set up i18n
i18n_options = _.extend(
{fallbackLng: DEFAULT_FALLBACK_LNG, resStore: {}},
options?.i18n)
$.i18n.init(i18n_options)
# sync spilled docs
TabCAT.DB.startSpilledDocSync()
# update status bar
$(TabCAT.Console.updateStatusBar)
)
# STATUS BAR
# warn when local storage is more than 75% full
# typical tasks use 0.5% of browser storage
LOCAL_STORAGE_WARNING_THRESHOLD = 75
# keep status messages for at least a second
OFFLINE_STATUS_MIN_CHANGE_TIME = 2000
# DB where design docs and task content is stored
TABCAT_DB = 'tabcat'
keepOfflineStatusUntil = null
lastOfflineStatusType = 0
# update the statusBar div, populating it if necessary
#
# this also implicitly unsets patientHasDevice
#
# this is also responsible for swapping in updated versions of the
# application cache (Android browser seems to need this)
TabCAT.Console.updateStatusBar = ->
TabCAT.Task.patientHasDevice(false)
$statusBar = $('#statusBar')
if TabCAT.Console.inSandbox()
$statusBar.addClass('sandbox')
# populate with new HTML if we didn't already
if $statusBar.find('div.left').length is 0
$statusBar.html(
"""
<div class="left">
<span class="banner"></span>
<span class="version"></span>
<p class="offline"></p>
</div>
<div class="right">
<p class="email"> </p>
<button class="login" style="display:none"></button>
<button class="languageButton" id="englishButton">English</button>
<button class="languageButton" id="spanishButton">Spanish</button>
</div>
<div class="center">
<p class="encounter"></p>
<p class="clock"></p>
</div>
"""
)
#temporary way to switch between english and spanish
$('#englishButton').touchdown =>
window.localStorage.currentLanguage = 'en'
$('#spanishButton').touchdown =>
window.localStorage.currentLanguage = 'es'
$statusBar.find('.version').text(TabCAT.version)
$statusBar.find('button.login').on('click', (event) ->
button = $(event.target)
if button.text() == 'Log Out'
$emailP = $statusBar.find('p.email')
# do something even if logout is slow
oldContents = $emailP.html()
$statusBar.find('p.email').text('Logging out...')
TabCAT.UI.logout()
# this only happens if user decides not to log out
$emailP.html(oldContents)
else
TabCAT.UI.requestLogin()
)
$emailP = $statusBar.find('p.email')
$button = $statusBar.find('button.login')
$encounterP = $statusBar.find('p.encounter')
user = TabCAT.User.get()
if user?
$emailP.text(user)
$button.text('Log Out')
else
$emailP.text('not logged in')
$button.text('Log In')
$button.show()
# only check offline status occasionally
updateOfflineStatus()
TabCAT.Console.updateStatusBar.offlineInterval = window.setInterval(
updateOfflineStatus, 500)
# don't show encounter info unless patient is logged in
patientCode = TabCAT.Encounter.getPatientCode()
if patientCode? and user?
encounterNum = TabCAT.Encounter.getNum()
encounterNumText = if encounterNum? then ' #' + encounterNum else ''
$encounterP.text(
'Encounter' + encounterNumText + ' with Patient ' + patientCode)
if not TabCAT.Console.updateStatusBar.clockInterval?
TabCAT.Console.updateStatusBar.clockInterval = window.setInterval(
updateEncounterClock, 50)
else
$encounterP.empty()
if TabCAT.Console.updateStatusBar.clockInterval?
window.clearInterval(TabCAT.Console.updateStatusBar.clockInterval)
$statusBar.find('p.clock').empty()
# update the encounter clock on the statusBar
updateEncounterClock = ->
# handle end of encounter gracefully
if TabCAT.Encounter.isOpen()
now = TabCAT.Clock.now()
seconds = Math.floor(now / 1000) % 60
if seconds < 10
seconds = '0' + seconds
minutes = Math.floor(now / 60000) % 60
if minutes < 10
minutes = '0' + minutes
hours = Math.floor(now / 3600000)
time = hours + ':' + minutes + ':' + seconds
$('#statusBar p.clock').text(time)
else
$('#statusBar p.clock').empty()
# update the offline status on the statusBar, while attempting not
# to flicker status messages so quickly that we can't read them
updateOfflineStatus = ->
now = $.now()
[statusType, statusHtml] = offlineStatusTypeAndHtml()
if (keepOfflineStatusUntil? and now < keepOfflineStatusUntil \
and statusType isnt lastOfflineStatusType)
return
# don't bother holding blank message for a second
if statusHtml
lastOfflineStatusType = statusType
keepOfflineStatusUntil = now + OFFLINE_STATUS_MIN_CHANGE_TIME
$('#statusBar').find('p.offline').html(statusHtml)
# return the type of offline status and html to display.
#
# This also swaps in an updated application cache, if necessary
offlineStatusTypeAndHtml = ->
now = $.now()
appcache = window.applicationCache
# if there's an updated version of the cache ready, swap it in
if appcache.status is appcache.UPDATEREADY
appcache.swapCache()
if navigator.onLine is false
if (appcache.status is appcache.UNCACHED or \
appcache.status >= appcache.OBSOLETE)
return [1, '<span class="warning">PLEASE CONNECT TO NETWORK</span>']
else
percentFullHtml = offlineStatusStoragePercentFullHtml()
if percentFullHtml
return [2, 'OFFLINE MODE (storage ' + percentFullHtml + ')']
else
return [2, 'OFFLINE MODE']
if appcache.status is appcache.DOWNLOADING
return [3, 'loading content for offline mode']
if (appcache.status is appcache.UNCACHED or \
appcache.status >= appcache.OBSOLETE)
return [4, '<span class="warning">offline mode unavailable</span>']
# not exactly offline, but can't sync (maybe wrong network?)
percentFullHtml = offlineStatusStoragePercentFullHtml()
if percentFullHtml
return [5, 'offline storage ' + percentFullHtml]
return [0, '']
# helper for offlineStatusHtml(). returns "#.#% full" plus markup
offlineStatusStoragePercentFullHtml = ->
if not TabCAT.DB.spilledDocsRemain()
return ''
percentFull = TabCAT.DB.percentOfLocalStorageUsed()
percentFullHtml = Math.min(percentFull, 100).toFixed(1) + '% full'
if percentFull >= LOCAL_STORAGE_WARNING_THRESHOLD
percentFullHtml = '<span class="warning">' + percentFullHtml + '</span>'
return percentFullHtml
TabCAT.Console.DEFAULT_TASK_ICON_URL = (
"/#{TABCAT_DB}/_design/console/img/icon.png")
# extract icon URL from task info (from TabCAT.Task.getTaskInfo())
TabCAT.Console.getTaskIconUrl = (task) ->
if task.designDocId? and task.icon?
"/#{TABCAT_DB}/#{task.designDocId}/#{task.icon}"
else
DEFAULT_TASK_ICON_URL
# extract start URL from task info (from TabCAT.Task.getTaskInfo())
TabCAT.Console.getTaskStartUrl = (task) ->
if task.designDocId? and task.start?
"/#{TABCAT_DB}/#{task.designDocId}/#{task.start}"
# used by inSandbox()
SANDBOX_REGEX = \
/(sandbox|^\d+\.\d+\.\d+\.\d+$)/i
# Infer from the hostname whether we're in sandbox mode. This happens if it
# contains "sandbox" or is an IP address.
#
# Sandbox mode is meant to only affect the UI: different warning messages,
# pre-filled form inputs, etc.
#
# We intentially don't do anything for the hostname "localhost"
# so that it's easy to test non-sandbox mode (use "127.0.0.1" instead).
#
# You can optionally pass in a hostname (by default we use
# window.location.hostname).
TabCAT.Console.inSandbox = (hostname) ->
SANDBOX_REGEX.test(hostname ? window.location.hostname)
# constants for sandbox mode
TabCAT.Console.sandboxIcon = 'img/sandbox/icon.png'
TabCAT.Console.sandboxPassword = 'PI:PASSWORD:<PASSWORD>END_PI'
TabCAT.Console.sandboxTitle = 'TabCAT Sandbox'
TabCAT.Console.sandboxUser = 's@ndbox'
# displaying scores
SCORE_HTML = '''
<div class="score">
<div class="scoreHeader">
<span class="description"></span>
</div>
<div class="scoreBody">
<div class="rawScore">
<p class="description">Raw Score</p>
<p class="value"></p>
</div>
<div class="catchTrialScore">
<p class="description">Catch Trial Score</p>
<p class="value"></p>
</div>
<div class="norms">
<table class="norm">
<thead>
<tr>
<th class="age">Age</th>
<th class="mean">Mean</th>
<th class="stddev">Std. Dev.</th>
<th class="percentile">Percentile</th>
</tr>
</thead>
<tbody>
</tbody>
</table>
</div>
</div>
</div>
'''
NORM_HTML = '''
<tr>
<td class="age"></td>
<td class="mean"></td>
<td class="stddev"></td>
<td class="percentile"></td>
</tr>
'''
TabCAT.Console.populateWithScores = ($scoresDiv, scores) ->
$scoresDiv.empty()
for score in scores
$score = $(SCORE_HTML)
$score.find('.scoreHeader .description').text(
score.description)
scoreValue = score.value
if typeof scoreValue == "number"
scoreValue = scoreValue.toFixed(1)
$score.find('.scoreBody .rawScore .value').text(
scoreValue)
if score.norms?
for norm in score.norms
$norm = $(NORM_HTML)
minAge = norm.cohort?.minAge ? 0
if norm.cohort?.maxAge?
age = minAge + '-' + norm.cohort.maxAge
else
age = minAge + '+'
$norm.find('.age').text(age)
$norm.find('.mean').text(norm.mean ? '-')
$norm.find('.stddev').text(norm.stddev ? '-')
percentile = norm.percentile
# infer percentile
if (not percentile?) and norm.mean? and norm.stddev
g = gaussian(norm.mean, norm.stddev * norm.stddev)
percentile = 100 * g.cdf(score.value)
if score.lessIsMore
percentile = 100 - percentile
if percentile?
percentile = Math.floor(percentile) + '%'
$norm.find('.percentile').text(percentile ? '-')
$score.find('.scoreBody .norms tbody').append($norm)
catchTrialScore = "N/A"
if score.catchTrialsScore?
catchTrialScore = parseInt(score.catchTrialsScore) + '%'
$score.find('.scoreBody .catchTrialScore .value').text( \
catchTrialScore)
$scoresDiv.append($score)
|
[
{
"context": "\n return buf\n\n Key1 = _createKey 0x36, [\n 0x65, 0x64, 0x70, 0x7B, 0x7D, 0x65, 0x7C, 0x77,\n 0x",
"end": 466,
"score": 0.8173816800117493,
"start": 463,
"tag": "KEY",
"value": "x65"
},
{
"context": "eturn buf\n\n Key1 = _createKey 0x36, [\n 0x65, 0x64, ... | gatekeeper.coffee | ozjd/gatekeeper-sspi | 0 | crypto = require 'crypto'
uuid = require 'node-uuid'
module.exports = class GateKeeper
# Private class with useful functions. NOT EXPOSED.
@newGUID = -> uuid.v4 { }, Buffer.allocUnsafe 16
@emptyGUID = -> Buffer.from [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
_createKey = (pad, arr) -> # Function to make writing keys easier
buf = Buffer.alloc 64, pad, 'binary'
Buffer.from(arr).copy buf
return buf
Key1 = _createKey 0x36, [
0x65, 0x64, 0x70, 0x7B, 0x7D, 0x65, 0x7C, 0x77,
0x78, 0x72, 0x64, 0x73, 0x65, 0x7D, 0x7D, 0x75,
]
Key2 = _createKey 0x5C, [
0x0F, 0x0E, 0x1A, 0x11, 0x17, 0x0F, 0x16, 0x1D,
0x12, 0x18, 0x0E, 0x19, 0x0F, 0x17, 0x17, 0x1F
]
md5 = (data) -> # Takes Buffer, returns Buffer
return do
crypto.createHash 'md5'
.update data
.digest
@Signature: Buffer.from [0x47, 0x4B, 0x53, 0x53, 0x50, 0x00]
@OK: Buffer.from [0x4F, 0x4B]
constructor: ->
# Defaults:
@version = 2
@passport = false
@msgType = 0
@type = 'GateKeeper'
calculate: (challenge) -> # This is where the magic is done!
if challenge !instanceof Buffer
throw TypeError 'challenge must be a Buffer'
if @version is 3 # GKSSPv3
buf = Buffer.allocUnsafe challenge.length + @host.length # temporary buf
challenge.copy buf # Copy original challenge to new buffer
buf.write @host, challenge.length, @host.length, 'binary' # Add host
challenge = buf # update 'challenge' variable
b1 = Buffer.allocUnsafe Key1.length + challenge.length # buffer1
Key1.copy b1 # Copy key1 to buffer1
challenge.copy b1, Key1.length # Copy key1 to buffer1
b2 = md5 b1 # buffer2 - md5 of buffer1
b3 = Buffer.allocUnsafe Key2.length + b2.length # buffer3
Key2.copy b3 # Copy key2 to buffer3
b2.copy b3, Key2.length # Append buffer2 to buffer3
return md5 b3 # The final result is md5 of buffer3
createHeader: (version, msgType) ->
header = Buffer.allocUnsafe 16 # New buffer
GateKeeper.Signature.copy header # Add header to buffer
header.writeUInt32LE version, 8 # Add version to buffer
header.writeUInt32LE msgType, 12 # Add msgType to buffer
return header # return buffer
readHeader: (data) -> # Takes header, returns Object
return Object.freeze # Can't be modified - Ever!
signature: data.slice 0, 6
version: data.readUInt32LE 8
msgType: data.readUInt32LE 12
| 130406 | crypto = require 'crypto'
uuid = require 'node-uuid'
module.exports = class GateKeeper
# Private class with useful functions. NOT EXPOSED.
@newGUID = -> uuid.v4 { }, Buffer.allocUnsafe 16
@emptyGUID = -> Buffer.from [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
_createKey = (pad, arr) -> # Function to make writing keys easier
buf = Buffer.alloc 64, pad, 'binary'
Buffer.from(arr).copy buf
return buf
Key1 = _createKey 0x36, [
0<KEY>, 0<KEY>, 0<KEY>, <KEY>, <KEY>, <KEY>, 0<KEY>, 0<KEY>,
0x7<KEY>, 0<KEY>, 0<KEY>, <KEY>, 0<KEY>, <KEY>, <KEY>, <KEY>,
]
Key2 = _createKey 0x5C, [
0x0<KEY>, 0x0E, 0x1A, 0x11, 0x17, 0x0F, 0x16, 0x1D,
0x12, 0x18, 0x0E, 0x19, 0x0F, 0x17, 0x17, 0x1F
]
md5 = (data) -> # Takes Buffer, returns Buffer
return do
crypto.createHash 'md5'
.update data
.digest
@Signature: Buffer.from [0x47, 0x4B, 0x53, 0x53, 0x50, 0x00]
@OK: Buffer.from [0x4F, 0x4B]
constructor: ->
# Defaults:
@version = 2
@passport = false
@msgType = 0
@type = 'GateKeeper'
calculate: (challenge) -> # This is where the magic is done!
if challenge !instanceof Buffer
throw TypeError 'challenge must be a Buffer'
if @version is 3 # GKSSPv3
buf = Buffer.allocUnsafe challenge.length + @host.length # temporary buf
challenge.copy buf # Copy original challenge to new buffer
buf.write @host, challenge.length, @host.length, 'binary' # Add host
challenge = buf # update 'challenge' variable
b1 = Buffer.allocUnsafe Key1.length + challenge.length # buffer1
Key1.copy b1 # Copy key1 to buffer1
challenge.copy b1, Key1.length # Copy key1 to buffer1
b2 = md5 b1 # buffer2 - md5 of buffer1
b3 = Buffer.allocUnsafe Key2.length + b2.length # buffer3
Key2.copy b3 # Copy key2 to buffer3
b2.copy b3, Key2.length # Append buffer2 to buffer3
return md5 b3 # The final result is md5 of buffer3
createHeader: (version, msgType) ->
header = Buffer.allocUnsafe 16 # New buffer
GateKeeper.Signature.copy header # Add header to buffer
header.writeUInt32LE version, 8 # Add version to buffer
header.writeUInt32LE msgType, 12 # Add msgType to buffer
return header # return buffer
readHeader: (data) -> # Takes header, returns Object
return Object.freeze # Can't be modified - Ever!
signature: data.slice 0, 6
version: data.readUInt32LE 8
msgType: data.readUInt32LE 12
| true | crypto = require 'crypto'
uuid = require 'node-uuid'
module.exports = class GateKeeper
# Private class with useful functions. NOT EXPOSED.
@newGUID = -> uuid.v4 { }, Buffer.allocUnsafe 16
@emptyGUID = -> Buffer.from [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
_createKey = (pad, arr) -> # Function to make writing keys easier
buf = Buffer.alloc 64, pad, 'binary'
Buffer.from(arr).copy buf
return buf
Key1 = _createKey 0x36, [
0PI:KEY:<KEY>END_PI, 0PI:KEY:<KEY>END_PI, 0PI:KEY:<KEY>END_PI, PI:KEY:<KEY>END_PI, PI:KEY:<KEY>END_PI, PI:KEY:<KEY>END_PI, 0PI:KEY:<KEY>END_PI, 0PI:KEY:<KEY>END_PI,
0x7PI:KEY:<KEY>END_PI, 0PI:KEY:<KEY>END_PI, 0PI:KEY:<KEY>END_PI, PI:KEY:<KEY>END_PI, 0PI:KEY:<KEY>END_PI, PI:KEY:<KEY>END_PI, PI:KEY:<KEY>END_PI, PI:KEY:<KEY>END_PI,
]
Key2 = _createKey 0x5C, [
0x0PI:KEY:<KEY>END_PI, 0x0E, 0x1A, 0x11, 0x17, 0x0F, 0x16, 0x1D,
0x12, 0x18, 0x0E, 0x19, 0x0F, 0x17, 0x17, 0x1F
]
md5 = (data) -> # Takes Buffer, returns Buffer
return do
crypto.createHash 'md5'
.update data
.digest
@Signature: Buffer.from [0x47, 0x4B, 0x53, 0x53, 0x50, 0x00]
@OK: Buffer.from [0x4F, 0x4B]
constructor: ->
# Defaults:
@version = 2
@passport = false
@msgType = 0
@type = 'GateKeeper'
calculate: (challenge) -> # This is where the magic is done!
if challenge !instanceof Buffer
throw TypeError 'challenge must be a Buffer'
if @version is 3 # GKSSPv3
buf = Buffer.allocUnsafe challenge.length + @host.length # temporary buf
challenge.copy buf # Copy original challenge to new buffer
buf.write @host, challenge.length, @host.length, 'binary' # Add host
challenge = buf # update 'challenge' variable
b1 = Buffer.allocUnsafe Key1.length + challenge.length # buffer1
Key1.copy b1 # Copy key1 to buffer1
challenge.copy b1, Key1.length # Copy key1 to buffer1
b2 = md5 b1 # buffer2 - md5 of buffer1
b3 = Buffer.allocUnsafe Key2.length + b2.length # buffer3
Key2.copy b3 # Copy key2 to buffer3
b2.copy b3, Key2.length # Append buffer2 to buffer3
return md5 b3 # The final result is md5 of buffer3
createHeader: (version, msgType) ->
header = Buffer.allocUnsafe 16 # New buffer
GateKeeper.Signature.copy header # Add header to buffer
header.writeUInt32LE version, 8 # Add version to buffer
header.writeUInt32LE msgType, 12 # Add msgType to buffer
return header # return buffer
readHeader: (data) -> # Takes header, returns Object
return Object.freeze # Can't be modified - Ever!
signature: data.slice 0, 6
version: data.readUInt32LE 8
msgType: data.readUInt32LE 12
|
[
{
"context": "nit\n principal: 'admin'\n password: 'adm1n_p4ssw0rd'\n realm: 'KRB.LOCAL'\n , (err, ccname)",
"end": 224,
"score": 0.9992606043815613,
"start": 210,
"tag": "PASSWORD",
"value": "adm1n_p4ssw0rd"
},
{
"context": "nit\n principal: 'admin'\... | test/kdestroy.coffee | pdkovacs/forked-node-krb5 | 28 | krb5 = require '../lib/'
describe 'kdestroy', ->
describe 'function with callback', ->
it 'destroys default credential cache', (done) ->
krb5.kinit
principal: 'admin'
password: 'adm1n_p4ssw0rd'
realm: 'KRB.LOCAL'
, (err, ccname) ->
krb5.kdestroy (err) ->
(err is undefined).should.be.true()
krb5.kdestroy (err) ->
(err is undefined).should.be.false()
err.message.should.startWith 'No credentials cache found'
done()
it 'destroys given credential cache', (done) ->
krb5.kinit
principal: 'admin'
password: 'adm1n_p4ssw0rd'
realm: 'KRB.LOCAL'
ccname: '/tmp/customcc'
, (err, ccname) ->
krb5.kdestroy
ccname: '/tmp/customcc'
, (err) ->
(err is undefined).should.be.true()
krb5.kdestroy
ccname: '/tmp/customcc'
, (err) ->
(err is undefined).should.be.false()
err.message.should.startWith 'No credentials cache found'
done()
describe 'function with promise', ->
it 'destroys default credential cache', (done) ->
krb5.kinit
principal: 'admin'
password: 'adm1n_p4ssw0rd'
realm: 'KRB.LOCAL'
.then (ccname) ->
krb5.kdestroy()
.then ->
krb5.kdestroy()
.then ->
done Error 'Should not be able to redestroy cache'
.catch (err) ->
err.message.should.startWith 'No credentials cache found'
done()
return
it 'destroys given credential cache', (done) ->
krb5.kinit
principal: 'admin'
password: 'adm1n_p4ssw0rd'
realm: 'KRB.LOCAL'
ccname: "/tmp/customcc"
.then (ccname) ->
krb5.kdestroy
ccname: "/tmp/customcc"
.then ->
krb5.kdestroy
ccname: "/tmp/customcc"
.then ->
done Error 'Should not be able to redestroy cache'
.catch (err) ->
err.message.should.startWith 'No credentials cache found'
done()
return | 108371 | krb5 = require '../lib/'
describe 'kdestroy', ->
describe 'function with callback', ->
it 'destroys default credential cache', (done) ->
krb5.kinit
principal: 'admin'
password: '<PASSWORD>'
realm: 'KRB.LOCAL'
, (err, ccname) ->
krb5.kdestroy (err) ->
(err is undefined).should.be.true()
krb5.kdestroy (err) ->
(err is undefined).should.be.false()
err.message.should.startWith 'No credentials cache found'
done()
it 'destroys given credential cache', (done) ->
krb5.kinit
principal: 'admin'
password: '<PASSWORD>'
realm: 'KRB.LOCAL'
ccname: '/tmp/customcc'
, (err, ccname) ->
krb5.kdestroy
ccname: '/tmp/customcc'
, (err) ->
(err is undefined).should.be.true()
krb5.kdestroy
ccname: '/tmp/customcc'
, (err) ->
(err is undefined).should.be.false()
err.message.should.startWith 'No credentials cache found'
done()
describe 'function with promise', ->
it 'destroys default credential cache', (done) ->
krb5.kinit
principal: 'admin'
password: '<PASSWORD>'
realm: 'KRB.LOCAL'
.then (ccname) ->
krb5.kdestroy()
.then ->
krb5.kdestroy()
.then ->
done Error 'Should not be able to redestroy cache'
.catch (err) ->
err.message.should.startWith 'No credentials cache found'
done()
return
it 'destroys given credential cache', (done) ->
krb5.kinit
principal: 'admin'
password: '<PASSWORD>'
realm: 'KRB.LOCAL'
ccname: "/tmp/customcc"
.then (ccname) ->
krb5.kdestroy
ccname: "/tmp/customcc"
.then ->
krb5.kdestroy
ccname: "/tmp/customcc"
.then ->
done Error 'Should not be able to redestroy cache'
.catch (err) ->
err.message.should.startWith 'No credentials cache found'
done()
return | true | krb5 = require '../lib/'
describe 'kdestroy', ->
describe 'function with callback', ->
it 'destroys default credential cache', (done) ->
krb5.kinit
principal: 'admin'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
realm: 'KRB.LOCAL'
, (err, ccname) ->
krb5.kdestroy (err) ->
(err is undefined).should.be.true()
krb5.kdestroy (err) ->
(err is undefined).should.be.false()
err.message.should.startWith 'No credentials cache found'
done()
it 'destroys given credential cache', (done) ->
krb5.kinit
principal: 'admin'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
realm: 'KRB.LOCAL'
ccname: '/tmp/customcc'
, (err, ccname) ->
krb5.kdestroy
ccname: '/tmp/customcc'
, (err) ->
(err is undefined).should.be.true()
krb5.kdestroy
ccname: '/tmp/customcc'
, (err) ->
(err is undefined).should.be.false()
err.message.should.startWith 'No credentials cache found'
done()
describe 'function with promise', ->
it 'destroys default credential cache', (done) ->
krb5.kinit
principal: 'admin'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
realm: 'KRB.LOCAL'
.then (ccname) ->
krb5.kdestroy()
.then ->
krb5.kdestroy()
.then ->
done Error 'Should not be able to redestroy cache'
.catch (err) ->
err.message.should.startWith 'No credentials cache found'
done()
return
it 'destroys given credential cache', (done) ->
krb5.kinit
principal: 'admin'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
realm: 'KRB.LOCAL'
ccname: "/tmp/customcc"
.then (ccname) ->
krb5.kdestroy
ccname: "/tmp/customcc"
.then ->
krb5.kdestroy
ccname: "/tmp/customcc"
.then ->
done Error 'Should not be able to redestroy cache'
.catch (err) ->
err.message.should.startWith 'No credentials cache found'
done()
return |
[
{
"context": "###\n© Copyright 2013-2014 Stephan Jorek <stephan.jorek@gmail.com> \n© Copyright 2006 Goog",
"end": 39,
"score": 0.9998794794082642,
"start": 26,
"tag": "NAME",
"value": "Stephan Jorek"
},
{
"context": "###\n© Copyright 2013-2014 Stephan Jorek <stephan.jorek@gmail.com>... | src/Action/Expression/Javascript.coffee | sjorek/goatee.js | 0 | ###
© Copyright 2013-2014 Stephan Jorek <stephan.jorek@gmail.com>
© Copyright 2006 Google Inc. <http://www.google.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
<http://www.apache.org/licenses/LICENSE-2.0>
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing
permissions and limitations under the License.
###
{Constants:{
STRING_variables
STRING_data
STRING_with
}} = require '../../Core/Constants'
{UnorderedRules:{
parse
}} = require '../../Map/UnorderedRules'
{Utility:{
trim
}} = require '../../Core/Utility'
exports = module?.exports ? this
###
Javascript
@class
@namespace goatee.Action.Expression
###
exports.Javascript = class Javascript
##
# Wrapper for @evaluateExpression() catching and logging any Errors
# raised during expression evaluation to console.
#
# @param {String} expression
# @return {Object|null}
evaluate: (expression) ->
try
@evaluateExpression(expression)
catch e
console.log "Failed to evaluate “#{expression}”: #{e}"
return null
##
# Wrapper for the eval() builtin function to evaluate expressions and
# obtain their value. It wraps the expression in parentheses such
# that object literals are really evaluated to objects. Without the
# wrapping, they are evaluated as block, and create syntax
# errors. Also protects against other syntax errors in the eval()ed
# code and returns null if the eval throws an exception.
# @param {String} expression
# @return {Object|null}
evaluateExpression: (expression) ->
###
NOTE(mesch): An alternative idiom would be:
eval('(' + expr + ')');
Note that using the square brackets as below, "" evals to undefined.
The alternative of using parentheses does not work when evaluating
function literals in IE.
e.g. eval("(function() {})") returns undefined, and not a function
object, in IE.
NOTE(sjorek): Due to the underlying coffescript-specific language
agnostics we deliberatly fall back to vanilla javascript here.
###
return `eval('[' + expression + '][0]')`
##
# Cache for jsEvalToFunction results.
# @type {Object}
_evaluateToFunctionCache = {}
##
# Evaluates the given expression as the body of a function that takes
# variables and data as arguments. Since the resulting function depends
# only on expression, we cache the result so we save some Function
# invocations, and some object creations in IE6.
#
# @param {String} expression A javascript expression.
# @return {Function} A function that returns the expression's value
# in the context of variables and data.
evaluateToFunction: (expression) ->
return _evaluateToFunctionCache[expression] \
unless _evaluateToFunctionCache[expression]?
try
# NOTE(mesch): The Function constructor is faster than eval().
return _evaluateToFunctionCache[expression] = \
Function STRING_variables, STRING_data, STRING_with + expression
catch e
console.log "Failed to evalaluate “#{expression}” to function: #{e}"
return null
##
# Evaluates the given expression to itself. This is meant to pass through
# string action values.
#
# @param {String} expression
# @return {String}
evaluateToSelf = (expression) ->
return expression
##
# Parses the value of the alter action in goatee-templates: splits it up into
# a map of keys and expressions, and creates functions from the expressions
# that are suitable for execution by @evaluateExpression(). All that is
# returned as a flattened array of pairs of a String and a Function.
#
# @param {String} expressions
# @return {Array}
evaluateToFunctions: (expressions) ->
# TODO(mesch): It is insufficient to split the values by simply finding
# semicolons, as the semicolon may be part of a string Constants or escaped.
# TODO(sjorek): This does not look like coffescript … Das ist Doof :-)
result = []
for expression in expressions.split Constants.REGEXP_semicolon
colon = expression.indexOf(Constants.CHAR_colon)
continue if colon < 0
key = trim expression.substr(0, colon)
value = @evaluateToFunction expression.substr(colon + 1)
result.push(key, value)
return result
##
# Parses the value of the alter action in goatee-templates: splits it up into
# a map of keys and expressions, and creates functions from the expressions
# that are suitable for execution by @evaluateExpression(). All that is
# returned as a flattened array of pairs of a String and a Function.
#
# Fixes the insufficient implementation of @evaluateToFunctions(expressions)
#
# @param {String} expressions
# @param {UnorderedRules} Optional instance populated with rules from
# expressions. For internal use only.
# @return {Array}
evaluateToRules: (expressions, _rules) ->
self = @
_rules = parse expressions, _rules
result = []
collect = (key, value, priority) ->
result.push(key, self.evaluateToFunction value)
_rules.each collect
return result
##
# Parses the value of the execute actions in goatee-templates: splits it up
# into a list of expressions, and creates anonymous functions from the
# expressions, hence closures, that are suitable for execution by
# @evaluateExpression().
#
# All that is returned as an Array of Functions.
#
# @param {String} expressions
# @return {Array.<Function>}
evaluateToClosures: (expressions) ->
@evaluateToFunction expression \
for expression in expressions.split Constants.REGEXP_semicolon \
when expression
##
# Reference to singleton instance
# @type {goatee.Action.Expression.Javascript}
_instance = Javascript.instance = null
##
# Singleton implementation
# @return {goatee.Action.Expression.Javascript}
Javascript.get = () ->
_instance ? (_instance = new Javascript)
| 93581 | ###
© Copyright 2013-2014 <NAME> <<EMAIL>>
© Copyright 2006 Google Inc. <http://www.google.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
<http://www.apache.org/licenses/LICENSE-2.0>
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing
permissions and limitations under the License.
###
{Constants:{
STRING_variables
STRING_data
STRING_with
}} = require '../../Core/Constants'
{UnorderedRules:{
parse
}} = require '../../Map/UnorderedRules'
{Utility:{
trim
}} = require '../../Core/Utility'
exports = module?.exports ? this
###
Javascript
@class
@namespace goatee.Action.Expression
###
exports.Javascript = class Javascript
##
# Wrapper for @evaluateExpression() catching and logging any Errors
# raised during expression evaluation to console.
#
# @param {String} expression
# @return {Object|null}
evaluate: (expression) ->
try
@evaluateExpression(expression)
catch e
console.log "Failed to evaluate “#{expression}”: #{e}"
return null
##
# Wrapper for the eval() builtin function to evaluate expressions and
# obtain their value. It wraps the expression in parentheses such
# that object literals are really evaluated to objects. Without the
# wrapping, they are evaluated as block, and create syntax
# errors. Also protects against other syntax errors in the eval()ed
# code and returns null if the eval throws an exception.
# @param {String} expression
# @return {Object|null}
evaluateExpression: (expression) ->
###
NOTE(mesch): An alternative idiom would be:
eval('(' + expr + ')');
Note that using the square brackets as below, "" evals to undefined.
The alternative of using parentheses does not work when evaluating
function literals in IE.
e.g. eval("(function() {})") returns undefined, and not a function
object, in IE.
NOTE(sjorek): Due to the underlying coffescript-specific language
agnostics we deliberatly fall back to vanilla javascript here.
###
return `eval('[' + expression + '][0]')`
##
# Cache for jsEvalToFunction results.
# @type {Object}
_evaluateToFunctionCache = {}
##
# Evaluates the given expression as the body of a function that takes
# variables and data as arguments. Since the resulting function depends
# only on expression, we cache the result so we save some Function
# invocations, and some object creations in IE6.
#
# @param {String} expression A javascript expression.
# @return {Function} A function that returns the expression's value
# in the context of variables and data.
evaluateToFunction: (expression) ->
return _evaluateToFunctionCache[expression] \
unless _evaluateToFunctionCache[expression]?
try
# NOTE(mesch): The Function constructor is faster than eval().
return _evaluateToFunctionCache[expression] = \
Function STRING_variables, STRING_data, STRING_with + expression
catch e
console.log "Failed to evalaluate “#{expression}” to function: #{e}"
return null
##
# Evaluates the given expression to itself. This is meant to pass through
# string action values.
#
# @param {String} expression
# @return {String}
evaluateToSelf = (expression) ->
return expression
##
# Parses the value of the alter action in goatee-templates: splits it up into
# a map of keys and expressions, and creates functions from the expressions
# that are suitable for execution by @evaluateExpression(). All that is
# returned as a flattened array of pairs of a String and a Function.
#
# @param {String} expressions
# @return {Array}
evaluateToFunctions: (expressions) ->
# TODO(mesch): It is insufficient to split the values by simply finding
# semicolons, as the semicolon may be part of a string Constants or escaped.
# TODO(sjorek): This does not look like coffescript … Das ist Doof :-)
result = []
for expression in expressions.split Constants.REGEXP_semicolon
colon = expression.indexOf(Constants.CHAR_colon)
continue if colon < 0
key = trim expression.substr(0, colon)
value = @evaluateToFunction expression.substr(colon + 1)
result.push(key, value)
return result
##
# Parses the value of the alter action in goatee-templates: splits it up into
# a map of keys and expressions, and creates functions from the expressions
# that are suitable for execution by @evaluateExpression(). All that is
# returned as a flattened array of pairs of a String and a Function.
#
# Fixes the insufficient implementation of @evaluateToFunctions(expressions)
#
# @param {String} expressions
# @param {UnorderedRules} Optional instance populated with rules from
# expressions. For internal use only.
# @return {Array}
evaluateToRules: (expressions, _rules) ->
self = @
_rules = parse expressions, _rules
result = []
collect = (key, value, priority) ->
result.push(key, self.evaluateToFunction value)
_rules.each collect
return result
##
# Parses the value of the execute actions in goatee-templates: splits it up
# into a list of expressions, and creates anonymous functions from the
# expressions, hence closures, that are suitable for execution by
# @evaluateExpression().
#
# All that is returned as an Array of Functions.
#
# @param {String} expressions
# @return {Array.<Function>}
evaluateToClosures: (expressions) ->
@evaluateToFunction expression \
for expression in expressions.split Constants.REGEXP_semicolon \
when expression
##
# Reference to singleton instance
# @type {goatee.Action.Expression.Javascript}
_instance = Javascript.instance = null
##
# Singleton implementation
# @return {goatee.Action.Expression.Javascript}
Javascript.get = () ->
_instance ? (_instance = new Javascript)
| true | ###
© Copyright 2013-2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
© Copyright 2006 Google Inc. <http://www.google.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
<http://www.apache.org/licenses/LICENSE-2.0>
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing
permissions and limitations under the License.
###
{Constants:{
STRING_variables
STRING_data
STRING_with
}} = require '../../Core/Constants'
{UnorderedRules:{
parse
}} = require '../../Map/UnorderedRules'
{Utility:{
trim
}} = require '../../Core/Utility'
exports = module?.exports ? this
###
Javascript
@class
@namespace goatee.Action.Expression
###
exports.Javascript = class Javascript
##
# Wrapper for @evaluateExpression() catching and logging any Errors
# raised during expression evaluation to console.
#
# @param {String} expression
# @return {Object|null}
evaluate: (expression) ->
try
@evaluateExpression(expression)
catch e
console.log "Failed to evaluate “#{expression}”: #{e}"
return null
##
# Wrapper for the eval() builtin function to evaluate expressions and
# obtain their value. It wraps the expression in parentheses such
# that object literals are really evaluated to objects. Without the
# wrapping, they are evaluated as block, and create syntax
# errors. Also protects against other syntax errors in the eval()ed
# code and returns null if the eval throws an exception.
# @param {String} expression
# @return {Object|null}
evaluateExpression: (expression) ->
###
NOTE(mesch): An alternative idiom would be:
eval('(' + expr + ')');
Note that using the square brackets as below, "" evals to undefined.
The alternative of using parentheses does not work when evaluating
function literals in IE.
e.g. eval("(function() {})") returns undefined, and not a function
object, in IE.
NOTE(sjorek): Due to the underlying coffescript-specific language
agnostics we deliberatly fall back to vanilla javascript here.
###
return `eval('[' + expression + '][0]')`
##
# Cache for jsEvalToFunction results.
# @type {Object}
_evaluateToFunctionCache = {}
##
# Evaluates the given expression as the body of a function that takes
# variables and data as arguments. Since the resulting function depends
# only on expression, we cache the result so we save some Function
# invocations, and some object creations in IE6.
#
# @param {String} expression A javascript expression.
# @return {Function} A function that returns the expression's value
# in the context of variables and data.
evaluateToFunction: (expression) ->
return _evaluateToFunctionCache[expression] \
unless _evaluateToFunctionCache[expression]?
try
# NOTE(mesch): The Function constructor is faster than eval().
return _evaluateToFunctionCache[expression] = \
Function STRING_variables, STRING_data, STRING_with + expression
catch e
console.log "Failed to evalaluate “#{expression}” to function: #{e}"
return null
##
# Evaluates the given expression to itself. This is meant to pass through
# string action values.
#
# @param {String} expression
# @return {String}
evaluateToSelf = (expression) ->
return expression
##
# Parses the value of the alter action in goatee-templates: splits it up into
# a map of keys and expressions, and creates functions from the expressions
# that are suitable for execution by @evaluateExpression(). All that is
# returned as a flattened array of pairs of a String and a Function.
#
# @param {String} expressions
# @return {Array}
evaluateToFunctions: (expressions) ->
# TODO(mesch): It is insufficient to split the values by simply finding
# semicolons, as the semicolon may be part of a string Constants or escaped.
# TODO(sjorek): This does not look like coffescript … Das ist Doof :-)
result = []
for expression in expressions.split Constants.REGEXP_semicolon
colon = expression.indexOf(Constants.CHAR_colon)
continue if colon < 0
key = trim expression.substr(0, colon)
value = @evaluateToFunction expression.substr(colon + 1)
result.push(key, value)
return result
##
# Parses the value of the alter action in goatee-templates: splits it up into
# a map of keys and expressions, and creates functions from the expressions
# that are suitable for execution by @evaluateExpression(). All that is
# returned as a flattened array of pairs of a String and a Function.
#
# Fixes the insufficient implementation of @evaluateToFunctions(expressions)
#
# @param {String} expressions
# @param {UnorderedRules} Optional instance populated with rules from
# expressions. For internal use only.
# @return {Array}
evaluateToRules: (expressions, _rules) ->
self = @
_rules = parse expressions, _rules
result = []
collect = (key, value, priority) ->
result.push(key, self.evaluateToFunction value)
_rules.each collect
return result
##
# Parses the value of the execute actions in goatee-templates: splits it up
# into a list of expressions, and creates anonymous functions from the
# expressions, hence closures, that are suitable for execution by
# @evaluateExpression().
#
# All that is returned as an Array of Functions.
#
# @param {String} expressions
# @return {Array.<Function>}
evaluateToClosures: (expressions) ->
@evaluateToFunction expression \
for expression in expressions.split Constants.REGEXP_semicolon \
when expression
##
# Reference to singleton instance
# @type {goatee.Action.Expression.Javascript}
_instance = Javascript.instance = null
##
# Singleton implementation
# @return {goatee.Action.Expression.Javascript}
Javascript.get = () ->
_instance ? (_instance = new Javascript)
|
[
{
"context": "NTS\" : \"Комментарии\",\n \"PASSWORD_PLACEHOLDER\" : \"Пароль\",\n \"PASSWORD\" : \"Пароль\",\n \"ABOUT_PLACEHOLDER\" ",
"end": 194,
"score": 0.9949074387550354,
"start": 188,
"tag": "PASSWORD",
"value": "Пароль"
},
{
"context": "PASSWORD_PLACEHOLDER\" : \"Пароль\",... | app/constants/translation_ru.coffee | Nikitzu/Final_Proj | 0 | app = angular.module "myApp"
app.constant "translation_ru",
"SEND" : "Отправить",
"COMMENTS_PLACEHOLDER" : "Ваш комментарий",
"COMMENTS" : "Комментарии",
"PASSWORD_PLACEHOLDER" : "Пароль",
"PASSWORD" : "Пароль",
"ABOUT_PLACEHOLDER" : "Расскажите о себе",
"ABOUT" : "О Вас",
"EMAIL_PLACEHOLDER" : "Почта",
"EMAIL" : "Почта",
"SURNAME_PLACEHOLDER" : "Фамилия",
"SURNAME" : "Фамилия",
"NAME_PLACEHOLDER" : "Имя",
"NAME" : "Имя",
"YOUTUBE_VIDEO" : "Youtube видео",
"TEMPLATES" : "Выберите шаблон:",
"TAGS" : "Введите тэги",
"ARTICLE_PLACEHOLDER" : "Ваша статья",
"ARTICLE" : "Статья",
"IMAGE" : "Фото",
"DESCRIPTION_PLACEHOLDER" : "Краткое описание",
"DESCRIPTION" : "Описание",
"TITLE_PLACEHOLDER" : "Ваш заголовок",
"TITLE" : "Заголовок",
"SEARCH" : "Поиск...",
"MAIN" : "Главная",
"SIGN_OUT" : "Выйти",
"SIGN_IN" : "Войти",
"SIGN_UP" : "Регистрация",
"HOME" : "Личный кабинет",
"RATE" : "Рейтинг",
"COLOR" : "Цвет",
"light": "День",
"dark": "Ночь",
"CREATE": "Создать",
"SETTINGS": "Настройки",
"SAVE": "Сохранить",
"ADD": "Добавить"
"SUBMIT": "Подтвердить",
"CANCEL": "Отмена",
"CATEGORY": "Категория:",
"COMPUTER": "Компьютерные технологии",
"LANGUAGE": "Языки",
"WORLD":"Вокруг света",
"BIOLOGY": "Биология",
"PHOTO": "Фотография",
"USERPUBLICATIONS": "Мои публикации",
"BYRATING": "популярности",
"BYDATE":"дате",
"SORTBY":"Сортировать по: ",
"SELECT_IMAGE" : "Выбор фото",
"CHANGE_IMAGE" : "Изменить",
"REMOVE_IMAGE" : "Удалить",
"SELECT_THEME": "Выберите тему",
"INCORRECT_DATA" : "Неверный логин или пароль",
"ACHIVEMENTS": "Ваши достижения: "
| 204365 | app = angular.module "myApp"
app.constant "translation_ru",
"SEND" : "Отправить",
"COMMENTS_PLACEHOLDER" : "Ваш комментарий",
"COMMENTS" : "Комментарии",
"PASSWORD_PLACEHOLDER" : "<PASSWORD>",
"PASSWORD" : "<PASSWORD>",
"ABOUT_PLACEHOLDER" : "Расскажите о себе",
"ABOUT" : "О Вас",
"EMAIL_PLACEHOLDER" : "Почта",
"EMAIL" : "Почта",
"SURNAME_PLACEHOLDER" : "<NAME>",
"SURNAME" : "<NAME>",
"NAME_PLACEHOLDER" : "<NAME>",
"NAME" : "<NAME>",
"YOUTUBE_VIDEO" : "Youtube видео",
"TEMPLATES" : "Выберите шаблон:",
"TAGS" : "Введите тэги",
"ARTICLE_PLACEHOLDER" : "Ваша статья",
"ARTICLE" : "Статья",
"IMAGE" : "Фото",
"DESCRIPTION_PLACEHOLDER" : "Краткое описание",
"DESCRIPTION" : "Описание",
"TITLE_PLACEHOLDER" : "Ваш заголовок",
"TITLE" : "Заголовок",
"SEARCH" : "Поиск...",
"MAIN" : "Главная",
"SIGN_OUT" : "Выйти",
"SIGN_IN" : "Войти",
"SIGN_UP" : "Регистрация",
"HOME" : "Личный кабинет",
"RATE" : "Рейтинг",
"COLOR" : "Цвет",
"light": "День",
"dark": "Ночь",
"CREATE": "Создать",
"SETTINGS": "Настройки",
"SAVE": "Сохранить",
"ADD": "Добавить"
"SUBMIT": "Подтвердить",
"CANCEL": "Отмена",
"CATEGORY": "Категория:",
"COMPUTER": "Компьютерные технологии",
"LANGUAGE": "Языки",
"WORLD":"Вокруг света",
"BIOLOGY": "Биология",
"PHOTO": "Фотография",
"USERPUBLICATIONS": "Мои публикации",
"BYRATING": "популярности",
"BYDATE":"дате",
"SORTBY":"Сортировать по: ",
"SELECT_IMAGE" : "Выбор фото",
"CHANGE_IMAGE" : "Изменить",
"REMOVE_IMAGE" : "Удалить",
"SELECT_THEME": "Выберите тему",
"INCORRECT_DATA" : "Неверный логин или пароль",
"ACHIVEMENTS": "Ваши достижения: "
| true | app = angular.module "myApp"
app.constant "translation_ru",
"SEND" : "Отправить",
"COMMENTS_PLACEHOLDER" : "Ваш комментарий",
"COMMENTS" : "Комментарии",
"PASSWORD_PLACEHOLDER" : "PI:PASSWORD:<PASSWORD>END_PI",
"PASSWORD" : "PI:PASSWORD:<PASSWORD>END_PI",
"ABOUT_PLACEHOLDER" : "Расскажите о себе",
"ABOUT" : "О Вас",
"EMAIL_PLACEHOLDER" : "Почта",
"EMAIL" : "Почта",
"SURNAME_PLACEHOLDER" : "PI:NAME:<NAME>END_PI",
"SURNAME" : "PI:NAME:<NAME>END_PI",
"NAME_PLACEHOLDER" : "PI:NAME:<NAME>END_PI",
"NAME" : "PI:NAME:<NAME>END_PI",
"YOUTUBE_VIDEO" : "Youtube видео",
"TEMPLATES" : "Выберите шаблон:",
"TAGS" : "Введите тэги",
"ARTICLE_PLACEHOLDER" : "Ваша статья",
"ARTICLE" : "Статья",
"IMAGE" : "Фото",
"DESCRIPTION_PLACEHOLDER" : "Краткое описание",
"DESCRIPTION" : "Описание",
"TITLE_PLACEHOLDER" : "Ваш заголовок",
"TITLE" : "Заголовок",
"SEARCH" : "Поиск...",
"MAIN" : "Главная",
"SIGN_OUT" : "Выйти",
"SIGN_IN" : "Войти",
"SIGN_UP" : "Регистрация",
"HOME" : "Личный кабинет",
"RATE" : "Рейтинг",
"COLOR" : "Цвет",
"light": "День",
"dark": "Ночь",
"CREATE": "Создать",
"SETTINGS": "Настройки",
"SAVE": "Сохранить",
"ADD": "Добавить"
"SUBMIT": "Подтвердить",
"CANCEL": "Отмена",
"CATEGORY": "Категория:",
"COMPUTER": "Компьютерные технологии",
"LANGUAGE": "Языки",
"WORLD":"Вокруг света",
"BIOLOGY": "Биология",
"PHOTO": "Фотография",
"USERPUBLICATIONS": "Мои публикации",
"BYRATING": "популярности",
"BYDATE":"дате",
"SORTBY":"Сортировать по: ",
"SELECT_IMAGE" : "Выбор фото",
"CHANGE_IMAGE" : "Изменить",
"REMOVE_IMAGE" : "Удалить",
"SELECT_THEME": "Выберите тему",
"INCORRECT_DATA" : "Неверный логин или пароль",
"ACHIVEMENTS": "Ваши достижения: "
|
[
{
"context": " new word as a future WotD\n# -----\n# Copyright (c) Kiruse 2021. Licensed under MIT License\nimport WotDContr",
"end": 183,
"score": 0.9960110187530518,
"start": 177,
"tag": "NAME",
"value": "Kiruse"
}
] | dapp/src/components/wotd-enqueue.coffee | Kiruse/WotD.sol | 0 | ######################################################################
# WotD Enqueue Component
# Allows an admin to enqueue a new word as a future WotD
# -----
# Copyright (c) Kiruse 2021. Licensed under MIT License
import WotDContract from './wotd.contract'
export default
data: ->
pending: false
word: ''
error: ''
inject: ['eth']
computed:
enabled: -> !!this.word and not this.pending
contract: -> if this.eth.signer then new WotDContract(this.eth.signer)
methods:
submit: (evt) ->
evt.preventDefault()
this.enqueue(this.word)
enqueue: (word) ->
if this.contract and word
try
this.pending = true
await this.contract.enqueueWotD(word)
this.error = this.word = ''
catch err
this.error = 'failed - ' + (err.message ? err)
console.error 'enqueue failed due to error', err
finally
this.pending = false
| 72077 | ######################################################################
# WotD Enqueue Component
# Allows an admin to enqueue a new word as a future WotD
# -----
# Copyright (c) <NAME> 2021. Licensed under MIT License
import WotDContract from './wotd.contract'
export default
data: ->
pending: false
word: ''
error: ''
inject: ['eth']
computed:
enabled: -> !!this.word and not this.pending
contract: -> if this.eth.signer then new WotDContract(this.eth.signer)
methods:
submit: (evt) ->
evt.preventDefault()
this.enqueue(this.word)
enqueue: (word) ->
if this.contract and word
try
this.pending = true
await this.contract.enqueueWotD(word)
this.error = this.word = ''
catch err
this.error = 'failed - ' + (err.message ? err)
console.error 'enqueue failed due to error', err
finally
this.pending = false
| true | ######################################################################
# WotD Enqueue Component
# Allows an admin to enqueue a new word as a future WotD
# -----
# Copyright (c) PI:NAME:<NAME>END_PI 2021. Licensed under MIT License
import WotDContract from './wotd.contract'
export default
data: ->
pending: false
word: ''
error: ''
inject: ['eth']
computed:
enabled: -> !!this.word and not this.pending
contract: -> if this.eth.signer then new WotDContract(this.eth.signer)
methods:
submit: (evt) ->
evt.preventDefault()
this.enqueue(this.word)
enqueue: (word) ->
if this.contract and word
try
this.pending = true
await this.contract.enqueueWotD(word)
this.error = this.word = ''
catch err
this.error = 'failed - ' + (err.message ? err)
console.error 'enqueue failed due to error', err
finally
this.pending = false
|
[
{
"context": "to get to the elasticsearch cluster\n#\n# Author:\n# Paul Stack\n\n\nmodule.exports = (robot) ->\n\n search = (msg, s",
"end": 477,
"score": 0.999803900718689,
"start": 467,
"tag": "NAME",
"value": "Paul Stack"
}
] | src/scripts/elasticsearch.coffee | ryantomlinson/hubot-scripts | 0 | # Description:
# Get ElasticSearch Cluster Information
#
# Commands:
# hubot: elasticsearch cluster [server] - Gets the cluster information for the given server
# hubot: elasticsearch node [server] - Gets the node information for the given server
# hubot: elasticsearch query [server] [query] - Runs a specific query against an ElasticSearch cluster
#
# Notes:
# The server must be a fqdn (with the port!) to get to the elasticsearch cluster
#
# Author:
# Paul Stack
module.exports = (robot) ->
search = (msg, server, query) ->
msg.http("http://#{server}/_search?#{query}")
.get() (err, res, body) ->
json = JSON.parse(body)
firsthit = JSON.stringify(json.hits.hits[0])
msg.send("There are #{json.hits.total} results for the search http://#{server}/_search?#{query} \nThe first result is \n#{firsthit}")
cluster_health = (msg, server) ->
msg.http("http://#{server}/_cluster/health")
.get() (err, res, body) ->
json = JSON.parse(body)
cluster_name = json['cluster']
status = json['status']
number_of_nodes = json['number_of_nodes']
msg.send "Cluster: #{cluster_name} \nStatus: #{status} \n Nodes: #{number_of_nodes}"
node_health = (msg, server) ->
msg.http("http://#{server}/")
.get() (err, res, body) ->
json = JSON.parse(body)
name = json['name']
status = json['status']
msg.send "Server: #{name} \nStatus: #{status}"
robot.respond /elasticsearch query (.*) (.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
search msg, msg.match[1], msg.match[2], (text) ->
msg.send(text)
robot.respond /elasticsearch node (.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
node_health msg, msg.match[1], (text) ->
msg.send text
robot.respond /elasticsearch cluster (.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
cluster_health msg, msg.match[1], (text) ->
msg.send text
| 19151 | # Description:
# Get ElasticSearch Cluster Information
#
# Commands:
# hubot: elasticsearch cluster [server] - Gets the cluster information for the given server
# hubot: elasticsearch node [server] - Gets the node information for the given server
# hubot: elasticsearch query [server] [query] - Runs a specific query against an ElasticSearch cluster
#
# Notes:
# The server must be a fqdn (with the port!) to get to the elasticsearch cluster
#
# Author:
# <NAME>
module.exports = (robot) ->
search = (msg, server, query) ->
msg.http("http://#{server}/_search?#{query}")
.get() (err, res, body) ->
json = JSON.parse(body)
firsthit = JSON.stringify(json.hits.hits[0])
msg.send("There are #{json.hits.total} results for the search http://#{server}/_search?#{query} \nThe first result is \n#{firsthit}")
cluster_health = (msg, server) ->
msg.http("http://#{server}/_cluster/health")
.get() (err, res, body) ->
json = JSON.parse(body)
cluster_name = json['cluster']
status = json['status']
number_of_nodes = json['number_of_nodes']
msg.send "Cluster: #{cluster_name} \nStatus: #{status} \n Nodes: #{number_of_nodes}"
node_health = (msg, server) ->
msg.http("http://#{server}/")
.get() (err, res, body) ->
json = JSON.parse(body)
name = json['name']
status = json['status']
msg.send "Server: #{name} \nStatus: #{status}"
robot.respond /elasticsearch query (.*) (.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
search msg, msg.match[1], msg.match[2], (text) ->
msg.send(text)
robot.respond /elasticsearch node (.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
node_health msg, msg.match[1], (text) ->
msg.send text
robot.respond /elasticsearch cluster (.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
cluster_health msg, msg.match[1], (text) ->
msg.send text
| true | # Description:
# Get ElasticSearch Cluster Information
#
# Commands:
# hubot: elasticsearch cluster [server] - Gets the cluster information for the given server
# hubot: elasticsearch node [server] - Gets the node information for the given server
# hubot: elasticsearch query [server] [query] - Runs a specific query against an ElasticSearch cluster
#
# Notes:
# The server must be a fqdn (with the port!) to get to the elasticsearch cluster
#
# Author:
# PI:NAME:<NAME>END_PI
module.exports = (robot) ->
search = (msg, server, query) ->
msg.http("http://#{server}/_search?#{query}")
.get() (err, res, body) ->
json = JSON.parse(body)
firsthit = JSON.stringify(json.hits.hits[0])
msg.send("There are #{json.hits.total} results for the search http://#{server}/_search?#{query} \nThe first result is \n#{firsthit}")
cluster_health = (msg, server) ->
msg.http("http://#{server}/_cluster/health")
.get() (err, res, body) ->
json = JSON.parse(body)
cluster_name = json['cluster']
status = json['status']
number_of_nodes = json['number_of_nodes']
msg.send "Cluster: #{cluster_name} \nStatus: #{status} \n Nodes: #{number_of_nodes}"
node_health = (msg, server) ->
msg.http("http://#{server}/")
.get() (err, res, body) ->
json = JSON.parse(body)
name = json['name']
status = json['status']
msg.send "Server: #{name} \nStatus: #{status}"
robot.respond /elasticsearch query (.*) (.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
search msg, msg.match[1], msg.match[2], (text) ->
msg.send(text)
robot.respond /elasticsearch node (.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
node_health msg, msg.match[1], (text) ->
msg.send text
robot.respond /elasticsearch cluster (.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
cluster_health msg, msg.match[1], (text) ->
msg.send text
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9987692832946777,
"start": 12,
"tag": "NAME",
"value": "Joyent"
},
{
"context": "etTimeout (->\n fs.writeFileSync filepathTwoAbs, \"pardner\"\n return\n), 1000\nassert.doesN... | test/pummel/test-fs-watch-file.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
path = require("path")
fs = require("fs")
watchSeenOne = 0
watchSeenTwo = 0
watchSeenThree = 0
watchSeenFour = 0
startDir = process.cwd()
testDir = common.tmpDir
filenameOne = "watch.txt"
filepathOne = path.join(testDir, filenameOne)
filenameTwo = "hasOwnProperty"
filepathTwo = filenameTwo
filepathTwoAbs = path.join(testDir, filenameTwo)
filenameThree = "charm" # because the third time is
filenameFour = "get"
process.on "exit", ->
fs.unlinkSync filepathOne
fs.unlinkSync filepathTwoAbs
fs.unlinkSync filenameThree
fs.unlinkSync filenameFour
assert.equal 1, watchSeenOne
assert.equal 2, watchSeenTwo
assert.equal 1, watchSeenThree
assert.equal 1, watchSeenFour
return
fs.writeFileSync filepathOne, "hello"
assert.throws (->
fs.watchFile filepathOne
return
), (e) ->
e.message is "watchFile requires a listener function"
assert.doesNotThrow ->
fs.watchFile filepathOne, (curr, prev) ->
fs.unwatchFile filepathOne
++watchSeenOne
return
return
setTimeout (->
fs.writeFileSync filepathOne, "world"
return
), 1000
process.chdir testDir
fs.writeFileSync filepathTwoAbs, "howdy"
assert.throws (->
fs.watchFile filepathTwo
return
), (e) ->
e.message is "watchFile requires a listener function"
assert.doesNotThrow ->
a = (curr, prev) ->
fs.unwatchFile filepathTwo, a
++watchSeenTwo
return
b = (curr, prev) ->
fs.unwatchFile filepathTwo, b
++watchSeenTwo
return
fs.watchFile filepathTwo, a
fs.watchFile filepathTwo, b
return
setTimeout (->
fs.writeFileSync filepathTwoAbs, "pardner"
return
), 1000
assert.doesNotThrow ->
a = (curr, prev) ->
assert.ok 0 # should not run
return
b = (curr, prev) ->
fs.unwatchFile filenameThree, b
++watchSeenThree
return
fs.watchFile filenameThree, a
fs.watchFile filenameThree, b
fs.unwatchFile filenameThree, a
return
setTimeout (->
fs.writeFileSync filenameThree, "pardner"
return
), 1000
setTimeout (->
fs.writeFileSync filenameFour, "hey"
return
), 200
setTimeout (->
fs.writeFileSync filenameFour, "hey"
return
), 500
assert.doesNotThrow ->
a = (curr, prev) ->
++watchSeenFour
assert.equal 1, watchSeenFour
fs.unwatchFile "." + path.sep + filenameFour, a
return
fs.watchFile filenameFour, a
return
| 220697 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
path = require("path")
fs = require("fs")
watchSeenOne = 0
watchSeenTwo = 0
watchSeenThree = 0
watchSeenFour = 0
startDir = process.cwd()
testDir = common.tmpDir
filenameOne = "watch.txt"
filepathOne = path.join(testDir, filenameOne)
filenameTwo = "hasOwnProperty"
filepathTwo = filenameTwo
filepathTwoAbs = path.join(testDir, filenameTwo)
filenameThree = "charm" # because the third time is
filenameFour = "get"
process.on "exit", ->
fs.unlinkSync filepathOne
fs.unlinkSync filepathTwoAbs
fs.unlinkSync filenameThree
fs.unlinkSync filenameFour
assert.equal 1, watchSeenOne
assert.equal 2, watchSeenTwo
assert.equal 1, watchSeenThree
assert.equal 1, watchSeenFour
return
fs.writeFileSync filepathOne, "hello"
assert.throws (->
fs.watchFile filepathOne
return
), (e) ->
e.message is "watchFile requires a listener function"
assert.doesNotThrow ->
fs.watchFile filepathOne, (curr, prev) ->
fs.unwatchFile filepathOne
++watchSeenOne
return
return
setTimeout (->
fs.writeFileSync filepathOne, "world"
return
), 1000
process.chdir testDir
fs.writeFileSync filepathTwoAbs, "howdy"
assert.throws (->
fs.watchFile filepathTwo
return
), (e) ->
e.message is "watchFile requires a listener function"
assert.doesNotThrow ->
a = (curr, prev) ->
fs.unwatchFile filepathTwo, a
++watchSeenTwo
return
b = (curr, prev) ->
fs.unwatchFile filepathTwo, b
++watchSeenTwo
return
fs.watchFile filepathTwo, a
fs.watchFile filepathTwo, b
return
setTimeout (->
fs.writeFileSync filepathTwoAbs, "<NAME>"
return
), 1000
assert.doesNotThrow ->
a = (curr, prev) ->
assert.ok 0 # should not run
return
b = (curr, prev) ->
fs.unwatchFile filenameThree, b
++watchSeenThree
return
fs.watchFile filenameThree, a
fs.watchFile filenameThree, b
fs.unwatchFile filenameThree, a
return
setTimeout (->
fs.writeFileSync filenameThree, "<NAME>"
return
), 1000
setTimeout (->
fs.writeFileSync filenameFour, "hey"
return
), 200
setTimeout (->
fs.writeFileSync filenameFour, "hey"
return
), 500
assert.doesNotThrow ->
a = (curr, prev) ->
++watchSeenFour
assert.equal 1, watchSeenFour
fs.unwatchFile "." + path.sep + filenameFour, a
return
fs.watchFile filenameFour, a
return
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
path = require("path")
fs = require("fs")
watchSeenOne = 0
watchSeenTwo = 0
watchSeenThree = 0
watchSeenFour = 0
startDir = process.cwd()
testDir = common.tmpDir
filenameOne = "watch.txt"
filepathOne = path.join(testDir, filenameOne)
filenameTwo = "hasOwnProperty"
filepathTwo = filenameTwo
filepathTwoAbs = path.join(testDir, filenameTwo)
filenameThree = "charm" # because the third time is
filenameFour = "get"
process.on "exit", ->
fs.unlinkSync filepathOne
fs.unlinkSync filepathTwoAbs
fs.unlinkSync filenameThree
fs.unlinkSync filenameFour
assert.equal 1, watchSeenOne
assert.equal 2, watchSeenTwo
assert.equal 1, watchSeenThree
assert.equal 1, watchSeenFour
return
fs.writeFileSync filepathOne, "hello"
assert.throws (->
fs.watchFile filepathOne
return
), (e) ->
e.message is "watchFile requires a listener function"
assert.doesNotThrow ->
fs.watchFile filepathOne, (curr, prev) ->
fs.unwatchFile filepathOne
++watchSeenOne
return
return
setTimeout (->
fs.writeFileSync filepathOne, "world"
return
), 1000
process.chdir testDir
fs.writeFileSync filepathTwoAbs, "howdy"
assert.throws (->
fs.watchFile filepathTwo
return
), (e) ->
e.message is "watchFile requires a listener function"
assert.doesNotThrow ->
a = (curr, prev) ->
fs.unwatchFile filepathTwo, a
++watchSeenTwo
return
b = (curr, prev) ->
fs.unwatchFile filepathTwo, b
++watchSeenTwo
return
fs.watchFile filepathTwo, a
fs.watchFile filepathTwo, b
return
setTimeout (->
fs.writeFileSync filepathTwoAbs, "PI:NAME:<NAME>END_PI"
return
), 1000
assert.doesNotThrow ->
a = (curr, prev) ->
assert.ok 0 # should not run
return
b = (curr, prev) ->
fs.unwatchFile filenameThree, b
++watchSeenThree
return
fs.watchFile filenameThree, a
fs.watchFile filenameThree, b
fs.unwatchFile filenameThree, a
return
setTimeout (->
fs.writeFileSync filenameThree, "PI:NAME:<NAME>END_PI"
return
), 1000
setTimeout (->
fs.writeFileSync filenameFour, "hey"
return
), 200
setTimeout (->
fs.writeFileSync filenameFour, "hey"
return
), 500
assert.doesNotThrow ->
a = (curr, prev) ->
++watchSeenFour
assert.equal 1, watchSeenFour
fs.unwatchFile "." + path.sep + filenameFour, a
return
fs.watchFile filenameFour, a
return
|
[
{
"context": " \"mailto:#{@get('email')}?subject=#{subject}&cc=inquiries@artsy.net\"\n\n getSimpleWebsite: ->\n return \"\" unless @ge",
"end": 4204,
"score": 0.9985748529434204,
"start": 4185,
"tag": "EMAIL",
"value": "inquiries@artsy.net"
}
] | src/mobile/models/partner.coffee | jo-rs/force | 0 | _ = require 'underscore'
Backbone = require 'backbone'
sd = require('sharify').data
Artist = require '../models/artist.coffee'
Icon = require '../models/icon.coffee'
Profile = require '../models/profile.coffee'
Artists = require '../collections/artists.coffee'
PartnerLocations = require '../collections/partner_locations.coffee'
PartnerShows = require '../collections/partner_shows.coffee'
fetchUntilEnd = require('artsy-backbone-mixins').Fetch().fetchUntilEnd
Relations = require './mixins/relations/partner.coffee'
INSTITUTION = 'institution'
GALLERY_DEFAULT = 'gallery_default'
GALLERY_DEPRECATED = 'gallery_deprecated'
GALLERY_ONE = 'gallery_one'
GALLERY_TWO = 'gallery_two'
GALLERY_THREE = 'gallery_three'
GALLERY_FOUR = 'gallery_four'
GALLERY_FIVE = 'gallery_five'
GALLERY_SIX = 'gallery_six'
GALLERY_SEVEN = 'gallery_seven'
GALLERY_EIGHT = 'gallery_eight'
ACTIVE_PARTNER_LAYOUTS = [ INSTITUTION, GALLERY_ONE, GALLERY_TWO, GALLERY_THREE, GALLERY_FOUR, GALLERY_FIVE, GALLERY_SIX, GALLERY_SEVEN, GALLERY_EIGHT ]
module.exports = class Partner extends Backbone.Model
_.extend @prototype, Relations
urlRoot: "#{sd.API_URL}/api/v1/partner"
href: ->
"/#{@get('default_profile_id')}"
displayName: ->
@get('name')
fetchLocations: ->
@related().locations.fetch arguments...
fetchProfile: (options = {}) ->
new Profile(id: @get('id')).fetch options
nestedProfileImage: ->
img = @get('profile')?.bestAvailableImage()
return (if img? then img else '')
icon: ->
new Icon @get('icon'), profileId: @get('id')
# Fetches the partners artists and groups them into represented and unrepresented.
# The success callback provides (representedArtists, unrepresentedArtists).
#
# @param {Object} options Provide `success` and `error` callbacks similar to Backbone's fetch
fetchArtistGroups: (options = {}) ->
partnerArtists = new Backbone.Collection
partnerArtists.url = "#{sd.API_URL}/api/v1/partner/#{@get 'id'}/partner_artists"
fetchUntilEnd.call partnerArtists,
data:
display_on_partner_profile: 1
size: 20
partner_id: @get('id')
artists: 'artists'
success: =>
# Represented artists are flagged as represented but don't need artworks
representedArtists = []
# Unrepresented artists have artworks but are not represented
unrepresentedArtists = []
for pa in partnerArtists.models
if pa.get('represented_by')
representedArtists.push @artistFromPartnerArtist(pa)
else if pa.get('published_artworks_count') > 0
unrepresentedArtists.push @artistFromPartnerArtist(pa)
options.success?(
new Artists (representedArtists)
new Artists (unrepresentedArtists)
)
error: options.error
# Fetches the partner's shows and returns one as featured.
#
# @param {Object} options Provide `success` and `error` callbacks similar to Backbone's fetch
fetchFeaturedShow: (options = {}) ->
partnerShows = new PartnerShows(null, partnerId: @get 'id')
fetchUntilEnd.call partnerShows,
data:
partner_id: @get('id')
shows: 'shows'
success: ->
options.success?(partnerShows.featuredShow())
error: options.error
hasSection: (section, profile, articles) ->
switch section
when 'articles'
articles.length > 0
when 'artists'
profile.isGallery() and @get('partner_artists_count') > 0
when 'collection'
profile.isInstitution() and @get('published_not_for_sale_artworks_count') > 0
when 'shop'
profile.isInstitution() and @get('published_for_sale_artworks_count') > 0
when 'shows'
@get('displayable_shows_count') > 0
else
false
setEmailFromLocations: (partnerLocations) ->
return if @get 'email'
try
@set 'email', partnerLocations.first().get('email')
getMailTo: ->
return "" unless @get('email') and @get('type') is 'Gallery'
subject = encodeURIComponent "Connecting with #{@get('name')} via Artsy"
"mailto:#{@get('email')}?subject=#{subject}&cc=inquiries@artsy.net"
getSimpleWebsite: ->
return "" unless @get('website')
@get('website').replace('http://', '').replace(/\/$/g, '')
artistFromPartnerArtist: (partnerArtist) ->
artist = new Artist partnerArtist.get('artist')
# Rewrite image_url to use partner's cover image if exists
if partnerArtist.has('image_versions') and partnerArtist.has('image_url')
artist.set 'image_url': partnerArtist.get('image_url')
artist.set 'image_versions': partnerArtist.get('image_versions')
artist
module.exports.INSTITUTION = INSTITUTION
module.exports.GALLERY_DEFAULT = GALLERY_DEFAULT
module.exports.GALLERY_DEPRECATED = GALLERY_DEPRECATED
module.exports.GALLERY_ONE = GALLERY_ONE
module.exports.GALLERY_TWO = GALLERY_TWO
module.exports.GALLERY_THREE = GALLERY_THREE
module.exports.GALLERY_FOUR = GALLERY_FOUR
module.exports.GALLERY_FIVE = GALLERY_FIVE
module.exports.GALLERY_SIX = GALLERY_SIX
module.exports.GALLERY_SEVEN = GALLERY_SEVEN
module.exports.GALLERY_EIGHT = GALLERY_EIGHT
module.exports.ACTIVE_PARTNER_LAYOUTS = ACTIVE_PARTNER_LAYOUTS | 129868 | _ = require 'underscore'
Backbone = require 'backbone'
sd = require('sharify').data
Artist = require '../models/artist.coffee'
Icon = require '../models/icon.coffee'
Profile = require '../models/profile.coffee'
Artists = require '../collections/artists.coffee'
PartnerLocations = require '../collections/partner_locations.coffee'
PartnerShows = require '../collections/partner_shows.coffee'
fetchUntilEnd = require('artsy-backbone-mixins').Fetch().fetchUntilEnd
Relations = require './mixins/relations/partner.coffee'
INSTITUTION = 'institution'
GALLERY_DEFAULT = 'gallery_default'
GALLERY_DEPRECATED = 'gallery_deprecated'
GALLERY_ONE = 'gallery_one'
GALLERY_TWO = 'gallery_two'
GALLERY_THREE = 'gallery_three'
GALLERY_FOUR = 'gallery_four'
GALLERY_FIVE = 'gallery_five'
GALLERY_SIX = 'gallery_six'
GALLERY_SEVEN = 'gallery_seven'
GALLERY_EIGHT = 'gallery_eight'
ACTIVE_PARTNER_LAYOUTS = [ INSTITUTION, GALLERY_ONE, GALLERY_TWO, GALLERY_THREE, GALLERY_FOUR, GALLERY_FIVE, GALLERY_SIX, GALLERY_SEVEN, GALLERY_EIGHT ]
module.exports = class Partner extends Backbone.Model
_.extend @prototype, Relations
urlRoot: "#{sd.API_URL}/api/v1/partner"
href: ->
"/#{@get('default_profile_id')}"
displayName: ->
@get('name')
fetchLocations: ->
@related().locations.fetch arguments...
fetchProfile: (options = {}) ->
new Profile(id: @get('id')).fetch options
nestedProfileImage: ->
img = @get('profile')?.bestAvailableImage()
return (if img? then img else '')
icon: ->
new Icon @get('icon'), profileId: @get('id')
# Fetches the partners artists and groups them into represented and unrepresented.
# The success callback provides (representedArtists, unrepresentedArtists).
#
# @param {Object} options Provide `success` and `error` callbacks similar to Backbone's fetch
fetchArtistGroups: (options = {}) ->
partnerArtists = new Backbone.Collection
partnerArtists.url = "#{sd.API_URL}/api/v1/partner/#{@get 'id'}/partner_artists"
fetchUntilEnd.call partnerArtists,
data:
display_on_partner_profile: 1
size: 20
partner_id: @get('id')
artists: 'artists'
success: =>
# Represented artists are flagged as represented but don't need artworks
representedArtists = []
# Unrepresented artists have artworks but are not represented
unrepresentedArtists = []
for pa in partnerArtists.models
if pa.get('represented_by')
representedArtists.push @artistFromPartnerArtist(pa)
else if pa.get('published_artworks_count') > 0
unrepresentedArtists.push @artistFromPartnerArtist(pa)
options.success?(
new Artists (representedArtists)
new Artists (unrepresentedArtists)
)
error: options.error
# Fetches the partner's shows and returns one as featured.
#
# @param {Object} options Provide `success` and `error` callbacks similar to Backbone's fetch
fetchFeaturedShow: (options = {}) ->
partnerShows = new PartnerShows(null, partnerId: @get 'id')
fetchUntilEnd.call partnerShows,
data:
partner_id: @get('id')
shows: 'shows'
success: ->
options.success?(partnerShows.featuredShow())
error: options.error
hasSection: (section, profile, articles) ->
switch section
when 'articles'
articles.length > 0
when 'artists'
profile.isGallery() and @get('partner_artists_count') > 0
when 'collection'
profile.isInstitution() and @get('published_not_for_sale_artworks_count') > 0
when 'shop'
profile.isInstitution() and @get('published_for_sale_artworks_count') > 0
when 'shows'
@get('displayable_shows_count') > 0
else
false
setEmailFromLocations: (partnerLocations) ->
return if @get 'email'
try
@set 'email', partnerLocations.first().get('email')
getMailTo: ->
return "" unless @get('email') and @get('type') is 'Gallery'
subject = encodeURIComponent "Connecting with #{@get('name')} via Artsy"
"mailto:#{@get('email')}?subject=#{subject}&cc=<EMAIL>"
getSimpleWebsite: ->
return "" unless @get('website')
@get('website').replace('http://', '').replace(/\/$/g, '')
artistFromPartnerArtist: (partnerArtist) ->
artist = new Artist partnerArtist.get('artist')
# Rewrite image_url to use partner's cover image if exists
if partnerArtist.has('image_versions') and partnerArtist.has('image_url')
artist.set 'image_url': partnerArtist.get('image_url')
artist.set 'image_versions': partnerArtist.get('image_versions')
artist
module.exports.INSTITUTION = INSTITUTION
module.exports.GALLERY_DEFAULT = GALLERY_DEFAULT
module.exports.GALLERY_DEPRECATED = GALLERY_DEPRECATED
module.exports.GALLERY_ONE = GALLERY_ONE
module.exports.GALLERY_TWO = GALLERY_TWO
module.exports.GALLERY_THREE = GALLERY_THREE
module.exports.GALLERY_FOUR = GALLERY_FOUR
module.exports.GALLERY_FIVE = GALLERY_FIVE
module.exports.GALLERY_SIX = GALLERY_SIX
module.exports.GALLERY_SEVEN = GALLERY_SEVEN
module.exports.GALLERY_EIGHT = GALLERY_EIGHT
module.exports.ACTIVE_PARTNER_LAYOUTS = ACTIVE_PARTNER_LAYOUTS | true | _ = require 'underscore'
Backbone = require 'backbone'
sd = require('sharify').data
Artist = require '../models/artist.coffee'
Icon = require '../models/icon.coffee'
Profile = require '../models/profile.coffee'
Artists = require '../collections/artists.coffee'
PartnerLocations = require '../collections/partner_locations.coffee'
PartnerShows = require '../collections/partner_shows.coffee'
fetchUntilEnd = require('artsy-backbone-mixins').Fetch().fetchUntilEnd
Relations = require './mixins/relations/partner.coffee'
INSTITUTION = 'institution'
GALLERY_DEFAULT = 'gallery_default'
GALLERY_DEPRECATED = 'gallery_deprecated'
GALLERY_ONE = 'gallery_one'
GALLERY_TWO = 'gallery_two'
GALLERY_THREE = 'gallery_three'
GALLERY_FOUR = 'gallery_four'
GALLERY_FIVE = 'gallery_five'
GALLERY_SIX = 'gallery_six'
GALLERY_SEVEN = 'gallery_seven'
GALLERY_EIGHT = 'gallery_eight'
ACTIVE_PARTNER_LAYOUTS = [ INSTITUTION, GALLERY_ONE, GALLERY_TWO, GALLERY_THREE, GALLERY_FOUR, GALLERY_FIVE, GALLERY_SIX, GALLERY_SEVEN, GALLERY_EIGHT ]
module.exports = class Partner extends Backbone.Model
_.extend @prototype, Relations
urlRoot: "#{sd.API_URL}/api/v1/partner"
href: ->
"/#{@get('default_profile_id')}"
displayName: ->
@get('name')
fetchLocations: ->
@related().locations.fetch arguments...
fetchProfile: (options = {}) ->
new Profile(id: @get('id')).fetch options
nestedProfileImage: ->
img = @get('profile')?.bestAvailableImage()
return (if img? then img else '')
icon: ->
new Icon @get('icon'), profileId: @get('id')
# Fetches the partners artists and groups them into represented and unrepresented.
# The success callback provides (representedArtists, unrepresentedArtists).
#
# @param {Object} options Provide `success` and `error` callbacks similar to Backbone's fetch
fetchArtistGroups: (options = {}) ->
partnerArtists = new Backbone.Collection
partnerArtists.url = "#{sd.API_URL}/api/v1/partner/#{@get 'id'}/partner_artists"
fetchUntilEnd.call partnerArtists,
data:
display_on_partner_profile: 1
size: 20
partner_id: @get('id')
artists: 'artists'
success: =>
# Represented artists are flagged as represented but don't need artworks
representedArtists = []
# Unrepresented artists have artworks but are not represented
unrepresentedArtists = []
for pa in partnerArtists.models
if pa.get('represented_by')
representedArtists.push @artistFromPartnerArtist(pa)
else if pa.get('published_artworks_count') > 0
unrepresentedArtists.push @artistFromPartnerArtist(pa)
options.success?(
new Artists (representedArtists)
new Artists (unrepresentedArtists)
)
error: options.error
# Fetches the partner's shows and returns one as featured.
#
# @param {Object} options Provide `success` and `error` callbacks similar to Backbone's fetch
fetchFeaturedShow: (options = {}) ->
partnerShows = new PartnerShows(null, partnerId: @get 'id')
fetchUntilEnd.call partnerShows,
data:
partner_id: @get('id')
shows: 'shows'
success: ->
options.success?(partnerShows.featuredShow())
error: options.error
hasSection: (section, profile, articles) ->
switch section
when 'articles'
articles.length > 0
when 'artists'
profile.isGallery() and @get('partner_artists_count') > 0
when 'collection'
profile.isInstitution() and @get('published_not_for_sale_artworks_count') > 0
when 'shop'
profile.isInstitution() and @get('published_for_sale_artworks_count') > 0
when 'shows'
@get('displayable_shows_count') > 0
else
false
setEmailFromLocations: (partnerLocations) ->
return if @get 'email'
try
@set 'email', partnerLocations.first().get('email')
getMailTo: ->
return "" unless @get('email') and @get('type') is 'Gallery'
subject = encodeURIComponent "Connecting with #{@get('name')} via Artsy"
"mailto:#{@get('email')}?subject=#{subject}&cc=PI:EMAIL:<EMAIL>END_PI"
getSimpleWebsite: ->
return "" unless @get('website')
@get('website').replace('http://', '').replace(/\/$/g, '')
artistFromPartnerArtist: (partnerArtist) ->
artist = new Artist partnerArtist.get('artist')
# Rewrite image_url to use partner's cover image if exists
if partnerArtist.has('image_versions') and partnerArtist.has('image_url')
artist.set 'image_url': partnerArtist.get('image_url')
artist.set 'image_versions': partnerArtist.get('image_versions')
artist
module.exports.INSTITUTION = INSTITUTION
module.exports.GALLERY_DEFAULT = GALLERY_DEFAULT
module.exports.GALLERY_DEPRECATED = GALLERY_DEPRECATED
module.exports.GALLERY_ONE = GALLERY_ONE
module.exports.GALLERY_TWO = GALLERY_TWO
module.exports.GALLERY_THREE = GALLERY_THREE
module.exports.GALLERY_FOUR = GALLERY_FOUR
module.exports.GALLERY_FIVE = GALLERY_FIVE
module.exports.GALLERY_SIX = GALLERY_SIX
module.exports.GALLERY_SEVEN = GALLERY_SEVEN
module.exports.GALLERY_EIGHT = GALLERY_EIGHT
module.exports.ACTIVE_PARTNER_LAYOUTS = ACTIVE_PARTNER_LAYOUTS |
[
{
"context": "#\t> File Name: app.coffee\n#\t> Author: LY\n#\t> Mail: ly.franky@gmail.com\n#\t> Created Time: W",
"end": 40,
"score": 0.9978066086769104,
"start": 38,
"tag": "USERNAME",
"value": "LY"
},
{
"context": "#\t> File Name: app.coffee\n#\t> Author: LY\n#\t> Mail: ly.franky@g... | server/app.coffee | Booker-Z/MIAC-website | 0 | # > File Name: app.coffee
# > Author: LY
# > Mail: ly.franky@gmail.com
# > Created Time: Wednesday, November 19, 2014 AM10:41:51 CST
###
* profiling
###
#require('look').start()
###
* import package's module that would use
###
express = require 'express'
path = require 'path'
bodyParser = require 'body-parser'
cookieParser = require 'cookie-parser'
favicon = require 'static-favicon'
busbody = require "connect-busboy"
session = require 'express-session'
logger = require 'morgan'
moment = require 'moment'
multer = require 'multer'
AsyncProfile = require 'async-profile'
require('nodetime').profile({
accountKey: 'b4548cd536a0e53bc795df5044e9d57fc5459953',
appName: 'Node.js Application'
})
###
* import module that would use writen by laiy
###
db = require './db/db.coffee'
config = require './config.coffee'
util = require './common/util.coffee'
###
* import route module
###
indexRoute = require './routes/index.coffee'
registerRoute = require './routes/register.coffee'
logRoute = require './routes/log.coffee'
aboutRoute = require './routes/about.coffee'
articleRoute = require './routes/article.coffee'
messageRoute = require './routes/message.coffee'
worksRoute = require './routes/works.coffee'
discussRoute = require './routes/discuss.coffee'
userRoute = require './routes/user.coffee'
albumRoute = require './routes/album.coffee'
###
* create a application using MVC frame 'Express'
###
app = express()
###
* use modules
###
app.locals.moment = moment
app.use favicon()
app.use logger('dev')
app.use bodyParser.json()
app.use bodyParser.urlencoded()
app.use busbody { immediate: true }
app.use express.static(path.join(__dirname, '/views'))
app.use cookieParser()
app.use session {
secret: config.SECRET_KEY
resave: yes
saveUninitialized: yes
}
app.use util.setLocalsUser
app.use multer({ dest: './views/assets/img/user' })
###
* set views
###
app.set 'views', path.join(__dirname, 'views')
app.set 'view engine', 'jade'
###
* use routes' handler
###
app.use '/', indexRoute
app.use '/Register', registerRoute
app.use '/Log', logRoute
app.use '/About', aboutRoute
app.use '/Article', articleRoute
app.use '/Message', messageRoute
app.use '/Works', worksRoute
app.use '/Discuss', discussRoute
app.use '/User', userRoute
app.use '/Album', albumRoute
###
* init database
###
db.init()
###
* let server listening at port 2333
###
app.listen 2333
module.exports = app
| 164848 | # > File Name: app.coffee
# > Author: LY
# > Mail: <EMAIL>
# > Created Time: Wednesday, November 19, 2014 AM10:41:51 CST
###
* profiling
###
#require('look').start()
###
* import package's module that would use
###
express = require 'express'
path = require 'path'
bodyParser = require 'body-parser'
cookieParser = require 'cookie-parser'
favicon = require 'static-favicon'
busbody = require "connect-busboy"
session = require 'express-session'
logger = require 'morgan'
moment = require 'moment'
multer = require 'multer'
AsyncProfile = require 'async-profile'
require('nodetime').profile({
accountKey: '<KEY>',
appName: 'Node.js Application'
})
###
* import module that would use writen by laiy
###
db = require './db/db.coffee'
config = require './config.coffee'
util = require './common/util.coffee'
###
* import route module
###
indexRoute = require './routes/index.coffee'
registerRoute = require './routes/register.coffee'
logRoute = require './routes/log.coffee'
aboutRoute = require './routes/about.coffee'
articleRoute = require './routes/article.coffee'
messageRoute = require './routes/message.coffee'
worksRoute = require './routes/works.coffee'
discussRoute = require './routes/discuss.coffee'
userRoute = require './routes/user.coffee'
albumRoute = require './routes/album.coffee'
###
* create a application using MVC frame 'Express'
###
app = express()
###
* use modules
###
app.locals.moment = moment
app.use favicon()
app.use logger('dev')
app.use bodyParser.json()
app.use bodyParser.urlencoded()
app.use busbody { immediate: true }
app.use express.static(path.join(__dirname, '/views'))
app.use cookieParser()
app.use session {
secret: config.SECRET_KEY
resave: yes
saveUninitialized: yes
}
app.use util.setLocalsUser
app.use multer({ dest: './views/assets/img/user' })
###
* set views
###
app.set 'views', path.join(__dirname, 'views')
app.set 'view engine', 'jade'
###
* use routes' handler
###
app.use '/', indexRoute
app.use '/Register', registerRoute
app.use '/Log', logRoute
app.use '/About', aboutRoute
app.use '/Article', articleRoute
app.use '/Message', messageRoute
app.use '/Works', worksRoute
app.use '/Discuss', discussRoute
app.use '/User', userRoute
app.use '/Album', albumRoute
###
* init database
###
db.init()
###
* let server listening at port 2333
###
app.listen 2333
module.exports = app
| true | # > File Name: app.coffee
# > Author: LY
# > Mail: PI:EMAIL:<EMAIL>END_PI
# > Created Time: Wednesday, November 19, 2014 AM10:41:51 CST
###
* profiling
###
#require('look').start()
###
* import package's module that would use
###
express = require 'express'
path = require 'path'
bodyParser = require 'body-parser'
cookieParser = require 'cookie-parser'
favicon = require 'static-favicon'
busbody = require "connect-busboy"
session = require 'express-session'
logger = require 'morgan'
moment = require 'moment'
multer = require 'multer'
AsyncProfile = require 'async-profile'
require('nodetime').profile({
accountKey: 'PI:KEY:<KEY>END_PI',
appName: 'Node.js Application'
})
###
* import module that would use writen by laiy
###
db = require './db/db.coffee'
config = require './config.coffee'
util = require './common/util.coffee'
###
* import route module
###
indexRoute = require './routes/index.coffee'
registerRoute = require './routes/register.coffee'
logRoute = require './routes/log.coffee'
aboutRoute = require './routes/about.coffee'
articleRoute = require './routes/article.coffee'
messageRoute = require './routes/message.coffee'
worksRoute = require './routes/works.coffee'
discussRoute = require './routes/discuss.coffee'
userRoute = require './routes/user.coffee'
albumRoute = require './routes/album.coffee'
###
* create a application using MVC frame 'Express'
###
app = express()
###
* use modules
###
app.locals.moment = moment
app.use favicon()
app.use logger('dev')
app.use bodyParser.json()
app.use bodyParser.urlencoded()
app.use busbody { immediate: true }
app.use express.static(path.join(__dirname, '/views'))
app.use cookieParser()
app.use session {
secret: config.SECRET_KEY
resave: yes
saveUninitialized: yes
}
app.use util.setLocalsUser
app.use multer({ dest: './views/assets/img/user' })
###
* set views
###
app.set 'views', path.join(__dirname, 'views')
app.set 'view engine', 'jade'
###
* use routes' handler
###
app.use '/', indexRoute
app.use '/Register', registerRoute
app.use '/Log', logRoute
app.use '/About', aboutRoute
app.use '/Article', articleRoute
app.use '/Message', messageRoute
app.use '/Works', worksRoute
app.use '/Discuss', discussRoute
app.use '/User', userRoute
app.use '/Album', albumRoute
###
* init database
###
db.init()
###
* let server listening at port 2333
###
app.listen 2333
module.exports = app
|
[
{
"context": "lishableKey = if application.isProduction() then 'pk_live_27jQZozjDGN1HSUTnSuM578g' else 'pk_test_zG5UwVu6Ww8YhtE9ZYh0JO6a'\n\nif me.i",
"end": 85,
"score": 0.9977640509605408,
"start": 53,
"tag": "KEY",
"value": "pk_live_27jQZozjDGN1HSUTnSuM578g"
},
{
"context": "n() t... | app/core/services/stripe.coffee | l34kr/codecombat | 2 | publishableKey = if application.isProduction() then 'pk_live_27jQZozjDGN1HSUTnSuM578g' else 'pk_test_zG5UwVu6Ww8YhtE9ZYh0JO6a'
if me.isAnonymous()
module.exports = {}
else if not StripeCheckout?
module.exports = {}
console.error "Failure loading StripeCheckout API, returning empty object."
else
module.exports = handler = StripeCheckout.configure({
key: publishableKey
name: 'CodeCombat'
email: me.get('email')
image: "https://codecombat.com/images/pages/base/logo_square_250.png"
token: (token) ->
handler.trigger 'received-token', { token: token }
Backbone.Mediator.publish 'stripe:received-token', { token: token }
locale: 'auto'
})
_.extend(handler, Backbone.Events)
| 188078 | publishableKey = if application.isProduction() then '<KEY>' else '<KEY>'
if me.isAnonymous()
module.exports = {}
else if not StripeCheckout?
module.exports = {}
console.error "Failure loading StripeCheckout API, returning empty object."
else
module.exports = handler = StripeCheckout.configure({
key: publishableKey
name: 'CodeCombat'
email: me.get('email')
image: "https://codecombat.com/images/pages/base/logo_square_250.png"
token: (token) ->
handler.trigger 'received-token', { token: token }
Backbone.Mediator.publish 'stripe:received-token', { token: token }
locale: 'auto'
})
_.extend(handler, Backbone.Events)
| true | publishableKey = if application.isProduction() then 'PI:KEY:<KEY>END_PI' else 'PI:KEY:<KEY>END_PI'
if me.isAnonymous()
module.exports = {}
else if not StripeCheckout?
module.exports = {}
console.error "Failure loading StripeCheckout API, returning empty object."
else
module.exports = handler = StripeCheckout.configure({
key: publishableKey
name: 'CodeCombat'
email: me.get('email')
image: "https://codecombat.com/images/pages/base/logo_square_250.png"
token: (token) ->
handler.trigger 'received-token', { token: token }
Backbone.Mediator.publish 'stripe:received-token', { token: token }
locale: 'auto'
})
_.extend(handler, Backbone.Events)
|
[
{
"context": "ntity: ->\n @getText().then (val) ->\n 'Iron Man' if val is 'Tony Stark'\n",
"end": 178,
"score": 0.7175605893135071,
"start": 170,
"tag": "NAME",
"value": "Iron Man"
},
{
"context": "ext().then (val) ->\n 'Iron Man' if val is 'Tony Stark'\n",
"en... | test/integration/widgets/list_item.coffee | piretmadrus/newPioneer | 203 | module.exports = ->
this.Widgets = this.Widgets || {}
return this.Widgets.ListItem = this.Widget.extend
getIdentity: ->
@getText().then (val) ->
'Iron Man' if val is 'Tony Stark'
| 8758 | module.exports = ->
this.Widgets = this.Widgets || {}
return this.Widgets.ListItem = this.Widget.extend
getIdentity: ->
@getText().then (val) ->
'<NAME>' if val is '<NAME>'
| true | module.exports = ->
this.Widgets = this.Widgets || {}
return this.Widgets.ListItem = this.Widget.extend
getIdentity: ->
@getText().then (val) ->
'PI:NAME:<NAME>END_PI' if val is 'PI:NAME:<NAME>END_PI'
|
[
{
"context": " push_counters = {}\n patterns =\n key: /[a-zA-Z0-9_-]+|(?=\\[\\])/g\n push: /^$/\n fixed: /^\\d+$/\n",
"end": 491,
"score": 0.781682014465332,
"start": 480,
"tag": "KEY",
"value": "zA-Z0-9_-]+"
}
] | app/assets/javascripts/lib/utils/serialize_form.coffee | sheedy/rizzo | 1 | # ------------------------------------------------------------------------------
# Returns a serialized object of a set of form parameters
# Results are defined by the name of the inputs
#
# Depth of the array is defined by sq brackets eg. name="search[from]"
# will return {search:{from: ''}}
# ------------------------------------------------------------------------------
define ->
class SerializeForm
self = this
push_counters = {}
patterns =
key: /[a-zA-Z0-9_-]+|(?=\[\])/g
push: /^$/
fixed: /^\d+$/
named: /^[a-zA-Z0-9_-]+$/
self.build = (base, key, value) ->
base[key] = value
base
self.push_counter = (key, i) ->
push_counters[key] = 0 if push_counters[key] is undefined
if i is undefined
push_counters[key]++
else push_counters[key] = ++i if i isnt undefined and i > push_counters[key]
constructor: (form)->
push_counters = {}
if form.jquery is undefined then form = $(form)
return buildObject(form, {})
buildObject = (form, formParams) ->
$.each form.serializeArray(), ->
k = undefined
keys = @name.match(patterns.key)
merge = (if @value is 'on' then true else @value)
reverse_key = @name
while (k = keys.pop()) isnt undefined
reverse_key = reverse_key.replace(new RegExp("\\[" + k + "\\]$"), "")
if k.match(patterns.push)
merge = self.build([], self.push_counter(reverse_key), merge)
else if k.match(patterns.fixed)
self.push_counter reverse_key, k
merge = self.build([], k, merge)
else merge = self.build({}, k, merge) if k.match(patterns.named)
formParams = $.extend(true, formParams, merge)
formParams
| 87055 | # ------------------------------------------------------------------------------
# Returns a serialized object of a set of form parameters
# Results are defined by the name of the inputs
#
# Depth of the array is defined by sq brackets eg. name="search[from]"
# will return {search:{from: ''}}
# ------------------------------------------------------------------------------
define ->
class SerializeForm
self = this
push_counters = {}
patterns =
key: /[a-<KEY>|(?=\[\])/g
push: /^$/
fixed: /^\d+$/
named: /^[a-zA-Z0-9_-]+$/
self.build = (base, key, value) ->
base[key] = value
base
self.push_counter = (key, i) ->
push_counters[key] = 0 if push_counters[key] is undefined
if i is undefined
push_counters[key]++
else push_counters[key] = ++i if i isnt undefined and i > push_counters[key]
constructor: (form)->
push_counters = {}
if form.jquery is undefined then form = $(form)
return buildObject(form, {})
buildObject = (form, formParams) ->
$.each form.serializeArray(), ->
k = undefined
keys = @name.match(patterns.key)
merge = (if @value is 'on' then true else @value)
reverse_key = @name
while (k = keys.pop()) isnt undefined
reverse_key = reverse_key.replace(new RegExp("\\[" + k + "\\]$"), "")
if k.match(patterns.push)
merge = self.build([], self.push_counter(reverse_key), merge)
else if k.match(patterns.fixed)
self.push_counter reverse_key, k
merge = self.build([], k, merge)
else merge = self.build({}, k, merge) if k.match(patterns.named)
formParams = $.extend(true, formParams, merge)
formParams
| true | # ------------------------------------------------------------------------------
# Returns a serialized object of a set of form parameters
# Results are defined by the name of the inputs
#
# Depth of the array is defined by sq brackets eg. name="search[from]"
# will return {search:{from: ''}}
# ------------------------------------------------------------------------------
define ->
class SerializeForm
self = this
push_counters = {}
patterns =
key: /[a-PI:KEY:<KEY>END_PI|(?=\[\])/g
push: /^$/
fixed: /^\d+$/
named: /^[a-zA-Z0-9_-]+$/
self.build = (base, key, value) ->
base[key] = value
base
self.push_counter = (key, i) ->
push_counters[key] = 0 if push_counters[key] is undefined
if i is undefined
push_counters[key]++
else push_counters[key] = ++i if i isnt undefined and i > push_counters[key]
constructor: (form)->
push_counters = {}
if form.jquery is undefined then form = $(form)
return buildObject(form, {})
buildObject = (form, formParams) ->
$.each form.serializeArray(), ->
k = undefined
keys = @name.match(patterns.key)
merge = (if @value is 'on' then true else @value)
reverse_key = @name
while (k = keys.pop()) isnt undefined
reverse_key = reverse_key.replace(new RegExp("\\[" + k + "\\]$"), "")
if k.match(patterns.push)
merge = self.build([], self.push_counter(reverse_key), merge)
else if k.match(patterns.fixed)
self.push_counter reverse_key, k
merge = self.build([], k, merge)
else merge = self.build({}, k, merge) if k.match(patterns.named)
formParams = $.extend(true, formParams, merge)
formParams
|
[
{
"context": " command = new Command({\n project: '/home/fabian/.atom/packages/build-tools/spec/fixtures'\n n",
"end": 215,
"score": 0.9972490072250366,
"start": 209,
"tag": "USERNAME",
"value": "fabian"
},
{
"context": "/packages/build-tools/spec/fixtures'\n name: ... | spec/modifier-shell-spec.coffee | fstiewitz/build-tools-cpp | 3 | Shell = require '../lib/modifier/shell'
Command = require '../lib/provider/command'
describe 'Command Modifier - Shell', ->
command = null
beforeEach ->
command = new Command({
project: '/home/fabian/.atom/packages/build-tools/spec/fixtures'
name: 'Test'
command: 'echo Hello World'
wd: '.'
env: {}
modifier:
shell:
command: 'bash -c'
stdout:
highlighting: 'nh'
stderr:
highlighting: 'nh'
output:
console:
close_success: false
version: 1
})
command.getSpawnInfo()
Shell.postSplit command
it 'returns valid data', ->
expect(command.command).toBe 'bash'
expect(command.args).toEqual ['-c', 'echo Hello World']
| 169855 | Shell = require '../lib/modifier/shell'
Command = require '../lib/provider/command'
describe 'Command Modifier - Shell', ->
command = null
beforeEach ->
command = new Command({
project: '/home/fabian/.atom/packages/build-tools/spec/fixtures'
name: '<NAME>'
command: 'echo Hello World'
wd: '.'
env: {}
modifier:
shell:
command: 'bash -c'
stdout:
highlighting: 'nh'
stderr:
highlighting: 'nh'
output:
console:
close_success: false
version: 1
})
command.getSpawnInfo()
Shell.postSplit command
it 'returns valid data', ->
expect(command.command).toBe 'bash'
expect(command.args).toEqual ['-c', 'echo Hello World']
| true | Shell = require '../lib/modifier/shell'
Command = require '../lib/provider/command'
describe 'Command Modifier - Shell', ->
command = null
beforeEach ->
command = new Command({
project: '/home/fabian/.atom/packages/build-tools/spec/fixtures'
name: 'PI:NAME:<NAME>END_PI'
command: 'echo Hello World'
wd: '.'
env: {}
modifier:
shell:
command: 'bash -c'
stdout:
highlighting: 'nh'
stderr:
highlighting: 'nh'
output:
console:
close_success: false
version: 1
})
command.getSpawnInfo()
Shell.postSplit command
it 'returns valid data', ->
expect(command.command).toBe 'bash'
expect(command.args).toEqual ['-c', 'echo Hello World']
|
[
{
"context": "###\nCopyright 2016 Balena\n\nLicensed under the Apache License, Version 2.0 (",
"end": 25,
"score": 0.9857879877090454,
"start": 19,
"tag": "NAME",
"value": "Balena"
},
{
"context": "write '/dev/disk2', 'raspberry-pi',\n# \tusername: 'foobar'\n# .then ->\n# \tconsole.l... | lib/config.coffee | webmasterkai/resin-config-json | 0 | ###
Copyright 2016 Balena
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
###*
# @module config
###
Promise = require('bluebird')
imagefs = require('resin-image-fs')
utils = require('./utils')
###*
# @summary Read a config.json from an image
# @function
# @public
#
# @param {String} image - image or drive path
# @param {String} type - device type slug
#
# @fulfil {Object} - config.json
# @returns {Promise}
#
# @example
# config.read('/dev/disk2', 'raspberry-pi').then (config) ->
# console.log(config)
###
exports.read = (image, type) ->
return utils.getConfigPartitionInformationByType(type)
.then (configuration) ->
imagefs.readFile
image: image
partition: configuration.partition
path: configuration.path
.then(JSON.parse)
###*
# @summary Write a config.json to an image
# @function
# @public
#
# @param {String} image - image or drive path
# @param {String} type - device type slug
# @param {Object} config - config.json
#
# @returns {Promise}
#
# @example
# config.write '/dev/disk2', 'raspberry-pi',
# username: 'foobar'
# .then ->
# console.log('Done!')
###
exports.write = (image, type, config) ->
config = JSON.stringify(config)
return utils.getConfigPartitionInformationByType(type)
.then (configuration) ->
return imagefs.writeFile
image: image
partition: configuration.partition
path: configuration.path
, config
| 214478 | ###
Copyright 2016 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
###*
# @module config
###
Promise = require('bluebird')
imagefs = require('resin-image-fs')
utils = require('./utils')
###*
# @summary Read a config.json from an image
# @function
# @public
#
# @param {String} image - image or drive path
# @param {String} type - device type slug
#
# @fulfil {Object} - config.json
# @returns {Promise}
#
# @example
# config.read('/dev/disk2', 'raspberry-pi').then (config) ->
# console.log(config)
###
exports.read = (image, type) ->
return utils.getConfigPartitionInformationByType(type)
.then (configuration) ->
imagefs.readFile
image: image
partition: configuration.partition
path: configuration.path
.then(JSON.parse)
###*
# @summary Write a config.json to an image
# @function
# @public
#
# @param {String} image - image or drive path
# @param {String} type - device type slug
# @param {Object} config - config.json
#
# @returns {Promise}
#
# @example
# config.write '/dev/disk2', 'raspberry-pi',
# username: 'foobar'
# .then ->
# console.log('Done!')
###
exports.write = (image, type, config) ->
config = JSON.stringify(config)
return utils.getConfigPartitionInformationByType(type)
.then (configuration) ->
return imagefs.writeFile
image: image
partition: configuration.partition
path: configuration.path
, config
| true | ###
Copyright 2016 PI:NAME:<NAME>END_PI
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
###*
# @module config
###
Promise = require('bluebird')
imagefs = require('resin-image-fs')
utils = require('./utils')
###*
# @summary Read a config.json from an image
# @function
# @public
#
# @param {String} image - image or drive path
# @param {String} type - device type slug
#
# @fulfil {Object} - config.json
# @returns {Promise}
#
# @example
# config.read('/dev/disk2', 'raspberry-pi').then (config) ->
# console.log(config)
###
exports.read = (image, type) ->
return utils.getConfigPartitionInformationByType(type)
.then (configuration) ->
imagefs.readFile
image: image
partition: configuration.partition
path: configuration.path
.then(JSON.parse)
###*
# @summary Write a config.json to an image
# @function
# @public
#
# @param {String} image - image or drive path
# @param {String} type - device type slug
# @param {Object} config - config.json
#
# @returns {Promise}
#
# @example
# config.write '/dev/disk2', 'raspberry-pi',
# username: 'foobar'
# .then ->
# console.log('Done!')
###
exports.write = (image, type, config) ->
config = JSON.stringify(config)
return utils.getConfigPartitionInformationByType(type)
.then (configuration) ->
return imagefs.writeFile
image: image
partition: configuration.partition
path: configuration.path
, config
|
[
{
"context": "kens: @tokensCollection([@tokens('credit', 3)])) # Kati Jones\n\n # Decks\n\n decksArray = []\n _.times(40,",
"end": 1177,
"score": 0.9949029088020325,
"start": 1167,
"tag": "NAME",
"value": "Kati Jones"
}
] | src/coffee/services/default_game.coffee | ShayDavidson/monolith | 0 | class Monolith.Services.DefaultGame
_.extend this::, Monolith.Mixins.Sugar::
generate: ->
# Corp
corpId = @faceUpCard('04097') # GRNDL
corpHand1 = @faceUpCard('01086') # SEA Source
corpHand2 = @faceUpCard('01110') # Hedge Fund
corpHand3 = @faceUpCard('01099') # Scorched Earth
corpHand4 = @faceUpCard('01099') # Scorched Earth
corpHand5 = @faceUpCard('01090') # Tollbooth
corpTrash = @faceUpCard('04040') # Restructure
corpIce1 = @faceUpCard('02110', ice: true, hosted: @cards([@faceUpCard('01012', tokens:@tokensCollection([@tokens('virus', 2)]))])) # Eli 1.0
corpIce2 = @faceDownCard(cardId: '01090', ice: true) # Tollbooth
corpIce3 = @faceDownCard(ice: true)
# Runner
runnerId = @faceUpCard('03028') # Kit
runnerHand1 = @faceUpCard('04109') # Lucky Find
runnerHand2 = @faceUpCard('02047') # Test Run
runnerHand3 = @faceUpCard('04047') # Torch
runnerHand4 = @faceUpCard('01034') # Diesel
runnerCard1 = @faceUpCard('03036') # Monolith
runnerCard2 = @faceUpCard('02089') # Creeper
runnerCard3 = @faceUpCard('02091', tokens: @tokensCollection([@tokens('credit', 3)])) # Kati Jones
# Decks
decksArray = []
_.times(40, => decksArray.push(@faceDownCard()))
# Rows
runnerMainRow = @row([decksArray])
runnerIdRow = @row([runnerId, runnerCard1, runnerCard2])
runnerTempRow = @row([runnerCard3])
runnerRows = @rows([runnerTempRow, runnerIdRow, runnerMainRow, @row()])
corpMainRow = @row([decksArray, corpIce1, corpIce2])
corpIdRow = @row([corpId])
corpArchives = @row([corpTrash, corpIce3])
corpRows = @rows([@row(), corpIdRow, corpMainRow, corpArchives])
# Hands
runnerHand = @cards([runnerHand1, runnerHand2, runnerHand3, runnerHand4])
corpHand = @cards([corpHand1, corpHand2, corpHand3, corpHand4, corpHand5])
# Tokens
runnerCredits = @tokens('credit', 5)
runnerBrainDmg = @tokens('brain-damage', 3)
runnerTags = @tokens('tag', 3)
runnerTokens = @tokensCollection([runnerCredits, runnerTags, runnerBrainDmg])
corpCredits = @tokens('credit', 7)
corpBadPub = @tokens('bad-publicity', 1)
corpTokens = @tokensCollection([corpCredits, corpBadPub])
# Board
runnerModel = @player('runner', 'left', runnerRows, runnerHand, runnerTokens)
corpModel = @player('corp', 'right', corpRows, corpHand, corpTokens)
currentCard = @faceUpCard('06034') # Scrubbed
game = @game(runner: runnerModel, corp: corpModel, current: currentCard)
# Helpers
@game: (options) ->
new Monolith.ViewModels.GameViewModel(options)
@row: (piles = []) ->
piles = _.map(piles, (cards) ->
cards = [cards] unless _.isArray(cards)
new Monolith.ViewModels.PileViewModel(cards: new Backbone.Collection(cards))
)
new Monolith.ViewModels.RowViewModel(piles: new Backbone.Collection(piles))
@rows: (rows) ->
new Backbone.Collection(rows)
@cards: (cards) ->
new Backbone.Collection(cards)
@faceUpCard: (id, options = {}) ->
options.cardId = id
options.faceUp = true
new Monolith.ViewModels.CardViewModel(options)
@faceDownCard: (options = {}) ->
new Monolith.ViewModels.CardViewModel(options)
@player: (type, side, rows, hand, tokens) ->
new Monolith.ViewModels.PlayerViewModel(type: type, side: side, rows: rows, hand: hand, tokens: tokens)
@tokens: (type, amount) ->
new Monolith.ViewModels.TokensViewModel(type: type, amount: amount)
@tokensCollection: (tokens) ->
new Monolith.ViewModels.TokensViewCollection(tokens) | 84383 | class Monolith.Services.DefaultGame
_.extend this::, Monolith.Mixins.Sugar::
generate: ->
# Corp
corpId = @faceUpCard('04097') # GRNDL
corpHand1 = @faceUpCard('01086') # SEA Source
corpHand2 = @faceUpCard('01110') # Hedge Fund
corpHand3 = @faceUpCard('01099') # Scorched Earth
corpHand4 = @faceUpCard('01099') # Scorched Earth
corpHand5 = @faceUpCard('01090') # Tollbooth
corpTrash = @faceUpCard('04040') # Restructure
corpIce1 = @faceUpCard('02110', ice: true, hosted: @cards([@faceUpCard('01012', tokens:@tokensCollection([@tokens('virus', 2)]))])) # Eli 1.0
corpIce2 = @faceDownCard(cardId: '01090', ice: true) # Tollbooth
corpIce3 = @faceDownCard(ice: true)
# Runner
runnerId = @faceUpCard('03028') # Kit
runnerHand1 = @faceUpCard('04109') # Lucky Find
runnerHand2 = @faceUpCard('02047') # Test Run
runnerHand3 = @faceUpCard('04047') # Torch
runnerHand4 = @faceUpCard('01034') # Diesel
runnerCard1 = @faceUpCard('03036') # Monolith
runnerCard2 = @faceUpCard('02089') # Creeper
runnerCard3 = @faceUpCard('02091', tokens: @tokensCollection([@tokens('credit', 3)])) # <NAME>
# Decks
decksArray = []
_.times(40, => decksArray.push(@faceDownCard()))
# Rows
runnerMainRow = @row([decksArray])
runnerIdRow = @row([runnerId, runnerCard1, runnerCard2])
runnerTempRow = @row([runnerCard3])
runnerRows = @rows([runnerTempRow, runnerIdRow, runnerMainRow, @row()])
corpMainRow = @row([decksArray, corpIce1, corpIce2])
corpIdRow = @row([corpId])
corpArchives = @row([corpTrash, corpIce3])
corpRows = @rows([@row(), corpIdRow, corpMainRow, corpArchives])
# Hands
runnerHand = @cards([runnerHand1, runnerHand2, runnerHand3, runnerHand4])
corpHand = @cards([corpHand1, corpHand2, corpHand3, corpHand4, corpHand5])
# Tokens
runnerCredits = @tokens('credit', 5)
runnerBrainDmg = @tokens('brain-damage', 3)
runnerTags = @tokens('tag', 3)
runnerTokens = @tokensCollection([runnerCredits, runnerTags, runnerBrainDmg])
corpCredits = @tokens('credit', 7)
corpBadPub = @tokens('bad-publicity', 1)
corpTokens = @tokensCollection([corpCredits, corpBadPub])
# Board
runnerModel = @player('runner', 'left', runnerRows, runnerHand, runnerTokens)
corpModel = @player('corp', 'right', corpRows, corpHand, corpTokens)
currentCard = @faceUpCard('06034') # Scrubbed
game = @game(runner: runnerModel, corp: corpModel, current: currentCard)
# Helpers
@game: (options) ->
new Monolith.ViewModels.GameViewModel(options)
@row: (piles = []) ->
piles = _.map(piles, (cards) ->
cards = [cards] unless _.isArray(cards)
new Monolith.ViewModels.PileViewModel(cards: new Backbone.Collection(cards))
)
new Monolith.ViewModels.RowViewModel(piles: new Backbone.Collection(piles))
@rows: (rows) ->
new Backbone.Collection(rows)
@cards: (cards) ->
new Backbone.Collection(cards)
@faceUpCard: (id, options = {}) ->
options.cardId = id
options.faceUp = true
new Monolith.ViewModels.CardViewModel(options)
@faceDownCard: (options = {}) ->
new Monolith.ViewModels.CardViewModel(options)
@player: (type, side, rows, hand, tokens) ->
new Monolith.ViewModels.PlayerViewModel(type: type, side: side, rows: rows, hand: hand, tokens: tokens)
@tokens: (type, amount) ->
new Monolith.ViewModels.TokensViewModel(type: type, amount: amount)
@tokensCollection: (tokens) ->
new Monolith.ViewModels.TokensViewCollection(tokens) | true | class Monolith.Services.DefaultGame
_.extend this::, Monolith.Mixins.Sugar::
generate: ->
# Corp
corpId = @faceUpCard('04097') # GRNDL
corpHand1 = @faceUpCard('01086') # SEA Source
corpHand2 = @faceUpCard('01110') # Hedge Fund
corpHand3 = @faceUpCard('01099') # Scorched Earth
corpHand4 = @faceUpCard('01099') # Scorched Earth
corpHand5 = @faceUpCard('01090') # Tollbooth
corpTrash = @faceUpCard('04040') # Restructure
corpIce1 = @faceUpCard('02110', ice: true, hosted: @cards([@faceUpCard('01012', tokens:@tokensCollection([@tokens('virus', 2)]))])) # Eli 1.0
corpIce2 = @faceDownCard(cardId: '01090', ice: true) # Tollbooth
corpIce3 = @faceDownCard(ice: true)
# Runner
runnerId = @faceUpCard('03028') # Kit
runnerHand1 = @faceUpCard('04109') # Lucky Find
runnerHand2 = @faceUpCard('02047') # Test Run
runnerHand3 = @faceUpCard('04047') # Torch
runnerHand4 = @faceUpCard('01034') # Diesel
runnerCard1 = @faceUpCard('03036') # Monolith
runnerCard2 = @faceUpCard('02089') # Creeper
runnerCard3 = @faceUpCard('02091', tokens: @tokensCollection([@tokens('credit', 3)])) # PI:NAME:<NAME>END_PI
# Decks
decksArray = []
_.times(40, => decksArray.push(@faceDownCard()))
# Rows
runnerMainRow = @row([decksArray])
runnerIdRow = @row([runnerId, runnerCard1, runnerCard2])
runnerTempRow = @row([runnerCard3])
runnerRows = @rows([runnerTempRow, runnerIdRow, runnerMainRow, @row()])
corpMainRow = @row([decksArray, corpIce1, corpIce2])
corpIdRow = @row([corpId])
corpArchives = @row([corpTrash, corpIce3])
corpRows = @rows([@row(), corpIdRow, corpMainRow, corpArchives])
# Hands
runnerHand = @cards([runnerHand1, runnerHand2, runnerHand3, runnerHand4])
corpHand = @cards([corpHand1, corpHand2, corpHand3, corpHand4, corpHand5])
# Tokens
runnerCredits = @tokens('credit', 5)
runnerBrainDmg = @tokens('brain-damage', 3)
runnerTags = @tokens('tag', 3)
runnerTokens = @tokensCollection([runnerCredits, runnerTags, runnerBrainDmg])
corpCredits = @tokens('credit', 7)
corpBadPub = @tokens('bad-publicity', 1)
corpTokens = @tokensCollection([corpCredits, corpBadPub])
# Board
runnerModel = @player('runner', 'left', runnerRows, runnerHand, runnerTokens)
corpModel = @player('corp', 'right', corpRows, corpHand, corpTokens)
currentCard = @faceUpCard('06034') # Scrubbed
game = @game(runner: runnerModel, corp: corpModel, current: currentCard)
# Helpers
@game: (options) ->
new Monolith.ViewModels.GameViewModel(options)
@row: (piles = []) ->
piles = _.map(piles, (cards) ->
cards = [cards] unless _.isArray(cards)
new Monolith.ViewModels.PileViewModel(cards: new Backbone.Collection(cards))
)
new Monolith.ViewModels.RowViewModel(piles: new Backbone.Collection(piles))
@rows: (rows) ->
new Backbone.Collection(rows)
@cards: (cards) ->
new Backbone.Collection(cards)
@faceUpCard: (id, options = {}) ->
options.cardId = id
options.faceUp = true
new Monolith.ViewModels.CardViewModel(options)
@faceDownCard: (options = {}) ->
new Monolith.ViewModels.CardViewModel(options)
@player: (type, side, rows, hand, tokens) ->
new Monolith.ViewModels.PlayerViewModel(type: type, side: side, rows: rows, hand: hand, tokens: tokens)
@tokens: (type, amount) ->
new Monolith.ViewModels.TokensViewModel(type: type, amount: amount)
@tokensCollection: (tokens) ->
new Monolith.ViewModels.TokensViewCollection(tokens) |
[
{
"context": "pkg + \"/fixture_npmrc\", \"//localhost:1337/:email = fancy@feast.net\\n\" + \"//localhost:1337/:username = fancy\\n\" + \"//",
"end": 461,
"score": 0.9998690485954285,
"start": 446,
"tag": "EMAIL",
"value": "fancy@feast.net"
},
{
"context": "fancy@feast.net\\n\" + \"//... | deps/npm/test/tap/publish-config.coffee | lxe/io.coffee | 0 | common = require("../common-tap.js")
test = require("tap").test
fs = require("fs")
osenv = require("osenv")
pkg = process.env.npm_config_tmp or "/tmp"
pkg += "/npm-test-publish-config"
require("mkdirp").sync pkg
fs.writeFileSync pkg + "/package.json", JSON.stringify(
name: "npm-test-publish-config"
version: "1.2.3"
publishConfig:
registry: common.registry
), "utf8"
fs.writeFileSync pkg + "/fixture_npmrc", "//localhost:1337/:email = fancy@feast.net\n" + "//localhost:1337/:username = fancy\n" + "//localhost:1337/:_password = " + new Buffer("feast").toString("base64") + "\n" + "registry = http://localhost:1337/"
test (t) ->
child = undefined
require("http").createServer((req, res) ->
t.pass "got request on the fakey fake registry"
t.end()
@close()
res.statusCode = 500
res.end JSON.stringify(error: "sshhh. naptime nao. \\^O^/ <(YAWWWWN!)")
child.kill()
return
).listen common.port, ->
t.pass "server is listening"
# don't much care about listening to the child's results
# just wanna make sure it hits the server we just set up.
#
# there are plenty of other tests to verify that publish
# itself functions normally.
#
# Make sure that we don't sit around waiting for lock files
child = common.npm([
"publish"
"--userconfig=" + pkg + "/fixture_npmrc"
],
cwd: pkg
stdio: "inherit"
env:
npm_config_cache_lock_stale: 1000
npm_config_cache_lock_wait: 1000
HOME: process.env.HOME
Path: process.env.PATH
PATH: process.env.PATH
USERPROFILE: osenv.home()
, (err, code) ->
t.ifError err, "publish command finished successfully"
t.notOk code, "npm install exited with code 0"
return
)
return
return
| 98744 | common = require("../common-tap.js")
test = require("tap").test
fs = require("fs")
osenv = require("osenv")
pkg = process.env.npm_config_tmp or "/tmp"
pkg += "/npm-test-publish-config"
require("mkdirp").sync pkg
fs.writeFileSync pkg + "/package.json", JSON.stringify(
name: "npm-test-publish-config"
version: "1.2.3"
publishConfig:
registry: common.registry
), "utf8"
fs.writeFileSync pkg + "/fixture_npmrc", "//localhost:1337/:email = <EMAIL>\n" + "//localhost:1337/:username = fancy\n" + "//localhost:1337/:_password = " + new Buffer("feast").toString("base64") + "\n" + "registry = http://localhost:1337/"
test (t) ->
child = undefined
require("http").createServer((req, res) ->
t.pass "got request on the fakey fake registry"
t.end()
@close()
res.statusCode = 500
res.end JSON.stringify(error: "sshhh. naptime nao. \\^O^/ <(YAWWWWN!)")
child.kill()
return
).listen common.port, ->
t.pass "server is listening"
# don't much care about listening to the child's results
# just wanna make sure it hits the server we just set up.
#
# there are plenty of other tests to verify that publish
# itself functions normally.
#
# Make sure that we don't sit around waiting for lock files
child = common.npm([
"publish"
"--userconfig=" + pkg + "/fixture_npmrc"
],
cwd: pkg
stdio: "inherit"
env:
npm_config_cache_lock_stale: 1000
npm_config_cache_lock_wait: 1000
HOME: process.env.HOME
Path: process.env.PATH
PATH: process.env.PATH
USERPROFILE: osenv.home()
, (err, code) ->
t.ifError err, "publish command finished successfully"
t.notOk code, "npm install exited with code 0"
return
)
return
return
| true | common = require("../common-tap.js")
test = require("tap").test
fs = require("fs")
osenv = require("osenv")
pkg = process.env.npm_config_tmp or "/tmp"
pkg += "/npm-test-publish-config"
require("mkdirp").sync pkg
fs.writeFileSync pkg + "/package.json", JSON.stringify(
name: "npm-test-publish-config"
version: "1.2.3"
publishConfig:
registry: common.registry
), "utf8"
fs.writeFileSync pkg + "/fixture_npmrc", "//localhost:1337/:email = PI:EMAIL:<EMAIL>END_PI\n" + "//localhost:1337/:username = fancy\n" + "//localhost:1337/:_password = " + new Buffer("feast").toString("base64") + "\n" + "registry = http://localhost:1337/"
test (t) ->
child = undefined
require("http").createServer((req, res) ->
t.pass "got request on the fakey fake registry"
t.end()
@close()
res.statusCode = 500
res.end JSON.stringify(error: "sshhh. naptime nao. \\^O^/ <(YAWWWWN!)")
child.kill()
return
).listen common.port, ->
t.pass "server is listening"
# don't much care about listening to the child's results
# just wanna make sure it hits the server we just set up.
#
# there are plenty of other tests to verify that publish
# itself functions normally.
#
# Make sure that we don't sit around waiting for lock files
child = common.npm([
"publish"
"--userconfig=" + pkg + "/fixture_npmrc"
],
cwd: pkg
stdio: "inherit"
env:
npm_config_cache_lock_stale: 1000
npm_config_cache_lock_wait: 1000
HOME: process.env.HOME
Path: process.env.PATH
PATH: process.env.PATH
USERPROFILE: osenv.home()
, (err, code) ->
t.ifError err, "publish command finished successfully"
t.notOk code, "npm install exited with code 0"
return
)
return
return
|
[
{
"context": "etch()\r\n\r\n expectedNames = [\"Administrator\",\"Bob McBobertan\",\"Ian B\",\"Ian Culling\",\"Matt Higgins\"]\r\n exp",
"end": 706,
"score": 0.9998835325241089,
"start": 692,
"tag": "NAME",
"value": "Bob McBobertan"
},
{
"context": "xpectedNames = [\"Admini... | tests/Backbone.sync.Tests.coffee | versionone/V1.Backbone | 1 | Backbone = require('backbone')
V1 = require('../V1.Backbone')
expect = require('chai').expect
recorded = require('./recorded')
deferred = require('JQDeferred')
describe "Fetching with `sync`", ->
describe "a collection", ->
Members = V1.Backbone.Collection.extend
model: V1.Backbone.Model.extend
queryOptions:
assetType: "Member"
schema: [ "Name" ]
beforeEach ->
V1.Backbone.setDefaultRetriever(url: "url", fetch: recorded)
afterEach ->
V1.Backbone.clearDefaultRetriever()
it "can find all members", ->
members = new Members()
members.fetch()
expectedNames = ["Administrator","Bob McBobertan","Ian B","Ian Culling","Matt Higgins"]
expect(members.pluck("Name")).to.deep.equal(expectedNames)
it "can find talkative members, using filters", ->
members = new Members()
members.fetch(filter: ["ParticipatesInConversations.@Count>'4'"])
expectedNames = ["Administrator","Matt Higgins"]
expect(members.pluck("Name")).to.deep.equal(expectedNames)
it "find members who belong to two scopes, using wheres", ->
members = new Members()
members.fetch(where: {"Scopes.@Count": 2 } )
expectedNames = ["Administrator"]
expect(members.pluck("Name")).to.deep.equal(expectedNames)
describe "a collection with a mixin", ->
Member = Backbone.Model.extend
queryOptions:
assetType: "Member"
schema: [ "Name" ]
Members = Backbone.Collection.extend
model: Member
V1.Backbone.mixInTo(Member)
V1.Backbone.mixInTo(Members)
beforeEach ->
V1.Backbone.setDefaultRetriever(url: "url", fetch: recorded)
afterEach ->
V1.Backbone.clearDefaultRetriever()
it "can find all members", ->
members = new Members()
members.fetch()
expectedNames = ["Administrator","Bob McBobertan","Ian B","Ian Culling","Matt Higgins"]
expect(members.pluck("Name")).to.deep.equal(expectedNames)
it "can find talkative members, using filters", ->
members = new Members()
members.fetch(filter: ["ParticipatesInConversations.@Count>'4'"])
expectedNames = ["Administrator","Matt Higgins"]
expect(members.pluck("Name")).to.deep.equal(expectedNames)
it "find members who belong to two scopes, using wheres", ->
members = new Members()
members.fetch(where: {"Scopes.@Count": 2 } )
expectedNames = ["Administrator"]
expect(members.pluck("Name")).to.deep.equal(expectedNames)
describe "a collection, with relations", ->
Expressions = V1.Backbone.Collection.extend
model: V1.Backbone.Model.extend
queryOptions:
assetType: "Expression"
schema: "Content"
Members = V1.Backbone.Collection.extend
model: V1.Backbone.Model.extend
queryOptions:
assetType: "Member"
schema: [
"Name"
V1.Backbone.alias("ParticipatesInConversations.@Count").as("ParticipationCount")
V1.Backbone.relation("ParticipatesInConversations").of(Expressions)
]
beforeEach ->
V1.Backbone.setDefaultRetriever(url: "url", fetch: recorded)
afterEach ->
V1.Backbone.clearDefaultRetriever()
it "will get members and the expressions they participate in", ->
members = new Members()
members.fetch()
members.each (member) ->
expect(member.get("ParticipatesInConversations").length).to.equal(parseInt(member.get("ParticipationCount"),10))
describe "a model", ->
beforeEach ->
V1.Backbone.setDefaultRetriever(url: "url", fetch: recorded)
afterEach ->
V1.Backbone.clearDefaultRetriever()
it "can sync an indiviual model", ->
Member = V1.Backbone.Model.extend
queryOptions:
assetType: "Member"
schema: [ "Name" ]
admin = new Member(_oid: "Member:1017")
admin.fetch()
expect(admin.get("Name")).to.equal("Bob McBobertan")
| 124317 | Backbone = require('backbone')
V1 = require('../V1.Backbone')
expect = require('chai').expect
recorded = require('./recorded')
deferred = require('JQDeferred')
describe "Fetching with `sync`", ->
describe "a collection", ->
Members = V1.Backbone.Collection.extend
model: V1.Backbone.Model.extend
queryOptions:
assetType: "Member"
schema: [ "Name" ]
beforeEach ->
V1.Backbone.setDefaultRetriever(url: "url", fetch: recorded)
afterEach ->
V1.Backbone.clearDefaultRetriever()
it "can find all members", ->
members = new Members()
members.fetch()
expectedNames = ["Administrator","<NAME>","<NAME>","<NAME>","<NAME>"]
expect(members.pluck("Name")).to.deep.equal(expectedNames)
it "can find talkative members, using filters", ->
members = new Members()
members.fetch(filter: ["ParticipatesInConversations.@Count>'4'"])
expectedNames = ["<NAME>","<NAME>"]
expect(members.pluck("Name")).to.deep.equal(expectedNames)
it "find members who belong to two scopes, using wheres", ->
members = new Members()
members.fetch(where: {"Scopes.@Count": 2 } )
expectedNames = ["<NAME>"]
expect(members.pluck("Name")).to.deep.equal(expectedNames)
describe "a collection with a mixin", ->
Member = Backbone.Model.extend
queryOptions:
assetType: "Member"
schema: [ "Name" ]
Members = Backbone.Collection.extend
model: Member
V1.Backbone.mixInTo(Member)
V1.Backbone.mixInTo(Members)
beforeEach ->
V1.Backbone.setDefaultRetriever(url: "url", fetch: recorded)
afterEach ->
V1.Backbone.clearDefaultRetriever()
it "can find all members", ->
members = new Members()
members.fetch()
expectedNames = ["Administrator","<NAME>","<NAME>","<NAME>","<NAME>"]
expect(members.pluck("Name")).to.deep.equal(expectedNames)
it "can find talkative members, using filters", ->
members = new Members()
members.fetch(filter: ["ParticipatesInConversations.@Count>'4'"])
expectedNames = ["Administrator","<NAME>"]
expect(members.pluck("Name")).to.deep.equal(expectedNames)
it "find members who belong to two scopes, using wheres", ->
members = new Members()
members.fetch(where: {"Scopes.@Count": 2 } )
expectedNames = ["Administrator"]
expect(members.pluck("Name")).to.deep.equal(expectedNames)
describe "a collection, with relations", ->
Expressions = V1.Backbone.Collection.extend
model: V1.Backbone.Model.extend
queryOptions:
assetType: "Expression"
schema: "Content"
Members = V1.Backbone.Collection.extend
model: V1.Backbone.Model.extend
queryOptions:
assetType: "Member"
schema: [
"Name"
V1.Backbone.alias("ParticipatesInConversations.@Count").as("ParticipationCount")
V1.Backbone.relation("ParticipatesInConversations").of(Expressions)
]
beforeEach ->
V1.Backbone.setDefaultRetriever(url: "url", fetch: recorded)
afterEach ->
V1.Backbone.clearDefaultRetriever()
it "will get members and the expressions they participate in", ->
members = new Members()
members.fetch()
members.each (member) ->
expect(member.get("ParticipatesInConversations").length).to.equal(parseInt(member.get("ParticipationCount"),10))
describe "a model", ->
beforeEach ->
V1.Backbone.setDefaultRetriever(url: "url", fetch: recorded)
afterEach ->
V1.Backbone.clearDefaultRetriever()
it "can sync an indiviual model", ->
Member = V1.Backbone.Model.extend
queryOptions:
assetType: "Member"
schema: [ "Name" ]
admin = new Member(_oid: "Member:1017")
admin.fetch()
expect(admin.get("Name")).to.equal("<NAME>")
| true | Backbone = require('backbone')
V1 = require('../V1.Backbone')
expect = require('chai').expect
recorded = require('./recorded')
deferred = require('JQDeferred')
describe "Fetching with `sync`", ->
describe "a collection", ->
Members = V1.Backbone.Collection.extend
model: V1.Backbone.Model.extend
queryOptions:
assetType: "Member"
schema: [ "Name" ]
beforeEach ->
V1.Backbone.setDefaultRetriever(url: "url", fetch: recorded)
afterEach ->
V1.Backbone.clearDefaultRetriever()
it "can find all members", ->
members = new Members()
members.fetch()
expectedNames = ["Administrator","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI"]
expect(members.pluck("Name")).to.deep.equal(expectedNames)
it "can find talkative members, using filters", ->
members = new Members()
members.fetch(filter: ["ParticipatesInConversations.@Count>'4'"])
expectedNames = ["PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI"]
expect(members.pluck("Name")).to.deep.equal(expectedNames)
it "find members who belong to two scopes, using wheres", ->
members = new Members()
members.fetch(where: {"Scopes.@Count": 2 } )
expectedNames = ["PI:NAME:<NAME>END_PI"]
expect(members.pluck("Name")).to.deep.equal(expectedNames)
describe "a collection with a mixin", ->
Member = Backbone.Model.extend
queryOptions:
assetType: "Member"
schema: [ "Name" ]
Members = Backbone.Collection.extend
model: Member
V1.Backbone.mixInTo(Member)
V1.Backbone.mixInTo(Members)
beforeEach ->
V1.Backbone.setDefaultRetriever(url: "url", fetch: recorded)
afterEach ->
V1.Backbone.clearDefaultRetriever()
it "can find all members", ->
members = new Members()
members.fetch()
expectedNames = ["Administrator","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI"]
expect(members.pluck("Name")).to.deep.equal(expectedNames)
it "can find talkative members, using filters", ->
members = new Members()
members.fetch(filter: ["ParticipatesInConversations.@Count>'4'"])
expectedNames = ["Administrator","PI:NAME:<NAME>END_PI"]
expect(members.pluck("Name")).to.deep.equal(expectedNames)
it "find members who belong to two scopes, using wheres", ->
members = new Members()
members.fetch(where: {"Scopes.@Count": 2 } )
expectedNames = ["Administrator"]
expect(members.pluck("Name")).to.deep.equal(expectedNames)
describe "a collection, with relations", ->
Expressions = V1.Backbone.Collection.extend
model: V1.Backbone.Model.extend
queryOptions:
assetType: "Expression"
schema: "Content"
Members = V1.Backbone.Collection.extend
model: V1.Backbone.Model.extend
queryOptions:
assetType: "Member"
schema: [
"Name"
V1.Backbone.alias("ParticipatesInConversations.@Count").as("ParticipationCount")
V1.Backbone.relation("ParticipatesInConversations").of(Expressions)
]
beforeEach ->
V1.Backbone.setDefaultRetriever(url: "url", fetch: recorded)
afterEach ->
V1.Backbone.clearDefaultRetriever()
it "will get members and the expressions they participate in", ->
members = new Members()
members.fetch()
members.each (member) ->
expect(member.get("ParticipatesInConversations").length).to.equal(parseInt(member.get("ParticipationCount"),10))
describe "a model", ->
beforeEach ->
V1.Backbone.setDefaultRetriever(url: "url", fetch: recorded)
afterEach ->
V1.Backbone.clearDefaultRetriever()
it "can sync an indiviual model", ->
Member = V1.Backbone.Model.extend
queryOptions:
assetType: "Member"
schema: [ "Name" ]
admin = new Member(_oid: "Member:1017")
admin.fetch()
expect(admin.get("Name")).to.equal("PI:NAME:<NAME>END_PI")
|
[
{
"context": "ince it will be created by Hannah\n #user: 'jgable'\n #repo: 'Slimer'\n #events: ['push'",
"end": 513,
"score": 0.9995492100715637,
"start": 507,
"tag": "USERNAME",
"value": "jgable"
},
{
"context": " to master on TryGhost/Ghost - https://github.com... | scripts/ghost-github.coffee | TryGhost/Slimer-hubot | 1 | # Description:
# Github Webhook Responding for TryGhost
#
# Dependencies:
# git-at-me (npm install git-at-me --save)
#
# Commands:
# None
github = require('git-at-me')
devRoom = '#ghost'
module.exports = (robot) ->
return unless robot.router
githubEvents = github
# TESTING: Must be generated with github.wizard()
#token: require('../github-token')
# Repo information for creating a webhook; not needed for Ghost since it will be created by Hannah
#user: 'jgable'
#repo: 'Slimer'
#events: ['push', 'pull_request', 'issues', 'issue_comment']
# TESTING: Using ngrok to generate this while testing
url: "http://#{process.env.HUBOT_HOSTNAME}/github/events"
skipHook: true
server: robot.router
githubEvents.on 'push', (pushData) ->
author = pushData.pusher.name
commits = pushData.commits.length
branch = pushData.ref.replace('refs/heads/', '')
repo = "#{pushData.repository.owner.name}/#{pushData.repository.name}"
compareUrl = pushData.compare
# Only output commits to master
return unless branch == 'master'
# Format: <Slimer> ErisDS pushed 2 commits to master on TryGhost/Ghost - https://github.com/jgable/git-at-me/compare/b29e18b9b2db...3722cee576e1
robot.messageRoom devRoom, "#{author} pushed #{commits} commits to #{branch} on #{repo} - #{compareUrl}"
githubEvents.on 'pull_request', (prData) ->
{ action, number, pull_request, sender, repository } = prData
{ html_url, title, user } = pull_request
action = "merged" if pull_request.merged
action = "updated" if action == "synchronize"
# Format: <Slimer> ErisDS merged PR #102 on TryGhost/Ghost - Fix bug on image uploader, fixes #92 - by JohnONolan - http://github.com/TryGhost/Ghost/Pulls/102
msg = "#{sender.login} #{action} PR ##{number} on #{repository.full_name} - #{title} - #{html_url}"
robot.messageRoom devRoom, msg
githubEvents.on 'issues', (issueData) ->
{ action, issue, repository, sender } = issueData
return if action in ['labeled', 'unlabeled']
if action == 'assigned'
msg = "#{sender.login} #{action} Issue ##{issue.number} on #{repository.full_name} - #{issue.title} - #{issue.html_url} to #{issueData.assignee.login}"
else if action == 'unassigned'
msg = "#{sender.login} #{action} Issue ##{issue.number} on #{repository.full_name} - #{issue.title} - #{issue.html_url} from #{issueData.assignee.login}"
else
# Format: <Slimer> gotdibbs created issue #1035 on TryGhost/Ghost - File uploads CSRF protection
msg = "#{sender.login} #{action} Issue ##{issue.number} on #{repository.full_name} - #{issue.title} - #{issue.html_url}"
robot.messageRoom devRoom, msg
githubEvents.on 'issue_comment', (commentData) ->
# Not reporting on comments right now
return
{ action, issue, comment, repository, sender } = commentData
return unless action == 'created'
# Format: <Slimer> jgable commented on issue #3 on TryGhost/Ghost - File uploads CSRF protection
msg = "#{sender.login} commented on Issue ##{issue.number} on #{repository.full_name} - #{issue.title} - #{comment.html_url}"
robot.messageRoom devRoom, msg
githubEvents.on 'error', (err) ->
console.log "Error in githubEvents: #{err.message}"
| 76487 | # Description:
# Github Webhook Responding for TryGhost
#
# Dependencies:
# git-at-me (npm install git-at-me --save)
#
# Commands:
# None
github = require('git-at-me')
devRoom = '#ghost'
module.exports = (robot) ->
return unless robot.router
githubEvents = github
# TESTING: Must be generated with github.wizard()
#token: require('../github-token')
# Repo information for creating a webhook; not needed for Ghost since it will be created by Hannah
#user: 'jgable'
#repo: 'Slimer'
#events: ['push', 'pull_request', 'issues', 'issue_comment']
# TESTING: Using ngrok to generate this while testing
url: "http://#{process.env.HUBOT_HOSTNAME}/github/events"
skipHook: true
server: robot.router
githubEvents.on 'push', (pushData) ->
author = pushData.pusher.name
commits = pushData.commits.length
branch = pushData.ref.replace('refs/heads/', '')
repo = "#{pushData.repository.owner.name}/#{pushData.repository.name}"
compareUrl = pushData.compare
# Only output commits to master
return unless branch == 'master'
# Format: <Slimer> ErisDS pushed 2 commits to master on TryGhost/Ghost - https://github.com/jgable/git-at-me/compare/b29e18b9b2db...3722cee576e1
robot.messageRoom devRoom, "#{author} pushed #{commits} commits to #{branch} on #{repo} - #{compareUrl}"
githubEvents.on 'pull_request', (prData) ->
{ action, number, pull_request, sender, repository } = prData
{ html_url, title, user } = pull_request
action = "merged" if pull_request.merged
action = "updated" if action == "synchronize"
# Format: <Slimer> ErisDS merged PR #102 on TryGhost/Ghost - Fix bug on image uploader, fixes #92 - by JohnONolan - http://github.com/TryGhost/Ghost/Pulls/102
msg = "#{sender.login} #{action} PR ##{number} on #{repository.full_name} - #{title} - #{html_url}"
robot.messageRoom devRoom, msg
githubEvents.on 'issues', (issueData) ->
{ action, issue, repository, sender } = issueData
return if action in ['labeled', 'unlabeled']
if action == 'assigned'
msg = "#{sender.login} #{action} Issue ##{issue.number} on #{repository.full_name} - #{issue.title} - #{issue.html_url} to #{issueData.assignee.login}"
else if action == 'unassigned'
msg = "#{sender.login} #{action} Issue ##{issue.number} on #{repository.full_name} - #{issue.title} - #{issue.html_url} from #{issueData.assignee.login}"
else
# Format: <<NAME>> gotdibbs created issue #1035 on TryGhost/Ghost - File uploads CSRF protection
msg = "#{sender.login} #{action} Issue ##{issue.number} on #{repository.full_name} - #{issue.title} - #{issue.html_url}"
robot.messageRoom devRoom, msg
githubEvents.on 'issue_comment', (commentData) ->
# Not reporting on comments right now
return
{ action, issue, comment, repository, sender } = commentData
return unless action == 'created'
# Format: <<NAME>> jgable commented on issue #3 on TryGhost/Ghost - File uploads CSRF protection
msg = "#{sender.login} commented on Issue ##{issue.number} on #{repository.full_name} - #{issue.title} - #{comment.html_url}"
robot.messageRoom devRoom, msg
githubEvents.on 'error', (err) ->
console.log "Error in githubEvents: #{err.message}"
| true | # Description:
# Github Webhook Responding for TryGhost
#
# Dependencies:
# git-at-me (npm install git-at-me --save)
#
# Commands:
# None
github = require('git-at-me')
devRoom = '#ghost'
module.exports = (robot) ->
return unless robot.router
githubEvents = github
# TESTING: Must be generated with github.wizard()
#token: require('../github-token')
# Repo information for creating a webhook; not needed for Ghost since it will be created by Hannah
#user: 'jgable'
#repo: 'Slimer'
#events: ['push', 'pull_request', 'issues', 'issue_comment']
# TESTING: Using ngrok to generate this while testing
url: "http://#{process.env.HUBOT_HOSTNAME}/github/events"
skipHook: true
server: robot.router
githubEvents.on 'push', (pushData) ->
author = pushData.pusher.name
commits = pushData.commits.length
branch = pushData.ref.replace('refs/heads/', '')
repo = "#{pushData.repository.owner.name}/#{pushData.repository.name}"
compareUrl = pushData.compare
# Only output commits to master
return unless branch == 'master'
# Format: <Slimer> ErisDS pushed 2 commits to master on TryGhost/Ghost - https://github.com/jgable/git-at-me/compare/b29e18b9b2db...3722cee576e1
robot.messageRoom devRoom, "#{author} pushed #{commits} commits to #{branch} on #{repo} - #{compareUrl}"
githubEvents.on 'pull_request', (prData) ->
{ action, number, pull_request, sender, repository } = prData
{ html_url, title, user } = pull_request
action = "merged" if pull_request.merged
action = "updated" if action == "synchronize"
# Format: <Slimer> ErisDS merged PR #102 on TryGhost/Ghost - Fix bug on image uploader, fixes #92 - by JohnONolan - http://github.com/TryGhost/Ghost/Pulls/102
msg = "#{sender.login} #{action} PR ##{number} on #{repository.full_name} - #{title} - #{html_url}"
robot.messageRoom devRoom, msg
githubEvents.on 'issues', (issueData) ->
{ action, issue, repository, sender } = issueData
return if action in ['labeled', 'unlabeled']
if action == 'assigned'
msg = "#{sender.login} #{action} Issue ##{issue.number} on #{repository.full_name} - #{issue.title} - #{issue.html_url} to #{issueData.assignee.login}"
else if action == 'unassigned'
msg = "#{sender.login} #{action} Issue ##{issue.number} on #{repository.full_name} - #{issue.title} - #{issue.html_url} from #{issueData.assignee.login}"
else
# Format: <PI:NAME:<NAME>END_PI> gotdibbs created issue #1035 on TryGhost/Ghost - File uploads CSRF protection
msg = "#{sender.login} #{action} Issue ##{issue.number} on #{repository.full_name} - #{issue.title} - #{issue.html_url}"
robot.messageRoom devRoom, msg
githubEvents.on 'issue_comment', (commentData) ->
# Not reporting on comments right now
return
{ action, issue, comment, repository, sender } = commentData
return unless action == 'created'
# Format: <PI:NAME:<NAME>END_PI> jgable commented on issue #3 on TryGhost/Ghost - File uploads CSRF protection
msg = "#{sender.login} commented on Issue ##{issue.number} on #{repository.full_name} - #{issue.title} - #{comment.html_url}"
robot.messageRoom devRoom, msg
githubEvents.on 'error', (err) ->
console.log "Error in githubEvents: #{err.message}"
|
[
{
"context": "tion are supported:\n\n1. An object like `{ email: 'foo@bar.com' }`\n2. A query string like `'email=foo%40bar.com'",
"end": 509,
"score": 0.9997956156730652,
"start": 498,
"tag": "EMAIL",
"value": "foo@bar.com"
},
{
"context": " value }` objects like `[{ name: 'email', ... | lib/assets/javascripts/unpoly/classes/params.coffee | pfw/unpoly | 0 | u = up.util
e = up.element
###**
The `up.Params` class offers a consistent API to read and manipulate request parameters
independent of their type.
Request parameters are used in [form submissions](/up.Params.prototype.fromForm) and
[URLs](/up.Params.prototype.fromURL). Methods like `up.submit()` or `up.replace()` accept
request parameters as a `{ params }` option.
\#\#\# Supported parameter types
The following types of parameter representation are supported:
1. An object like `{ email: 'foo@bar.com' }`
2. A query string like `'email=foo%40bar.com'`
3. An array of `{ name, value }` objects like `[{ name: 'email', value: 'foo@bar.com' }]`
4. A [FormData](https://developer.mozilla.org/en-US/docs/Web/API/FormData) object.
On IE 11 and Edge, `FormData` payloads require a [polyfill for `FormData#entries()`](https://github.com/jimmywarting/FormData).
@class up.Params
###
class up.Params extends up.Class
###**
Constructs a new `up.Params` instance.
@constructor up.Params
@param {Object|Array|string|up.Params} [params]
An existing list of params with which to initialize the new `up.Params` object.
The given params value may be of any [supported type](/up.Params).
@return {up.Params}
@experimental
###
constructor: (raw) ->
@clear()
@addAll(raw)
###**
Removes all params from this object.
@function up.Params#clear
@experimental
###
clear: ->
@entries = []
"#{u.copy.key}": ->
new up.Params(@)
###**
Returns an object representation of this `up.Params` instance.
The returned value is a simple JavaScript object with properties
that correspond to the key/values in the given `params`.
\#\#\# Example
var params = new up.Params('foo=bar&baz=bam')
var object = params.toObject()
// object is now: {
// foo: 'bar',
// baz: 'bam'
// ]
@function up.Params#toObject
@return {Object}
@experimental
###
toObject: ->
obj = {}
for entry in @entries
{ name, value } = entry
unless u.isBasicObjectProperty(name)
if @isArrayKey(name)
obj[name] ||= []
obj[name].push(value)
else
obj[name] = value
obj
###**
Returns an array representation of this `up.Params` instance.
The returned value is a JavaScript array with elements that are objects with
`{ key }` and `{ value }` properties.
\#\#\# Example
var params = new up.Params('foo=bar&baz=bam')
var array = params.toArray()
// array is now: [
// { name: 'foo', value: 'bar' },
// { name: 'baz', value: 'bam' }
// ]
@function up.Params#toArray
@return {Array}
@experimental
###
toArray: ->
@entries
###**
Returns a [`FormData`](https://developer.mozilla.org/en-US/docs/Web/API/FormData) representation
of this `up.Params` instance.
\#\#\# Example
var params = new up.Params('foo=bar&baz=bam')
var formData = params.toFormData()
formData.get('foo') // 'bar'
formData.get('baz') // 'bam'
@function up.Params#toFormData
@return {FormData}
@experimental
###
toFormData: ->
formData = new FormData()
for entry in @entries
formData.append(entry.name, entry.value)
unless formData.entries
# If this browser cannot inspect FormData with the #entries()
# iterator, assign the original array for inspection by specs.
formData.originalArray = @entries
formData
###**
Returns an [query string](https://en.wikipedia.org/wiki/Query_string) for this `up.Params` instance.
The keys and values in the returned query string will be [percent-encoded](https://developer.mozilla.org/en-US/docs/Glossary/percent-encoding).
Non-primitive values (like [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File) will be omitted from
the retuned query string.
\#\#\# Example
var params = new up.Params({ foo: 'bar', baz: 'bam' })
var query = params.toQuery()
// query is now: 'foo=bar&baz=bam'
@function up.Params#toQuery
@param {Object|FormData|string|Array|undefined} params
the params to convert
@return {string}
a query string built from the given params
@experimental
###
toQuery: ->
parts = u.map(@entries, @arrayEntryToQuery)
parts = u.compact(parts)
parts.join('&')
arrayEntryToQuery: (entry) =>
value = entry.value
# We cannot transpot a binary value in a query string.
if @isBinaryValue(value)
return undefined
query = encodeURIComponent(entry.name)
# There is a subtle difference when encoding blank values:
# 1. An undefined or null value is encoded to `key` with no equals sign
# 2. An empty string value is encoded to `key=` with an equals sign but no value
if u.isGiven(value)
query += "="
query += encodeURIComponent(value)
query
###**
Returns whether the given value cannot be encoded into a query string.
We will have `File` values in our params when we serialize a form with a file input.
These entries will be filtered out when converting to a query string.
@function up.Params#isBinaryValue
@internal
###
isBinaryValue: (value) ->
value instanceof Blob
hasBinaryValues: ->
values = u.map(@entries, 'value')
return u.some(values, @isBinaryValue)
###**
Builds an URL string from the given base URL and
this `up.Params` instance as a [query string](/up.Params.toString).
The base URL may or may not already contain a query string. The
additional query string will be joined with an `&` or `?` character accordingly.
@function up.Params#toURL
@param {string} base
The base URL that will be prepended to this `up.Params` object as a [query string](/up.Params.toString).
@return {string}
The built URL.
@experimental
###
toURL: (base) ->
parts = [base, @toQuery()]
parts = u.filter(parts, u.isPresent)
separator = if u.contains(base, '?') then '&' else '?'
parts.join(separator)
###**
Adds a new entry with the given `name` and `value`.
An `up.Params` instance can hold multiple entries with the same name.
To overwrite all existing entries with the given `name`, use `up.Params#set()` instead.
\#\#\# Example
var params = new up.Params()
params.add('foo', 'fooValue')
var foo = params.get('foo')
// foo is now 'fooValue'
@function up.Params#add
@param {string} name
The name of the new entry.
@param {any} value
The value of the new entry.
@experimental
###
add: (name, value) ->
@entries.push({name, value})
###**
Adds all entries from the given list of params.
The given params value may be of any [supported type](/up.Params).
@function up.Params#addAll
@param {Object|Array|string|up.Params|undefined} params
@experimental
###
addAll: (raw) ->
if u.isMissing(raw)
# nothing to do
else if raw instanceof @constructor
@entries.push(raw.entries...)
else if u.isArray(raw)
# internal use for copying
@entries.push(raw...)
else if u.isString(raw)
@addAllFromQuery(raw)
else if u.isFormData(raw)
@addAllFromFormData(raw)
else if u.isObject(raw)
@addAllFromObject(raw)
else
up.fail("Unsupport params type: %o", raw)
addAllFromObject: (object) ->
for key, value of object
valueElements = if u.isArray(value) then value else [value]
for valueElement in valueElements
@add(key, valueElement)
addAllFromQuery: (query) ->
for part in query.split('&')
if part
[name, value] = part.split('=')
name = decodeURIComponent(name)
# There are three forms we need to handle:
# (1) foo=bar should become { name: 'foo', bar: 'bar' }
# (2) foo= should become { name: 'foo', bar: '' }
# (3) foo should become { name: 'foo', bar: null }
if u.isGiven(value)
value = decodeURIComponent(value)
else
value = null
@add(name, value)
addAllFromFormData: (formData) ->
u.eachIterator formData.entries(), (value) =>
@add(value...)
###**
Sets the `value` for the entry with given `name`.
An `up.Params` instance can hold multiple entries with the same name.
All existing entries with the given `name` are [deleted](/up.Params.prototype.delete) before the
new entry is set. To add a new entry even if the `name` is taken, use `up.Params#add()`.
@function up.Params#set
@param {string} name
The name of the entry to set.
@param {any} value
The new value of the entry.
@experimental
###
set: (name, value) ->
@delete(name)
@add(name, value)
###**
Deletes all entries with the given `name`.
@function up.Params#delete
@param {string} name
@experimental
###
delete: (name) ->
@entries = u.reject(@entries, @matchEntryFn(name))
matchEntryFn: (name) ->
(entry) -> entry.name == name
###**
Returns the first param value with the given `name` from the given `params`.
Returns `undefined` if no param value with that name is set.
If the `name` denotes an array field (e.g. `foo[]`), *all* param values with the given `name`
are returned as an array. If no param value with that array name is set, an empty
array is returned.
To always return a single value use `up.Params#getFirst()` instead.
To always return an array of values use `up.Params#getAll()` instead.
\#\#\# Example
var params = new up.Params({ foo: 'fooValue', bar: 'barValue' })
var params = new up.Params([
{ name: 'foo', value: 'fooValue' }
{ name: 'bar[]', value: 'barValue1' }
{ name: 'bar[]', value: 'barValue2' })
]})
var foo = params.get('foo')
// foo is now 'fooValue'
var bar = params.get('bar')
// bar is now ['barValue1', 'barValue2']
@function up.Params#get
@param {string} name
@experimental
###
get: (name) ->
if @isArrayKey(name)
@getAll(name)
else
@getFirst(name)
###**
Returns the first param value with the given `name`.
Returns `undefined` if no param value with that name is set.
@function up.Params#getFirst
@param {string} name
@return {any}
The value of the param with the given name.
@internal
###
getFirst: (name) ->
entry = u.find(@entries, @matchEntryFn(name))
entry?.value
###**
Returns an array of all param values with the given `name`.
Returns an empty array if no param value with that name is set.
@function up.Params#getAll
@param {string} name
@return {Array}
An array of all values with the given name.
@internal
###
getAll: (name) ->
if @isArrayKey(name)
@getAll(name)
else
entries = u.map(@entries, @matchEntryFn(name))
u.map(entries, 'value')
isArrayKey: (key) ->
u.endsWith(key, '[]')
"#{u.isBlank.key}": ->
@entries.length == 0
###**
Constructs a new `up.Params` instance from the given `<form>`.
The returned params may be passed as `{ params }` option to
`up.request()` or `up.replace()`.
The constructed `up.Params` will include exactly those form values that would be
included in a regular form submission. In particular:
- All `<input>` types are suppported
- Field values are usually strings, but an `<input type="file">` will produce
[`File`](https://developer.mozilla.org/en-US/docs/Web/API/File) values.
- An `<input type="radio">` or `<input type="checkbox">` will only be added if they are `[checked]`.
- An `<select>` will only be added if at least one value is `[checked]`.
- If passed a `<select multiple>` or `<input type="file" multiple>`, all selected values are added.
If passed a `<select multiple>`, all selected values are added.
- Fields that are `[disabled]` are ignored
- Fields without a `[name]` attribute are ignored.
\#\#\# Example
Given this HTML form:
<form>
<input type="text" name="email" value="foo@bar.com">
<input type="password" name="pass" value="secret">
</form>
This would serialize the form into an array representation:
var params = up.Params.fromForm('input[name=email]')
var email = params.get('email') // email is now 'foo@bar.com'
var pass = params.get('pass') // pass is now 'secret'
@function up.Params.fromForm
@param {Element|jQuery|string} form
A `<form>` element or a selector that matches a `<form>` element.
@return {up.Params}
A new `up.Params` instance with values from the given form.
@experimental
###
@fromForm: (form) ->
# If passed a selector, up.fragment.get() will prefer a match on the current layer.
form = up.fragment.get(form)
@fromFields(up.form.fields(form))
###**
Constructs a new `up.Params` instance from one or more
[HTML form field](https://www.w3schools.com/html/html_form_elements.asp).
The constructed `up.Params` will include exactly those form values that would be
included for the given fields in a regular form submission. If a given field wouldn't
submit a value (like an unchecked `<input type="checkbox">`, nothing will be added.
See `up.Params.fromForm()` for more details and examples.
@function up.Params.fromFields
@param {Element|List<Element>|jQuery} fields
@return {up.Params}
@experimental
###
@fromFields: (fields) ->
params = new @()
for field in u.wrapList(fields)
params.addField(field)
params
###**
Adds params from the given [HTML form field](https://www.w3schools.com/html/html_form_elements.asp).
The added params will include exactly those form values that would be
included for the given field in a regular form submission. If the given field wouldn't
submit a value (like an unchecked `<input type="checkbox">`, nothing will be added.
See `up.Params.fromForm()` for more details and examples.
@function up.Params#addField
@param {Element|jQuery} field
@experimental
###
addField: (field) ->
params = new @constructor()
field = e.get(field) # unwrap jQuery
# Input fields are excluded from form submissions if they have no [name]
# or when they are [disabled].
if (name = field.name) && (!field.disabled)
tagName = field.tagName
type = field.type
if tagName == 'SELECT'
for option in field.querySelectorAll('option')
if option.selected
@add(name, option.value)
else if type == 'checkbox' || type == 'radio'
if field.checked
@add(name, field.value)
else if type == 'file'
# The value of an input[type=file] is the local path displayed in the form.
# The actual File objects are in the #files property.
for file in field.files
@add(name, file)
else
@add(name, field.value)
"#{u.isEqual.key}": (other) ->
other && (@constructor == other.constructor) && u.isEqual(@entries, other.entries)
###**
Constructs a new `up.Params` instance from the given URL's
[query string](https://en.wikipedia.org/wiki/Query_string).
Constructs an empty `up.Params` instance if the given URL has no query string.
\#\#\# Example
var params = up.Params.fromURL('http://foo.com?foo=fooValue&bar=barValue')
var foo = params.get('foo')
// foo is now: 'fooValue'
@function up.Params.fromURL
@param {string} url
The URL from which to extract the query string.
@return {string|undefined}
The given URL's query string, or `undefined` if the URL has no query component.
@experimental
###
@fromURL: (url) ->
params = new @()
urlParts = u.parseURL(url)
if query = urlParts.search
query = query.replace(/^\?/, '')
params.addAll(query)
params
###**
Returns the given URL without its [query string](https://en.wikipedia.org/wiki/Query_string).
\#\#\# Example
var url = up.Params.stripURL('http://foo.com?key=value')
// url is now: 'http://foo.com'
@function up.Params.stripURL
@param {string} url
A URL (with or without a query string).
@return {string}
The given URL without its query string.
@experimental
###
@stripURL: (url) ->
return u.normalizeURL(url, search: false)
###**
If passed an `up.Params` instance, it is returned unchanged.
Otherwise constructs an `up.Params` instance from the given value.
The given params value may be of any [supported type](/up.Params)
The return value is always an `up.Params` instance.
@function up.Params.wrap
@param {Object|Array|string|up.Params|undefined} params
@return {up.Params}
@experimental
###
| 154332 | u = up.util
e = up.element
###**
The `up.Params` class offers a consistent API to read and manipulate request parameters
independent of their type.
Request parameters are used in [form submissions](/up.Params.prototype.fromForm) and
[URLs](/up.Params.prototype.fromURL). Methods like `up.submit()` or `up.replace()` accept
request parameters as a `{ params }` option.
\#\#\# Supported parameter types
The following types of parameter representation are supported:
1. An object like `{ email: '<EMAIL>' }`
2. A query string like `'email=foo%40bar.com'`
3. An array of `{ name, value }` objects like `[{ name: 'email', value: '<EMAIL>' }]`
4. A [FormData](https://developer.mozilla.org/en-US/docs/Web/API/FormData) object.
On IE 11 and Edge, `FormData` payloads require a [polyfill for `FormData#entries()`](https://github.com/jimmywarting/FormData).
@class up.Params
###
class up.Params extends up.Class
###**
Constructs a new `up.Params` instance.
@constructor up.Params
@param {Object|Array|string|up.Params} [params]
An existing list of params with which to initialize the new `up.Params` object.
The given params value may be of any [supported type](/up.Params).
@return {up.Params}
@experimental
###
constructor: (raw) ->
@clear()
@addAll(raw)
###**
Removes all params from this object.
@function up.Params#clear
@experimental
###
clear: ->
@entries = []
"#{u.copy.key}": ->
new up.Params(@)
###**
Returns an object representation of this `up.Params` instance.
The returned value is a simple JavaScript object with properties
that correspond to the key/values in the given `params`.
\#\#\# Example
var params = new up.Params('foo=bar&baz=bam')
var object = params.toObject()
// object is now: {
// foo: 'bar',
// baz: 'bam'
// ]
@function up.Params#toObject
@return {Object}
@experimental
###
toObject: ->
obj = {}
for entry in @entries
{ name, value } = entry
unless u.isBasicObjectProperty(name)
if @isArrayKey(name)
obj[name] ||= []
obj[name].push(value)
else
obj[name] = value
obj
###**
Returns an array representation of this `up.Params` instance.
The returned value is a JavaScript array with elements that are objects with
`{ key }` and `{ value }` properties.
\#\#\# Example
var params = new up.Params('foo=bar&baz=bam')
var array = params.toArray()
// array is now: [
// { name: 'foo', value: 'bar' },
// { name: 'baz', value: 'bam' }
// ]
@function up.Params#toArray
@return {Array}
@experimental
###
toArray: ->
@entries
###**
Returns a [`FormData`](https://developer.mozilla.org/en-US/docs/Web/API/FormData) representation
of this `up.Params` instance.
\#\#\# Example
var params = new up.Params('foo=bar&baz=bam')
var formData = params.toFormData()
formData.get('foo') // 'bar'
formData.get('baz') // 'bam'
@function up.Params#toFormData
@return {FormData}
@experimental
###
toFormData: ->
formData = new FormData()
for entry in @entries
formData.append(entry.name, entry.value)
unless formData.entries
# If this browser cannot inspect FormData with the #entries()
# iterator, assign the original array for inspection by specs.
formData.originalArray = @entries
formData
###**
Returns an [query string](https://en.wikipedia.org/wiki/Query_string) for this `up.Params` instance.
The keys and values in the returned query string will be [percent-encoded](https://developer.mozilla.org/en-US/docs/Glossary/percent-encoding).
Non-primitive values (like [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File) will be omitted from
the retuned query string.
\#\#\# Example
var params = new up.Params({ foo: 'bar', baz: 'bam' })
var query = params.toQuery()
// query is now: 'foo=bar&baz=bam'
@function up.Params#toQuery
@param {Object|FormData|string|Array|undefined} params
the params to convert
@return {string}
a query string built from the given params
@experimental
###
toQuery: ->
parts = u.map(@entries, @arrayEntryToQuery)
parts = u.compact(parts)
parts.join('&')
arrayEntryToQuery: (entry) =>
value = entry.value
# We cannot transpot a binary value in a query string.
if @isBinaryValue(value)
return undefined
query = encodeURIComponent(entry.name)
# There is a subtle difference when encoding blank values:
# 1. An undefined or null value is encoded to `key` with no equals sign
# 2. An empty string value is encoded to `key=` with an equals sign but no value
if u.isGiven(value)
query += "="
query += encodeURIComponent(value)
query
###**
Returns whether the given value cannot be encoded into a query string.
We will have `File` values in our params when we serialize a form with a file input.
These entries will be filtered out when converting to a query string.
@function up.Params#isBinaryValue
@internal
###
isBinaryValue: (value) ->
value instanceof Blob
hasBinaryValues: ->
values = u.map(@entries, 'value')
return u.some(values, @isBinaryValue)
###**
Builds an URL string from the given base URL and
this `up.Params` instance as a [query string](/up.Params.toString).
The base URL may or may not already contain a query string. The
additional query string will be joined with an `&` or `?` character accordingly.
@function up.Params#toURL
@param {string} base
The base URL that will be prepended to this `up.Params` object as a [query string](/up.Params.toString).
@return {string}
The built URL.
@experimental
###
toURL: (base) ->
parts = [base, @toQuery()]
parts = u.filter(parts, u.isPresent)
separator = if u.contains(base, '?') then '&' else '?'
parts.join(separator)
###**
Adds a new entry with the given `name` and `value`.
An `up.Params` instance can hold multiple entries with the same name.
To overwrite all existing entries with the given `name`, use `up.Params#set()` instead.
\#\#\# Example
var params = new up.Params()
params.add('foo', 'fooValue')
var foo = params.get('foo')
// foo is now 'fooValue'
@function up.Params#add
@param {string} name
The name of the new entry.
@param {any} value
The value of the new entry.
@experimental
###
add: (name, value) ->
@entries.push({name, value})
###**
Adds all entries from the given list of params.
The given params value may be of any [supported type](/up.Params).
@function up.Params#addAll
@param {Object|Array|string|up.Params|undefined} params
@experimental
###
addAll: (raw) ->
if u.isMissing(raw)
# nothing to do
else if raw instanceof @constructor
@entries.push(raw.entries...)
else if u.isArray(raw)
# internal use for copying
@entries.push(raw...)
else if u.isString(raw)
@addAllFromQuery(raw)
else if u.isFormData(raw)
@addAllFromFormData(raw)
else if u.isObject(raw)
@addAllFromObject(raw)
else
up.fail("Unsupport params type: %o", raw)
addAllFromObject: (object) ->
for key, value of object
valueElements = if u.isArray(value) then value else [value]
for valueElement in valueElements
@add(key, valueElement)
addAllFromQuery: (query) ->
for part in query.split('&')
if part
[name, value] = part.split('=')
name = decodeURIComponent(name)
# There are three forms we need to handle:
# (1) foo=bar should become { name: 'foo', bar: 'bar' }
# (2) foo= should become { name: 'foo', bar: '' }
# (3) foo should become { name: 'foo', bar: null }
if u.isGiven(value)
value = decodeURIComponent(value)
else
value = null
@add(name, value)
addAllFromFormData: (formData) ->
u.eachIterator formData.entries(), (value) =>
@add(value...)
###**
Sets the `value` for the entry with given `name`.
An `up.Params` instance can hold multiple entries with the same name.
All existing entries with the given `name` are [deleted](/up.Params.prototype.delete) before the
new entry is set. To add a new entry even if the `name` is taken, use `up.Params#add()`.
@function up.Params#set
@param {string} name
The name of the entry to set.
@param {any} value
The new value of the entry.
@experimental
###
set: (name, value) ->
@delete(name)
@add(name, value)
###**
Deletes all entries with the given `name`.
@function up.Params#delete
@param {string} name
@experimental
###
delete: (name) ->
@entries = u.reject(@entries, @matchEntryFn(name))
matchEntryFn: (name) ->
(entry) -> entry.name == name
###**
Returns the first param value with the given `name` from the given `params`.
Returns `undefined` if no param value with that name is set.
If the `name` denotes an array field (e.g. `foo[]`), *all* param values with the given `name`
are returned as an array. If no param value with that array name is set, an empty
array is returned.
To always return a single value use `up.Params#getFirst()` instead.
To always return an array of values use `up.Params#getAll()` instead.
\#\#\# Example
var params = new up.Params({ foo: 'fooValue', bar: 'barValue' })
var params = new up.Params([
{ name: 'foo', value: 'fooValue' }
{ name: 'bar[]', value: 'barValue1' }
{ name: 'bar[]', value: 'barValue2' })
]})
var foo = params.get('foo')
// foo is now 'fooValue'
var bar = params.get('bar')
// bar is now ['barValue1', 'barValue2']
@function up.Params#get
@param {string} name
@experimental
###
get: (name) ->
if @isArrayKey(name)
@getAll(name)
else
@getFirst(name)
###**
Returns the first param value with the given `name`.
Returns `undefined` if no param value with that name is set.
@function up.Params#getFirst
@param {string} name
@return {any}
The value of the param with the given name.
@internal
###
getFirst: (name) ->
entry = u.find(@entries, @matchEntryFn(name))
entry?.value
###**
Returns an array of all param values with the given `name`.
Returns an empty array if no param value with that name is set.
@function up.Params#getAll
@param {string} name
@return {Array}
An array of all values with the given name.
@internal
###
getAll: (name) ->
if @isArrayKey(name)
@getAll(name)
else
entries = u.map(@entries, @matchEntryFn(name))
u.map(entries, 'value')
isArrayKey: (key) ->
u.endsWith(key, '[]')
"#{u.isBlank.key}": ->
@entries.length == 0
###**
Constructs a new `up.Params` instance from the given `<form>`.
The returned params may be passed as `{ params }` option to
`up.request()` or `up.replace()`.
The constructed `up.Params` will include exactly those form values that would be
included in a regular form submission. In particular:
- All `<input>` types are suppported
- Field values are usually strings, but an `<input type="file">` will produce
[`File`](https://developer.mozilla.org/en-US/docs/Web/API/File) values.
- An `<input type="radio">` or `<input type="checkbox">` will only be added if they are `[checked]`.
- An `<select>` will only be added if at least one value is `[checked]`.
- If passed a `<select multiple>` or `<input type="file" multiple>`, all selected values are added.
If passed a `<select multiple>`, all selected values are added.
- Fields that are `[disabled]` are ignored
- Fields without a `[name]` attribute are ignored.
\#\#\# Example
Given this HTML form:
<form>
<input type="text" name="email" value="<EMAIL>">
<input type="<PASSWORD>" name="pass" value="<PASSWORD>">
</form>
This would serialize the form into an array representation:
var params = up.Params.fromForm('input[name=email]')
var email = params.get('email') // email is now '<EMAIL>'
var pass = params.get('pass') // pass is now '<PASSWORD>'
@function up.Params.fromForm
@param {Element|jQuery|string} form
A `<form>` element or a selector that matches a `<form>` element.
@return {up.Params}
A new `up.Params` instance with values from the given form.
@experimental
###
@fromForm: (form) ->
# If passed a selector, up.fragment.get() will prefer a match on the current layer.
form = up.fragment.get(form)
@fromFields(up.form.fields(form))
###**
Constructs a new `up.Params` instance from one or more
[HTML form field](https://www.w3schools.com/html/html_form_elements.asp).
The constructed `up.Params` will include exactly those form values that would be
included for the given fields in a regular form submission. If a given field wouldn't
submit a value (like an unchecked `<input type="checkbox">`, nothing will be added.
See `up.Params.fromForm()` for more details and examples.
@function up.Params.fromFields
@param {Element|List<Element>|jQuery} fields
@return {up.Params}
@experimental
###
@fromFields: (fields) ->
params = new @()
for field in u.wrapList(fields)
params.addField(field)
params
###**
Adds params from the given [HTML form field](https://www.w3schools.com/html/html_form_elements.asp).
The added params will include exactly those form values that would be
included for the given field in a regular form submission. If the given field wouldn't
submit a value (like an unchecked `<input type="checkbox">`, nothing will be added.
See `up.Params.fromForm()` for more details and examples.
@function up.Params#addField
@param {Element|jQuery} field
@experimental
###
addField: (field) ->
params = new @constructor()
field = e.get(field) # unwrap jQuery
# Input fields are excluded from form submissions if they have no [name]
# or when they are [disabled].
if (name = field.name) && (!field.disabled)
tagName = field.tagName
type = field.type
if tagName == 'SELECT'
for option in field.querySelectorAll('option')
if option.selected
@add(name, option.value)
else if type == 'checkbox' || type == 'radio'
if field.checked
@add(name, field.value)
else if type == 'file'
# The value of an input[type=file] is the local path displayed in the form.
# The actual File objects are in the #files property.
for file in field.files
@add(name, file)
else
@add(name, field.value)
"#{u.isEqual.key}": (other) ->
other && (@constructor == other.constructor) && u.isEqual(@entries, other.entries)
###**
Constructs a new `up.Params` instance from the given URL's
[query string](https://en.wikipedia.org/wiki/Query_string).
Constructs an empty `up.Params` instance if the given URL has no query string.
\#\#\# Example
var params = up.Params.fromURL('http://foo.com?foo=fooValue&bar=barValue')
var foo = params.get('foo')
// foo is now: 'fooValue'
@function up.Params.fromURL
@param {string} url
The URL from which to extract the query string.
@return {string|undefined}
The given URL's query string, or `undefined` if the URL has no query component.
@experimental
###
@fromURL: (url) ->
params = new @()
urlParts = u.parseURL(url)
if query = urlParts.search
query = query.replace(/^\?/, '')
params.addAll(query)
params
###**
Returns the given URL without its [query string](https://en.wikipedia.org/wiki/Query_string).
\#\#\# Example
var url = up.Params.stripURL('http://foo.com?key=value')
// url is now: 'http://foo.com'
@function up.Params.stripURL
@param {string} url
A URL (with or without a query string).
@return {string}
The given URL without its query string.
@experimental
###
@stripURL: (url) ->
return u.normalizeURL(url, search: false)
###**
If passed an `up.Params` instance, it is returned unchanged.
Otherwise constructs an `up.Params` instance from the given value.
The given params value may be of any [supported type](/up.Params)
The return value is always an `up.Params` instance.
@function up.Params.wrap
@param {Object|Array|string|up.Params|undefined} params
@return {up.Params}
@experimental
###
| true | u = up.util
e = up.element
###**
The `up.Params` class offers a consistent API to read and manipulate request parameters
independent of their type.
Request parameters are used in [form submissions](/up.Params.prototype.fromForm) and
[URLs](/up.Params.prototype.fromURL). Methods like `up.submit()` or `up.replace()` accept
request parameters as a `{ params }` option.
\#\#\# Supported parameter types
The following types of parameter representation are supported:
1. An object like `{ email: 'PI:EMAIL:<EMAIL>END_PI' }`
2. A query string like `'email=foo%40bar.com'`
3. An array of `{ name, value }` objects like `[{ name: 'email', value: 'PI:EMAIL:<EMAIL>END_PI' }]`
4. A [FormData](https://developer.mozilla.org/en-US/docs/Web/API/FormData) object.
On IE 11 and Edge, `FormData` payloads require a [polyfill for `FormData#entries()`](https://github.com/jimmywarting/FormData).
@class up.Params
###
class up.Params extends up.Class
###**
Constructs a new `up.Params` instance.
@constructor up.Params
@param {Object|Array|string|up.Params} [params]
An existing list of params with which to initialize the new `up.Params` object.
The given params value may be of any [supported type](/up.Params).
@return {up.Params}
@experimental
###
constructor: (raw) ->
@clear()
@addAll(raw)
###**
Removes all params from this object.
@function up.Params#clear
@experimental
###
clear: ->
@entries = []
"#{u.copy.key}": ->
new up.Params(@)
###**
Returns an object representation of this `up.Params` instance.
The returned value is a simple JavaScript object with properties
that correspond to the key/values in the given `params`.
\#\#\# Example
var params = new up.Params('foo=bar&baz=bam')
var object = params.toObject()
// object is now: {
// foo: 'bar',
// baz: 'bam'
// ]
@function up.Params#toObject
@return {Object}
@experimental
###
toObject: ->
obj = {}
for entry in @entries
{ name, value } = entry
unless u.isBasicObjectProperty(name)
if @isArrayKey(name)
obj[name] ||= []
obj[name].push(value)
else
obj[name] = value
obj
###**
Returns an array representation of this `up.Params` instance.
The returned value is a JavaScript array with elements that are objects with
`{ key }` and `{ value }` properties.
\#\#\# Example
var params = new up.Params('foo=bar&baz=bam')
var array = params.toArray()
// array is now: [
// { name: 'foo', value: 'bar' },
// { name: 'baz', value: 'bam' }
// ]
@function up.Params#toArray
@return {Array}
@experimental
###
toArray: ->
@entries
###**
Returns a [`FormData`](https://developer.mozilla.org/en-US/docs/Web/API/FormData) representation
of this `up.Params` instance.
\#\#\# Example
var params = new up.Params('foo=bar&baz=bam')
var formData = params.toFormData()
formData.get('foo') // 'bar'
formData.get('baz') // 'bam'
@function up.Params#toFormData
@return {FormData}
@experimental
###
toFormData: ->
formData = new FormData()
for entry in @entries
formData.append(entry.name, entry.value)
unless formData.entries
# If this browser cannot inspect FormData with the #entries()
# iterator, assign the original array for inspection by specs.
formData.originalArray = @entries
formData
###**
Returns an [query string](https://en.wikipedia.org/wiki/Query_string) for this `up.Params` instance.
The keys and values in the returned query string will be [percent-encoded](https://developer.mozilla.org/en-US/docs/Glossary/percent-encoding).
Non-primitive values (like [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File) will be omitted from
the retuned query string.
\#\#\# Example
var params = new up.Params({ foo: 'bar', baz: 'bam' })
var query = params.toQuery()
// query is now: 'foo=bar&baz=bam'
@function up.Params#toQuery
@param {Object|FormData|string|Array|undefined} params
the params to convert
@return {string}
a query string built from the given params
@experimental
###
toQuery: ->
parts = u.map(@entries, @arrayEntryToQuery)
parts = u.compact(parts)
parts.join('&')
arrayEntryToQuery: (entry) =>
value = entry.value
# We cannot transpot a binary value in a query string.
if @isBinaryValue(value)
return undefined
query = encodeURIComponent(entry.name)
# There is a subtle difference when encoding blank values:
# 1. An undefined or null value is encoded to `key` with no equals sign
# 2. An empty string value is encoded to `key=` with an equals sign but no value
if u.isGiven(value)
query += "="
query += encodeURIComponent(value)
query
###**
Returns whether the given value cannot be encoded into a query string.
We will have `File` values in our params when we serialize a form with a file input.
These entries will be filtered out when converting to a query string.
@function up.Params#isBinaryValue
@internal
###
isBinaryValue: (value) ->
value instanceof Blob
hasBinaryValues: ->
values = u.map(@entries, 'value')
return u.some(values, @isBinaryValue)
###**
Builds an URL string from the given base URL and
this `up.Params` instance as a [query string](/up.Params.toString).
The base URL may or may not already contain a query string. The
additional query string will be joined with an `&` or `?` character accordingly.
@function up.Params#toURL
@param {string} base
The base URL that will be prepended to this `up.Params` object as a [query string](/up.Params.toString).
@return {string}
The built URL.
@experimental
###
toURL: (base) ->
parts = [base, @toQuery()]
parts = u.filter(parts, u.isPresent)
separator = if u.contains(base, '?') then '&' else '?'
parts.join(separator)
###**
Adds a new entry with the given `name` and `value`.
An `up.Params` instance can hold multiple entries with the same name.
To overwrite all existing entries with the given `name`, use `up.Params#set()` instead.
\#\#\# Example
var params = new up.Params()
params.add('foo', 'fooValue')
var foo = params.get('foo')
// foo is now 'fooValue'
@function up.Params#add
@param {string} name
The name of the new entry.
@param {any} value
The value of the new entry.
@experimental
###
add: (name, value) ->
@entries.push({name, value})
###**
Adds all entries from the given list of params.
The given params value may be of any [supported type](/up.Params).
@function up.Params#addAll
@param {Object|Array|string|up.Params|undefined} params
@experimental
###
addAll: (raw) ->
if u.isMissing(raw)
# nothing to do
else if raw instanceof @constructor
@entries.push(raw.entries...)
else if u.isArray(raw)
# internal use for copying
@entries.push(raw...)
else if u.isString(raw)
@addAllFromQuery(raw)
else if u.isFormData(raw)
@addAllFromFormData(raw)
else if u.isObject(raw)
@addAllFromObject(raw)
else
up.fail("Unsupport params type: %o", raw)
addAllFromObject: (object) ->
for key, value of object
valueElements = if u.isArray(value) then value else [value]
for valueElement in valueElements
@add(key, valueElement)
addAllFromQuery: (query) ->
for part in query.split('&')
if part
[name, value] = part.split('=')
name = decodeURIComponent(name)
# There are three forms we need to handle:
# (1) foo=bar should become { name: 'foo', bar: 'bar' }
# (2) foo= should become { name: 'foo', bar: '' }
# (3) foo should become { name: 'foo', bar: null }
if u.isGiven(value)
value = decodeURIComponent(value)
else
value = null
@add(name, value)
addAllFromFormData: (formData) ->
u.eachIterator formData.entries(), (value) =>
@add(value...)
###**
Sets the `value` for the entry with given `name`.
An `up.Params` instance can hold multiple entries with the same name.
All existing entries with the given `name` are [deleted](/up.Params.prototype.delete) before the
new entry is set. To add a new entry even if the `name` is taken, use `up.Params#add()`.
@function up.Params#set
@param {string} name
The name of the entry to set.
@param {any} value
The new value of the entry.
@experimental
###
set: (name, value) ->
@delete(name)
@add(name, value)
###**
Deletes all entries with the given `name`.
@function up.Params#delete
@param {string} name
@experimental
###
delete: (name) ->
@entries = u.reject(@entries, @matchEntryFn(name))
matchEntryFn: (name) ->
(entry) -> entry.name == name
###**
Returns the first param value with the given `name` from the given `params`.
Returns `undefined` if no param value with that name is set.
If the `name` denotes an array field (e.g. `foo[]`), *all* param values with the given `name`
are returned as an array. If no param value with that array name is set, an empty
array is returned.
To always return a single value use `up.Params#getFirst()` instead.
To always return an array of values use `up.Params#getAll()` instead.
\#\#\# Example
var params = new up.Params({ foo: 'fooValue', bar: 'barValue' })
var params = new up.Params([
{ name: 'foo', value: 'fooValue' }
{ name: 'bar[]', value: 'barValue1' }
{ name: 'bar[]', value: 'barValue2' })
]})
var foo = params.get('foo')
// foo is now 'fooValue'
var bar = params.get('bar')
// bar is now ['barValue1', 'barValue2']
@function up.Params#get
@param {string} name
@experimental
###
get: (name) ->
if @isArrayKey(name)
@getAll(name)
else
@getFirst(name)
###**
Returns the first param value with the given `name`.
Returns `undefined` if no param value with that name is set.
@function up.Params#getFirst
@param {string} name
@return {any}
The value of the param with the given name.
@internal
###
getFirst: (name) ->
entry = u.find(@entries, @matchEntryFn(name))
entry?.value
###**
Returns an array of all param values with the given `name`.
Returns an empty array if no param value with that name is set.
@function up.Params#getAll
@param {string} name
@return {Array}
An array of all values with the given name.
@internal
###
getAll: (name) ->
if @isArrayKey(name)
@getAll(name)
else
entries = u.map(@entries, @matchEntryFn(name))
u.map(entries, 'value')
isArrayKey: (key) ->
u.endsWith(key, '[]')
"#{u.isBlank.key}": ->
@entries.length == 0
###**
Constructs a new `up.Params` instance from the given `<form>`.
The returned params may be passed as `{ params }` option to
`up.request()` or `up.replace()`.
The constructed `up.Params` will include exactly those form values that would be
included in a regular form submission. In particular:
- All `<input>` types are suppported
- Field values are usually strings, but an `<input type="file">` will produce
[`File`](https://developer.mozilla.org/en-US/docs/Web/API/File) values.
- An `<input type="radio">` or `<input type="checkbox">` will only be added if they are `[checked]`.
- An `<select>` will only be added if at least one value is `[checked]`.
- If passed a `<select multiple>` or `<input type="file" multiple>`, all selected values are added.
If passed a `<select multiple>`, all selected values are added.
- Fields that are `[disabled]` are ignored
- Fields without a `[name]` attribute are ignored.
\#\#\# Example
Given this HTML form:
<form>
<input type="text" name="email" value="PI:EMAIL:<EMAIL>END_PI">
<input type="PI:PASSWORD:<PASSWORD>END_PI" name="pass" value="PI:PASSWORD:<PASSWORD>END_PI">
</form>
This would serialize the form into an array representation:
var params = up.Params.fromForm('input[name=email]')
var email = params.get('email') // email is now 'PI:EMAIL:<EMAIL>END_PI'
var pass = params.get('pass') // pass is now 'PI:PASSWORD:<PASSWORD>END_PI'
@function up.Params.fromForm
@param {Element|jQuery|string} form
A `<form>` element or a selector that matches a `<form>` element.
@return {up.Params}
A new `up.Params` instance with values from the given form.
@experimental
###
@fromForm: (form) ->
# If passed a selector, up.fragment.get() will prefer a match on the current layer.
form = up.fragment.get(form)
@fromFields(up.form.fields(form))
###**
Constructs a new `up.Params` instance from one or more
[HTML form field](https://www.w3schools.com/html/html_form_elements.asp).
The constructed `up.Params` will include exactly those form values that would be
included for the given fields in a regular form submission. If a given field wouldn't
submit a value (like an unchecked `<input type="checkbox">`, nothing will be added.
See `up.Params.fromForm()` for more details and examples.
@function up.Params.fromFields
@param {Element|List<Element>|jQuery} fields
@return {up.Params}
@experimental
###
@fromFields: (fields) ->
params = new @()
for field in u.wrapList(fields)
params.addField(field)
params
###**
Adds params from the given [HTML form field](https://www.w3schools.com/html/html_form_elements.asp).
The added params will include exactly those form values that would be
included for the given field in a regular form submission. If the given field wouldn't
submit a value (like an unchecked `<input type="checkbox">`, nothing will be added.
See `up.Params.fromForm()` for more details and examples.
@function up.Params#addField
@param {Element|jQuery} field
@experimental
###
addField: (field) ->
params = new @constructor()
field = e.get(field) # unwrap jQuery
# Input fields are excluded from form submissions if they have no [name]
# or when they are [disabled].
if (name = field.name) && (!field.disabled)
tagName = field.tagName
type = field.type
if tagName == 'SELECT'
for option in field.querySelectorAll('option')
if option.selected
@add(name, option.value)
else if type == 'checkbox' || type == 'radio'
if field.checked
@add(name, field.value)
else if type == 'file'
# The value of an input[type=file] is the local path displayed in the form.
# The actual File objects are in the #files property.
for file in field.files
@add(name, file)
else
@add(name, field.value)
"#{u.isEqual.key}": (other) ->
other && (@constructor == other.constructor) && u.isEqual(@entries, other.entries)
###**
Constructs a new `up.Params` instance from the given URL's
[query string](https://en.wikipedia.org/wiki/Query_string).
Constructs an empty `up.Params` instance if the given URL has no query string.
\#\#\# Example
var params = up.Params.fromURL('http://foo.com?foo=fooValue&bar=barValue')
var foo = params.get('foo')
// foo is now: 'fooValue'
@function up.Params.fromURL
@param {string} url
The URL from which to extract the query string.
@return {string|undefined}
The given URL's query string, or `undefined` if the URL has no query component.
@experimental
###
@fromURL: (url) ->
params = new @()
urlParts = u.parseURL(url)
if query = urlParts.search
query = query.replace(/^\?/, '')
params.addAll(query)
params
###**
Returns the given URL without its [query string](https://en.wikipedia.org/wiki/Query_string).
\#\#\# Example
var url = up.Params.stripURL('http://foo.com?key=value')
// url is now: 'http://foo.com'
@function up.Params.stripURL
@param {string} url
A URL (with or without a query string).
@return {string}
The given URL without its query string.
@experimental
###
@stripURL: (url) ->
return u.normalizeURL(url, search: false)
###**
If passed an `up.Params` instance, it is returned unchanged.
Otherwise constructs an `up.Params` instance from the given value.
The given params value may be of any [supported type](/up.Params)
The return value is always an `up.Params` instance.
@function up.Params.wrap
@param {Object|Array|string|up.Params|undefined} params
@return {up.Params}
@experimental
###
|
[
{
"context": " (username, password) ->\n credentials = \"#{username}:#{password}\"\n basicCredentials = new Buff",
"end": 2324,
"score": 0.4347600042819977,
"start": 2316,
"tag": "PASSWORD",
"value": "username"
},
{
"context": "password) ->\n credentials = \"#{us... | main.coffee | NickH-nz/request-json | 0 | request = require "request"
fs = require "fs"
url = require "url"
depd = require "depd"
deprecate = depd "request-json"
requestJson = module.exports
# Function to build a request json client instance.
requestJson.createClient = (url, options = {}) ->
new requestJson.JsonClient url, options
requestJson.newClient = (url, options = {}) ->
deprecate "newClient() is deprecated, please use createClient()"
requestJson.createClient(url, options)
helpers =
# Merge two js objects. The result is a new object, the ones given in
# parameter are not changed.
merge: (obj1, obj2) ->
result = {}
result[key] = obj1[key] for key of obj1
if obj2?
result[key] = obj2[key] for key of obj2
result
# Build request options from every given parameters.
buildOptions: (clientOptions, clientHeaders, host, path, requestOptions) ->
# Check if there is something to merge before performing additional
# operation
if requestOptions isnt {}
options = helpers.merge clientOptions, requestOptions
if requestOptions? and requestOptions isnt {} and requestOptions.headers
options.headers = \
helpers.merge clientHeaders, requestOptions.headers
else
options.headers = clientHeaders
options.uri = url.resolve host, path
options
# Parse body assuming the body is a json object. Send an error if the body
# can't be parsed.
parseBody: (error, response, body, callback) ->
if typeof body is "string" and body isnt ""
try
parsed = JSON.parse body
catch err
msg = "Parsing error : #{err.message}, body= \n #{body}"
error ?= new Error msg
parsed = body
else parsed = body
callback error, response, parsed
# Small HTTP client for easy json interactions with Cozy backends.
class requestJson.JsonClient
# Set default headers
constructor: (@host, @options = {}) ->
@headers = @options.headers ? {}
@headers['accept'] = 'application/json'
@headers['user-agent'] = "request-json/1.0"
# Set basic authentication on each requests
setBasicAuth: (username, password) ->
credentials = "#{username}:#{password}"
basicCredentials = new Buffer(credentials).toString('base64')
@headers["authorization"] = "Basic #{basicCredentials}"
# Add a token to request header.
setToken: (token) ->
@headers["x-auth-token"] = token
# Add OAuth2 Bearer token to request header.
setBearerToken: (token) ->
@headers["authorization"] = "Bearer #{token}"
# Send a GET request to path. Parse response body to obtain a JS object.
get: (path, options, callback, parse = true) ->
if typeof options is 'function'
parse = callback if typeof callback is 'boolean'
callback = options
options = {}
opts = helpers.buildOptions @options, @headers, @host, path, options
opts.method = 'GET'
request opts, (error, response, body) ->
if parse then helpers.parseBody error, response, body, callback
else callback error, response, body
# Send a POST request to path with given JSON as body.
post: (path, json, options, callback, parse = true) ->
if typeof options is 'function'
parse = callback if typeof callback is 'boolean'
callback = options
options = {}
opts = helpers.buildOptions @options, @headers, @host, path, options
opts.method = "POST"
opts.json = json
request opts, (error, response, body) ->
if parse then helpers.parseBody error, response, body, callback
else callback error, response, body
# Send a PUT request to path with given JSON as body.
put: (path, json, options, callback, parse = true) ->
if typeof options is 'function'
parse = callback if typeof callback is 'boolean'
callback = options
options = {}
opts = helpers.buildOptions @options, @headers, @host, path, options
opts.method = "PUT"
opts.json = json
request opts, (error, response, body) ->
if parse then helpers.parseBody error, response, body, callback
else callback error, response, body
# Send a PATCH request to path with given JSON as body.
patch: (path, json, options, callback, parse = true) ->
if typeof options is 'function'
parse = callback if typeof callback is 'boolean'
callback = options
options = {}
opts = helpers.buildOptions @options, @headers, @host, path, options
opts.method = "PATCH"
opts.json = json
request opts, (error, response, body) ->
if parse then helpers.parseBody error, response, body, callback
else callback error, response, body
# Send a HEAD request to path. Expect no response body.
head: (path, options, callback) ->
if typeof options is 'function'
parse = callback if typeof callback is 'boolean'
callback = options
options = {}
opts = helpers.buildOptions @options, @headers, @host, path, options
opts.method = 'HEAD'
request opts, (error, response, body) ->
callback error, response
# Send a DELETE request to path.
del: (path, options, callback, parse = true) ->
if typeof options is 'function'
parse = callback if typeof callback is 'boolean'
callback = options
options = {}
opts = helpers.buildOptions @options, @headers, @host, path, options
opts.method = "DELETE"
request opts, (error, response, body) ->
if parse then helpers.parseBody error, response, body, callback
else callback error, response, body
# Alias for del
delete: (path, options, callback, parse = true) ->
@del path, options, callback, parse
# Send a post request with file located at given path as attachment
# (multipart form)
# Use a read stream for that.
# If you use a stream, it must have a "path" attribute...
# ...with its path or filename
sendFile: (path, files, data, callback) ->
callback = data if typeof(data) is "function"
req = @post path, null, callback, false #do not parse
form = req.form()
unless typeof(data) is "function"
for att of data
form.append att, data[att]
# files is a string so it is a file path
if typeof files is "string"
form.append "file", fs.createReadStream files
# files is not a string and is not an array so it is a stream
else if not Array.isArray files
form.append "file", files
# files is an array of strings and streams
else
index = 0
for file in files
index++
if typeof file is "string"
form.append "file#{index}", fs.createReadStream(file)
else
form.append "file#{index}", file
# Send a put request with file located at given path as attachment.
# Use a read stream for that.
# If you use a stream, it must have a "path" attribute...
# ...with its path or filename
putFile: (path, file, data, callback) ->
callback = data if typeof(data) is "function"
req = @put path, null, callback, false #do not parse
# file is a string so it is a file path
if typeof file is "string"
fs.createReadStream(file).pipe(req)
# file is not a string and is not an array so it is a stream
else if not Array.isArray file
file.pipe(req)
# Retrieve file located at *path* and save it as *filePath*.
# Use a write stream for that.
saveFile: (path, filePath, callback) ->
stream = @get path, callback, false # do not parse result
stream.pipe fs.createWriteStream(filePath)
# Retrieve file located at *path* and return it as stream.
saveFileAsStream: (path, callback) ->
@get path, callback, false # do not parse result
| 42458 | request = require "request"
fs = require "fs"
url = require "url"
depd = require "depd"
deprecate = depd "request-json"
requestJson = module.exports
# Function to build a request json client instance.
requestJson.createClient = (url, options = {}) ->
new requestJson.JsonClient url, options
requestJson.newClient = (url, options = {}) ->
deprecate "newClient() is deprecated, please use createClient()"
requestJson.createClient(url, options)
helpers =
# Merge two js objects. The result is a new object, the ones given in
# parameter are not changed.
merge: (obj1, obj2) ->
result = {}
result[key] = obj1[key] for key of obj1
if obj2?
result[key] = obj2[key] for key of obj2
result
# Build request options from every given parameters.
buildOptions: (clientOptions, clientHeaders, host, path, requestOptions) ->
# Check if there is something to merge before performing additional
# operation
if requestOptions isnt {}
options = helpers.merge clientOptions, requestOptions
if requestOptions? and requestOptions isnt {} and requestOptions.headers
options.headers = \
helpers.merge clientHeaders, requestOptions.headers
else
options.headers = clientHeaders
options.uri = url.resolve host, path
options
# Parse body assuming the body is a json object. Send an error if the body
# can't be parsed.
parseBody: (error, response, body, callback) ->
if typeof body is "string" and body isnt ""
try
parsed = JSON.parse body
catch err
msg = "Parsing error : #{err.message}, body= \n #{body}"
error ?= new Error msg
parsed = body
else parsed = body
callback error, response, parsed
# Small HTTP client for easy json interactions with Cozy backends.
class requestJson.JsonClient
# Set default headers
constructor: (@host, @options = {}) ->
@headers = @options.headers ? {}
@headers['accept'] = 'application/json'
@headers['user-agent'] = "request-json/1.0"
# Set basic authentication on each requests
setBasicAuth: (username, password) ->
credentials = "#{<PASSWORD>}:#{<PASSWORD>}"
basicCredentials = new Buffer(credentials).toString('base64')
@headers["authorization"] = "Basic #{basicCredentials}"
# Add a token to request header.
setToken: (token) ->
@headers["x-auth-token"] = token
# Add OAuth2 Bearer token to request header.
setBearerToken: (token) ->
@headers["authorization"] = "Bearer #{token}"
# Send a GET request to path. Parse response body to obtain a JS object.
get: (path, options, callback, parse = true) ->
if typeof options is 'function'
parse = callback if typeof callback is 'boolean'
callback = options
options = {}
opts = helpers.buildOptions @options, @headers, @host, path, options
opts.method = 'GET'
request opts, (error, response, body) ->
if parse then helpers.parseBody error, response, body, callback
else callback error, response, body
# Send a POST request to path with given JSON as body.
post: (path, json, options, callback, parse = true) ->
if typeof options is 'function'
parse = callback if typeof callback is 'boolean'
callback = options
options = {}
opts = helpers.buildOptions @options, @headers, @host, path, options
opts.method = "POST"
opts.json = json
request opts, (error, response, body) ->
if parse then helpers.parseBody error, response, body, callback
else callback error, response, body
# Send a PUT request to path with given JSON as body.
put: (path, json, options, callback, parse = true) ->
if typeof options is 'function'
parse = callback if typeof callback is 'boolean'
callback = options
options = {}
opts = helpers.buildOptions @options, @headers, @host, path, options
opts.method = "PUT"
opts.json = json
request opts, (error, response, body) ->
if parse then helpers.parseBody error, response, body, callback
else callback error, response, body
# Send a PATCH request to path with given JSON as body.
patch: (path, json, options, callback, parse = true) ->
if typeof options is 'function'
parse = callback if typeof callback is 'boolean'
callback = options
options = {}
opts = helpers.buildOptions @options, @headers, @host, path, options
opts.method = "PATCH"
opts.json = json
request opts, (error, response, body) ->
if parse then helpers.parseBody error, response, body, callback
else callback error, response, body
# Send a HEAD request to path. Expect no response body.
head: (path, options, callback) ->
if typeof options is 'function'
parse = callback if typeof callback is 'boolean'
callback = options
options = {}
opts = helpers.buildOptions @options, @headers, @host, path, options
opts.method = 'HEAD'
request opts, (error, response, body) ->
callback error, response
# Send a DELETE request to path.
del: (path, options, callback, parse = true) ->
if typeof options is 'function'
parse = callback if typeof callback is 'boolean'
callback = options
options = {}
opts = helpers.buildOptions @options, @headers, @host, path, options
opts.method = "DELETE"
request opts, (error, response, body) ->
if parse then helpers.parseBody error, response, body, callback
else callback error, response, body
# Alias for del
delete: (path, options, callback, parse = true) ->
@del path, options, callback, parse
# Send a post request with file located at given path as attachment
# (multipart form)
# Use a read stream for that.
# If you use a stream, it must have a "path" attribute...
# ...with its path or filename
sendFile: (path, files, data, callback) ->
callback = data if typeof(data) is "function"
req = @post path, null, callback, false #do not parse
form = req.form()
unless typeof(data) is "function"
for att of data
form.append att, data[att]
# files is a string so it is a file path
if typeof files is "string"
form.append "file", fs.createReadStream files
# files is not a string and is not an array so it is a stream
else if not Array.isArray files
form.append "file", files
# files is an array of strings and streams
else
index = 0
for file in files
index++
if typeof file is "string"
form.append "file#{index}", fs.createReadStream(file)
else
form.append "file#{index}", file
# Send a put request with file located at given path as attachment.
# Use a read stream for that.
# If you use a stream, it must have a "path" attribute...
# ...with its path or filename
putFile: (path, file, data, callback) ->
callback = data if typeof(data) is "function"
req = @put path, null, callback, false #do not parse
# file is a string so it is a file path
if typeof file is "string"
fs.createReadStream(file).pipe(req)
# file is not a string and is not an array so it is a stream
else if not Array.isArray file
file.pipe(req)
# Retrieve file located at *path* and save it as *filePath*.
# Use a write stream for that.
saveFile: (path, filePath, callback) ->
stream = @get path, callback, false # do not parse result
stream.pipe fs.createWriteStream(filePath)
# Retrieve file located at *path* and return it as stream.
saveFileAsStream: (path, callback) ->
@get path, callback, false # do not parse result
| true | request = require "request"
fs = require "fs"
url = require "url"
depd = require "depd"
deprecate = depd "request-json"
requestJson = module.exports
# Function to build a request json client instance.
requestJson.createClient = (url, options = {}) ->
new requestJson.JsonClient url, options
requestJson.newClient = (url, options = {}) ->
deprecate "newClient() is deprecated, please use createClient()"
requestJson.createClient(url, options)
helpers =
# Merge two js objects. The result is a new object, the ones given in
# parameter are not changed.
merge: (obj1, obj2) ->
result = {}
result[key] = obj1[key] for key of obj1
if obj2?
result[key] = obj2[key] for key of obj2
result
# Build request options from every given parameters.
buildOptions: (clientOptions, clientHeaders, host, path, requestOptions) ->
# Check if there is something to merge before performing additional
# operation
if requestOptions isnt {}
options = helpers.merge clientOptions, requestOptions
if requestOptions? and requestOptions isnt {} and requestOptions.headers
options.headers = \
helpers.merge clientHeaders, requestOptions.headers
else
options.headers = clientHeaders
options.uri = url.resolve host, path
options
# Parse body assuming the body is a json object. Send an error if the body
# can't be parsed.
parseBody: (error, response, body, callback) ->
if typeof body is "string" and body isnt ""
try
parsed = JSON.parse body
catch err
msg = "Parsing error : #{err.message}, body= \n #{body}"
error ?= new Error msg
parsed = body
else parsed = body
callback error, response, parsed
# Small HTTP client for easy json interactions with Cozy backends.
class requestJson.JsonClient
# Set default headers
constructor: (@host, @options = {}) ->
@headers = @options.headers ? {}
@headers['accept'] = 'application/json'
@headers['user-agent'] = "request-json/1.0"
# Set basic authentication on each requests
setBasicAuth: (username, password) ->
credentials = "#{PI:PASSWORD:<PASSWORD>END_PI}:#{PI:PASSWORD:<PASSWORD>END_PI}"
basicCredentials = new Buffer(credentials).toString('base64')
@headers["authorization"] = "Basic #{basicCredentials}"
# Add a token to request header.
setToken: (token) ->
@headers["x-auth-token"] = token
# Add OAuth2 Bearer token to request header.
setBearerToken: (token) ->
@headers["authorization"] = "Bearer #{token}"
# Send a GET request to path. Parse response body to obtain a JS object.
get: (path, options, callback, parse = true) ->
if typeof options is 'function'
parse = callback if typeof callback is 'boolean'
callback = options
options = {}
opts = helpers.buildOptions @options, @headers, @host, path, options
opts.method = 'GET'
request opts, (error, response, body) ->
if parse then helpers.parseBody error, response, body, callback
else callback error, response, body
# Send a POST request to path with given JSON as body.
post: (path, json, options, callback, parse = true) ->
if typeof options is 'function'
parse = callback if typeof callback is 'boolean'
callback = options
options = {}
opts = helpers.buildOptions @options, @headers, @host, path, options
opts.method = "POST"
opts.json = json
request opts, (error, response, body) ->
if parse then helpers.parseBody error, response, body, callback
else callback error, response, body
# Send a PUT request to path with given JSON as body.
put: (path, json, options, callback, parse = true) ->
if typeof options is 'function'
parse = callback if typeof callback is 'boolean'
callback = options
options = {}
opts = helpers.buildOptions @options, @headers, @host, path, options
opts.method = "PUT"
opts.json = json
request opts, (error, response, body) ->
if parse then helpers.parseBody error, response, body, callback
else callback error, response, body
# Send a PATCH request to path with given JSON as body.
patch: (path, json, options, callback, parse = true) ->
if typeof options is 'function'
parse = callback if typeof callback is 'boolean'
callback = options
options = {}
opts = helpers.buildOptions @options, @headers, @host, path, options
opts.method = "PATCH"
opts.json = json
request opts, (error, response, body) ->
if parse then helpers.parseBody error, response, body, callback
else callback error, response, body
# Send a HEAD request to path. Expect no response body.
head: (path, options, callback) ->
if typeof options is 'function'
parse = callback if typeof callback is 'boolean'
callback = options
options = {}
opts = helpers.buildOptions @options, @headers, @host, path, options
opts.method = 'HEAD'
request opts, (error, response, body) ->
callback error, response
# Send a DELETE request to path.
del: (path, options, callback, parse = true) ->
if typeof options is 'function'
parse = callback if typeof callback is 'boolean'
callback = options
options = {}
opts = helpers.buildOptions @options, @headers, @host, path, options
opts.method = "DELETE"
request opts, (error, response, body) ->
if parse then helpers.parseBody error, response, body, callback
else callback error, response, body
# Alias for del
delete: (path, options, callback, parse = true) ->
@del path, options, callback, parse
# Send a post request with file located at given path as attachment
# (multipart form)
# Use a read stream for that.
# If you use a stream, it must have a "path" attribute...
# ...with its path or filename
sendFile: (path, files, data, callback) ->
callback = data if typeof(data) is "function"
req = @post path, null, callback, false #do not parse
form = req.form()
unless typeof(data) is "function"
for att of data
form.append att, data[att]
# files is a string so it is a file path
if typeof files is "string"
form.append "file", fs.createReadStream files
# files is not a string and is not an array so it is a stream
else if not Array.isArray files
form.append "file", files
# files is an array of strings and streams
else
index = 0
for file in files
index++
if typeof file is "string"
form.append "file#{index}", fs.createReadStream(file)
else
form.append "file#{index}", file
# Send a put request with file located at given path as attachment.
# Use a read stream for that.
# If you use a stream, it must have a "path" attribute...
# ...with its path or filename
putFile: (path, file, data, callback) ->
callback = data if typeof(data) is "function"
req = @put path, null, callback, false #do not parse
# file is a string so it is a file path
if typeof file is "string"
fs.createReadStream(file).pipe(req)
# file is not a string and is not an array so it is a stream
else if not Array.isArray file
file.pipe(req)
# Retrieve file located at *path* and save it as *filePath*.
# Use a write stream for that.
saveFile: (path, filePath, callback) ->
stream = @get path, callback, false # do not parse result
stream.pipe fs.createWriteStream(filePath)
# Retrieve file located at *path* and return it as stream.
saveFileAsStream: (path, callback) ->
@get path, callback, false # do not parse result
|
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.9999127388000488,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/coffee/react/contest-voting.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import { ArtEntryList } from './contest-voting/art-entry-list'
import { EntryList } from './contest-voting/entry-list'
propsFunction = (target) ->
data = osu.parseJson target.dataset.src
return {
contest: data.contest
selected: data.userVotes
options:
showPreview: data.contest['type'] == 'music'
showLink: data.contest['type'] == 'beatmap' && _.some(data.contest.entries, 'preview')
}
reactTurbolinks.register 'contestArtList', ArtEntryList, propsFunction
reactTurbolinks.register 'contestList', EntryList, propsFunction
| 29087 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import { ArtEntryList } from './contest-voting/art-entry-list'
import { EntryList } from './contest-voting/entry-list'
propsFunction = (target) ->
data = osu.parseJson target.dataset.src
return {
contest: data.contest
selected: data.userVotes
options:
showPreview: data.contest['type'] == 'music'
showLink: data.contest['type'] == 'beatmap' && _.some(data.contest.entries, 'preview')
}
reactTurbolinks.register 'contestArtList', ArtEntryList, propsFunction
reactTurbolinks.register 'contestList', EntryList, propsFunction
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import { ArtEntryList } from './contest-voting/art-entry-list'
import { EntryList } from './contest-voting/entry-list'
propsFunction = (target) ->
data = osu.parseJson target.dataset.src
return {
contest: data.contest
selected: data.userVotes
options:
showPreview: data.contest['type'] == 'music'
showLink: data.contest['type'] == 'beatmap' && _.some(data.contest.entries, 'preview')
}
reactTurbolinks.register 'contestArtList', ArtEntryList, propsFunction
reactTurbolinks.register 'contestList', EntryList, propsFunction
|
[
{
"context": "mat\n pathname: '/'\n query:\n user: @username\n password: @password\n version: 12\n\n",
"end": 439,
"score": 0.9961163997650146,
"start": 430,
"tag": "USERNAME",
"value": "@username"
},
{
"context": " query:\n user: @username\n ... | src/client.coffee | sittiaminah/hubot-minecraft | 161 | events = require 'events'
http = require 'http'
url = require 'url'
net = require 'net'
Parser = require('./parser').Parser
Packet = require('./packet').Packet
Protocol = require('./protocol')
class exports.Client extends events.EventEmitter
# TODO: Refactor!
constructor: (@port, @host, @username, @password) ->
@parser = new Parser()
loginPath = url.format
pathname: '/'
query:
user: @username
password: @password
version: 12
req = http.get {hostname: 'login.minecraft.net', path: loginPath}, (resp) =>
resp.on 'data', (data) =>
body = data.toString()
if body is 'Bad login'
console.error(body)
process.exit(1)
sessionId = body.split(':', 4)[3]
# Connect to the server
@conn = net.createConnection(@port, @host)
@conn.on 'data', (data) => @addData(data)
@conn.on 'end', => @emit 'end'
@conn.on 'connect', =>
# Send our username
@writePacket 0x02, @username
# respond to keepalive packets
@on 'keepalive', (id) => @writePacket 0x00, id
# Get back the serverId
@once 'handshake', (serverId) =>
# Verify the serverId
sessionPath = url.format
pathname: '/game/joinserver.jsp'
query:
user: @username
sessionId: sessionId
serverId: serverId
vreq = http.get {hostname: 'session.minecraft.net', path: sessionPath}, (vresp) =>
vresp.on 'data', (data) =>
body = data.toString()
if body isnt 'OK'
console.error(body)
process.exit(1)
@writePacket 0x01, 22, @username, 0, 0, 0, 0, 0, 0
@once 'login', (@eId, _, seed, mode, dim, difficulty, height, maxPlayers) =>
@world =
seed: seed
mode: mode
dimension: dim
difficulty: difficulty
maxPlayers: maxPlayers
@emit 'connect', @
# Echos the 0x0D packet (needs to happen otherwise server fucks out)
@once 'player position and look', (x, stance, y, z, yaw, pitch, grounded) =>
@writepacket 0x0D, arguments...
writePacket: (header, payload...) ->
if typeof(payload[payload.length - 1]) is 'function'
callback = payload.pop()
packet = new Packet(header)
@conn.write packet.build(payload...), callback
addData: (data) ->
# If data already exists, add this new stuff
@packet = if @packet?
p = new Buffer(@packet.length + data.length)
@packet.copy(p, 0, 0)
data.copy(p, @packet.length, 0)
p
else
data
@parsePacket()
parsePacket: ->
try
[bytesParsed, header, payload] = @parser.parse(@packet)
# Continue parsing left over data
@packet = if bytesParsed < @packet.length
@packet.slice(bytesParsed)
else
null
# Human readable event with the payload as args
event = Protocol.LABELS[header] || 'unhandled'
@emit event, payload...
@parsePacket() if @packet?
# An error parsing means the data crosses over two packets and we need to try again when another packet comes in.
catch e
@parser.rewind()
# Convenience functions
say: (msg) ->
msg.split("\n").forEach (line) =>
if line.length > 100
line = line.substring(0, 100)
chatPacket = new Packet(0x03)
@conn.write chatPacket.build(line)
| 165881 | events = require 'events'
http = require 'http'
url = require 'url'
net = require 'net'
Parser = require('./parser').Parser
Packet = require('./packet').Packet
Protocol = require('./protocol')
class exports.Client extends events.EventEmitter
# TODO: Refactor!
constructor: (@port, @host, @username, @password) ->
@parser = new Parser()
loginPath = url.format
pathname: '/'
query:
user: @username
password: <PASSWORD>
version: 12
req = http.get {hostname: 'login.minecraft.net', path: loginPath}, (resp) =>
resp.on 'data', (data) =>
body = data.toString()
if body is 'Bad login'
console.error(body)
process.exit(1)
sessionId = body.split(':', 4)[3]
# Connect to the server
@conn = net.createConnection(@port, @host)
@conn.on 'data', (data) => @addData(data)
@conn.on 'end', => @emit 'end'
@conn.on 'connect', =>
# Send our username
@writePacket 0x02, @username
# respond to keepalive packets
@on 'keepalive', (id) => @writePacket 0x00, id
# Get back the serverId
@once 'handshake', (serverId) =>
# Verify the serverId
sessionPath = url.format
pathname: '/game/joinserver.jsp'
query:
user: @username
sessionId: sessionId
serverId: serverId
vreq = http.get {hostname: 'session.minecraft.net', path: sessionPath}, (vresp) =>
vresp.on 'data', (data) =>
body = data.toString()
if body isnt 'OK'
console.error(body)
process.exit(1)
@writePacket 0x01, 22, @username, 0, 0, 0, 0, 0, 0
@once 'login', (@eId, _, seed, mode, dim, difficulty, height, maxPlayers) =>
@world =
seed: seed
mode: mode
dimension: dim
difficulty: difficulty
maxPlayers: maxPlayers
@emit 'connect', @
# Echos the 0x0D packet (needs to happen otherwise server fucks out)
@once 'player position and look', (x, stance, y, z, yaw, pitch, grounded) =>
@writepacket 0x0D, arguments...
writePacket: (header, payload...) ->
if typeof(payload[payload.length - 1]) is 'function'
callback = payload.pop()
packet = new Packet(header)
@conn.write packet.build(payload...), callback
addData: (data) ->
# If data already exists, add this new stuff
@packet = if @packet?
p = new Buffer(@packet.length + data.length)
@packet.copy(p, 0, 0)
data.copy(p, @packet.length, 0)
p
else
data
@parsePacket()
parsePacket: ->
try
[bytesParsed, header, payload] = @parser.parse(@packet)
# Continue parsing left over data
@packet = if bytesParsed < @packet.length
@packet.slice(bytesParsed)
else
null
# Human readable event with the payload as args
event = Protocol.LABELS[header] || 'unhandled'
@emit event, payload...
@parsePacket() if @packet?
# An error parsing means the data crosses over two packets and we need to try again when another packet comes in.
catch e
@parser.rewind()
# Convenience functions
say: (msg) ->
msg.split("\n").forEach (line) =>
if line.length > 100
line = line.substring(0, 100)
chatPacket = new Packet(0x03)
@conn.write chatPacket.build(line)
| true | events = require 'events'
http = require 'http'
url = require 'url'
net = require 'net'
Parser = require('./parser').Parser
Packet = require('./packet').Packet
Protocol = require('./protocol')
class exports.Client extends events.EventEmitter
# TODO: Refactor!
constructor: (@port, @host, @username, @password) ->
@parser = new Parser()
loginPath = url.format
pathname: '/'
query:
user: @username
password: PI:PASSWORD:<PASSWORD>END_PI
version: 12
req = http.get {hostname: 'login.minecraft.net', path: loginPath}, (resp) =>
resp.on 'data', (data) =>
body = data.toString()
if body is 'Bad login'
console.error(body)
process.exit(1)
sessionId = body.split(':', 4)[3]
# Connect to the server
@conn = net.createConnection(@port, @host)
@conn.on 'data', (data) => @addData(data)
@conn.on 'end', => @emit 'end'
@conn.on 'connect', =>
# Send our username
@writePacket 0x02, @username
# respond to keepalive packets
@on 'keepalive', (id) => @writePacket 0x00, id
# Get back the serverId
@once 'handshake', (serverId) =>
# Verify the serverId
sessionPath = url.format
pathname: '/game/joinserver.jsp'
query:
user: @username
sessionId: sessionId
serverId: serverId
vreq = http.get {hostname: 'session.minecraft.net', path: sessionPath}, (vresp) =>
vresp.on 'data', (data) =>
body = data.toString()
if body isnt 'OK'
console.error(body)
process.exit(1)
@writePacket 0x01, 22, @username, 0, 0, 0, 0, 0, 0
@once 'login', (@eId, _, seed, mode, dim, difficulty, height, maxPlayers) =>
@world =
seed: seed
mode: mode
dimension: dim
difficulty: difficulty
maxPlayers: maxPlayers
@emit 'connect', @
# Echos the 0x0D packet (needs to happen otherwise server fucks out)
@once 'player position and look', (x, stance, y, z, yaw, pitch, grounded) =>
@writepacket 0x0D, arguments...
writePacket: (header, payload...) ->
if typeof(payload[payload.length - 1]) is 'function'
callback = payload.pop()
packet = new Packet(header)
@conn.write packet.build(payload...), callback
addData: (data) ->
# If data already exists, add this new stuff
@packet = if @packet?
p = new Buffer(@packet.length + data.length)
@packet.copy(p, 0, 0)
data.copy(p, @packet.length, 0)
p
else
data
@parsePacket()
parsePacket: ->
try
[bytesParsed, header, payload] = @parser.parse(@packet)
# Continue parsing left over data
@packet = if bytesParsed < @packet.length
@packet.slice(bytesParsed)
else
null
# Human readable event with the payload as args
event = Protocol.LABELS[header] || 'unhandled'
@emit event, payload...
@parsePacket() if @packet?
# An error parsing means the data crosses over two packets and we need to try again when another packet comes in.
catch e
@parser.rewind()
# Convenience functions
say: (msg) ->
msg.split("\n").forEach (line) =>
if line.length > 100
line = line.substring(0, 100)
chatPacket = new Packet(0x03)
@conn.write chatPacket.build(line)
|
[
{
"context": "###\n * @author \t\tAbdelhakim RAFIK\n * @version \tv1.0.1\n * @license \tMIT License\n * @",
"end": 33,
"score": 0.9998902678489685,
"start": 17,
"tag": "NAME",
"value": "Abdelhakim RAFIK"
},
{
"context": "nse \tMIT License\n * @copyright \tCopyright (c) 2021 Abdelhaki... | src/app/controllers/userController.coffee | AbdelhakimRafik/Project | 1 | ###
* @author Abdelhakim RAFIK
* @version v1.0.1
* @license MIT License
* @copyright Copyright (c) 2021 Abdelhakim RAFIK
* @date June 2021
###
{ User } = require '../models'
###
Get all users or filtred list if offset and limit are provided
###
module.exports.getUsers = (req, res) ->
# get users from database
users = await User.findAll offset: req.params.offset, limit: req.params.limit
# return users list
res.json users
###
Get user by id
###
module.exports.getUserById = (req, res) ->
# get user from data base with given id
user = await User.findByPk req.params.id
# return user
res.json user
###
Register a new user
###
module.exports.addUser = (req, res) ->
# add user to database
User.create req.body
.then () ->
# if user created successfully
res.json message: 'User created successfully'
return
.catch (exp) ->
if exp.name is 'SequelizeValidationError'
res.status(400).json
message: 'Data validation error'
errors: exp.errors.map (err) -> err.message
else
res.staus(500).json
message: 'Server error occured'
return
return
###
Update a given user data
###
module.exports.updateUser = (req, res) ->
# update user information
User.update req.body, where: id: req.params.id
.then () ->
res.json message: "User updated successfully"
return
.catch (exp) ->
if exp.name is 'SequelizeValidationError'
res.status(400).json
message: 'Data validation error'
errors: exp.errors.map (err) -> err.message
else
res.staus(500).json
message: 'Server error occured'
return
return
###
Delete user by given id
###
module.exports.deleteUser = (req, res) ->
# delete user by id
deleted = await User.destroy where: id: req.params.id
# if user deleted
if deleted
res.json
message: "User deleted successfully"
else
res.status(500).json
message: "User not found" | 26746 | ###
* @author <NAME>
* @version v1.0.1
* @license MIT License
* @copyright Copyright (c) 2021 <NAME>
* @date June 2021
###
{ User } = require '../models'
###
Get all users or filtred list if offset and limit are provided
###
module.exports.getUsers = (req, res) ->
# get users from database
users = await User.findAll offset: req.params.offset, limit: req.params.limit
# return users list
res.json users
###
Get user by id
###
module.exports.getUserById = (req, res) ->
# get user from data base with given id
user = await User.findByPk req.params.id
# return user
res.json user
###
Register a new user
###
module.exports.addUser = (req, res) ->
# add user to database
User.create req.body
.then () ->
# if user created successfully
res.json message: 'User created successfully'
return
.catch (exp) ->
if exp.name is 'SequelizeValidationError'
res.status(400).json
message: 'Data validation error'
errors: exp.errors.map (err) -> err.message
else
res.staus(500).json
message: 'Server error occured'
return
return
###
Update a given user data
###
module.exports.updateUser = (req, res) ->
# update user information
User.update req.body, where: id: req.params.id
.then () ->
res.json message: "User updated successfully"
return
.catch (exp) ->
if exp.name is 'SequelizeValidationError'
res.status(400).json
message: 'Data validation error'
errors: exp.errors.map (err) -> err.message
else
res.staus(500).json
message: 'Server error occured'
return
return
###
Delete user by given id
###
module.exports.deleteUser = (req, res) ->
# delete user by id
deleted = await User.destroy where: id: req.params.id
# if user deleted
if deleted
res.json
message: "User deleted successfully"
else
res.status(500).json
message: "User not found" | true | ###
* @author PI:NAME:<NAME>END_PI
* @version v1.0.1
* @license MIT License
* @copyright Copyright (c) 2021 PI:NAME:<NAME>END_PI
* @date June 2021
###
{ User } = require '../models'
###
Get all users or filtred list if offset and limit are provided
###
module.exports.getUsers = (req, res) ->
# get users from database
users = await User.findAll offset: req.params.offset, limit: req.params.limit
# return users list
res.json users
###
Get user by id
###
module.exports.getUserById = (req, res) ->
# get user from data base with given id
user = await User.findByPk req.params.id
# return user
res.json user
###
Register a new user
###
module.exports.addUser = (req, res) ->
# add user to database
User.create req.body
.then () ->
# if user created successfully
res.json message: 'User created successfully'
return
.catch (exp) ->
if exp.name is 'SequelizeValidationError'
res.status(400).json
message: 'Data validation error'
errors: exp.errors.map (err) -> err.message
else
res.staus(500).json
message: 'Server error occured'
return
return
###
Update a given user data
###
module.exports.updateUser = (req, res) ->
# update user information
User.update req.body, where: id: req.params.id
.then () ->
res.json message: "User updated successfully"
return
.catch (exp) ->
if exp.name is 'SequelizeValidationError'
res.status(400).json
message: 'Data validation error'
errors: exp.errors.map (err) -> err.message
else
res.staus(500).json
message: 'Server error occured'
return
return
###
Delete user by given id
###
module.exports.deleteUser = (req, res) ->
# delete user by id
deleted = await User.destroy where: id: req.params.id
# if user deleted
if deleted
res.json
message: "User deleted successfully"
else
res.status(500).json
message: "User not found" |
[
{
"context": "###\n * @author \t\tAbdelhakim RAFIK\n * @version \tv1.0.1\n * @license \tMIT License\n * @",
"end": 33,
"score": 0.9998974800109863,
"start": 17,
"tag": "NAME",
"value": "Abdelhakim RAFIK"
},
{
"context": "nse \tMIT License\n * @copyright \tCopyright (c) 2021 Abdelhaki... | src/app/controllers/authController.coffee | AbdelhakimRafik/Pharmalogy-API | 0 | ###
* @author Abdelhakim RAFIK
* @version v1.0.1
* @license MIT License
* @copyright Copyright (c) 2021 Abdelhakim RAFIK
* @date Mar 2021
###
Validator = require 'validatorjs'
bcrypt = require 'bcrypt'
jwt = require 'jsonwebtoken'
config = require '../../config'
User = require '../models/user'
###
@api {post} /api/signup Registration
@apiName SignUp
@apiGroup User
@apiPermission public
@apiDescription
Register new user if not exist.
@apiParam {String} firstName User first name.
@apiParam {String} lastName User last name.
@apiParam {String} email User email addresse.
@apiParam {String} password User password.
@apiParam {String{10}} [phone] User number phone.
@apiParam {String} [city] User city name.
@apiParam {String} [country] User country name.
@apiSuccessExample {json} Success-Response
HTTP/1.1 200
{
"message": "User created successfully"
}
@apiErrorExample {json} Error-Email-exists
HTTP/1.1 401
{
"message": "Email already exists"
}
@apiErrorExample {json} Error-Data
HTTP/1.1 400
{
"message": "Data errors"
"errors": errors list
"errorCount": number of errors
}
###
module.exports.signup = (req, res) ->
# user validation rules
userRules =
firstName: 'required|string'
lastName: 'required|string'
email: 'required|email'
password: 'required|min:5'
phone: 'size:10'
city: 'string'
country: 'string'
# validate request data
validation = new Validator req.body, userRules
# when data not validated
unless do validation.passes
res.status(400).json
message: "Data errors"
errors: do validation.errors.all
errorCount: validation.errorCount
# data are validated
else
# check if user already registred
user = await User.findOne where: email: req.body.email
# user not found
unless user
# hash given password
req.body.password = bcrypt.hashSync req.body.password, config.bcrypt.salt
# add user to database
user = await User.create req.body
# check if user created
if user
res.status(200).json
message: "User created successfully"
# user found
else
res.status(401).json
message: "Email already exists"
return
###
@api {post} /api/signin Authentification
@apiName SignIn
@apiGroup User
@apiPermission public
@apiDescription
Authentify a user with his email and password, and return a token.
@apiParam {String} email User email addresse.
@apiParam {String} password User password.
@apiSuccessExample {json} Success-Response
HTTP/1.1 200
{
"auth": true
"message": "User authenticated successfully"
"token": token
"user":
"firstName": user firstName
"lastName": user lastName
}
@apiErrorExample {json} Error-Response
HTTP/1.1 401
{
"auth": false
"message": "Email or password incorrect"
}
###
module.exports.signin = (req, res) ->
# find user
user = await User.findOne where: email: req.body.email
# user found
if user
avatar = await do user.getPharmacy
console.log avatar
# check user password
if bcrypt.compareSync req.body.password, user.password
# create token
token = jwt.sign id:user.id, config.jwt.secret
# send response
res.status(200).json
auth: true
message: "User authenticated successfully"
token: token
user:
firstName: user.firstName
lastName: user.lastName
return
# if error occured
res.status(401).json
auth: false
message: "Email or password incorrect" | 37516 | ###
* @author <NAME>
* @version v1.0.1
* @license MIT License
* @copyright Copyright (c) 2021 <NAME>
* @date Mar 2021
###
Validator = require 'validatorjs'
bcrypt = require 'bcrypt'
jwt = require 'jsonwebtoken'
config = require '../../config'
User = require '../models/user'
###
@api {post} /api/signup Registration
@apiName SignUp
@apiGroup User
@apiPermission public
@apiDescription
Register new user if not exist.
@apiParam {String} firstName User first name.
@apiParam {String} lastName User last name.
@apiParam {String} email User email addresse.
@apiParam {String} password User password.
@apiParam {String{10}} [phone] User number phone.
@apiParam {String} [city] User city name.
@apiParam {String} [country] User country name.
@apiSuccessExample {json} Success-Response
HTTP/1.1 200
{
"message": "User created successfully"
}
@apiErrorExample {json} Error-Email-exists
HTTP/1.1 401
{
"message": "Email already exists"
}
@apiErrorExample {json} Error-Data
HTTP/1.1 400
{
"message": "Data errors"
"errors": errors list
"errorCount": number of errors
}
###
module.exports.signup = (req, res) ->
# user validation rules
userRules =
firstName: 'required|string'
lastName: 'required|string'
email: 'required|email'
password: '<PASSWORD>'
phone: 'size:10'
city: 'string'
country: 'string'
# validate request data
validation = new Validator req.body, userRules
# when data not validated
unless do validation.passes
res.status(400).json
message: "Data errors"
errors: do validation.errors.all
errorCount: validation.errorCount
# data are validated
else
# check if user already registred
user = await User.findOne where: email: req.body.email
# user not found
unless user
# hash given password
req.body.password = <PASSWORD>.hashSync req.body.password, config.bcrypt.salt
# add user to database
user = await User.create req.body
# check if user created
if user
res.status(200).json
message: "User created successfully"
# user found
else
res.status(401).json
message: "Email already exists"
return
###
@api {post} /api/signin Authentification
@apiName SignIn
@apiGroup User
@apiPermission public
@apiDescription
Authentify a user with his email and password, and return a token.
@apiParam {String} email User email addresse.
@apiParam {String} password User password.
@apiSuccessExample {json} Success-Response
HTTP/1.1 200
{
"auth": true
"message": "User authenticated successfully"
"token": token
"user":
"firstName": user firstName
"lastName": user lastName
}
@apiErrorExample {json} Error-Response
HTTP/1.1 401
{
"auth": false
"message": "Email or password incorrect"
}
###
module.exports.signin = (req, res) ->
# find user
user = await User.findOne where: email: req.body.email
# user found
if user
avatar = await do user.getPharmacy
console.log avatar
# check user password
if <PASSWORD>.compareSync req.body.password, user.password
# create token
token = jwt.sign id:user.id, config.jwt.secret
# send response
res.status(200).json
auth: true
message: "User authenticated successfully"
token: token
user:
firstName: <NAME>
lastName: <NAME>
return
# if error occured
res.status(401).json
auth: false
message: "Email or password incorrect" | true | ###
* @author PI:NAME:<NAME>END_PI
* @version v1.0.1
* @license MIT License
* @copyright Copyright (c) 2021 PI:NAME:<NAME>END_PI
* @date Mar 2021
###
Validator = require 'validatorjs'
bcrypt = require 'bcrypt'
jwt = require 'jsonwebtoken'
config = require '../../config'
User = require '../models/user'
###
@api {post} /api/signup Registration
@apiName SignUp
@apiGroup User
@apiPermission public
@apiDescription
Register new user if not exist.
@apiParam {String} firstName User first name.
@apiParam {String} lastName User last name.
@apiParam {String} email User email addresse.
@apiParam {String} password User password.
@apiParam {String{10}} [phone] User number phone.
@apiParam {String} [city] User city name.
@apiParam {String} [country] User country name.
@apiSuccessExample {json} Success-Response
HTTP/1.1 200
{
"message": "User created successfully"
}
@apiErrorExample {json} Error-Email-exists
HTTP/1.1 401
{
"message": "Email already exists"
}
@apiErrorExample {json} Error-Data
HTTP/1.1 400
{
"message": "Data errors"
"errors": errors list
"errorCount": number of errors
}
###
module.exports.signup = (req, res) ->
# user validation rules
userRules =
firstName: 'required|string'
lastName: 'required|string'
email: 'required|email'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
phone: 'size:10'
city: 'string'
country: 'string'
# validate request data
validation = new Validator req.body, userRules
# when data not validated
unless do validation.passes
res.status(400).json
message: "Data errors"
errors: do validation.errors.all
errorCount: validation.errorCount
# data are validated
else
# check if user already registred
user = await User.findOne where: email: req.body.email
# user not found
unless user
# hash given password
req.body.password = PI:PASSWORD:<PASSWORD>END_PI.hashSync req.body.password, config.bcrypt.salt
# add user to database
user = await User.create req.body
# check if user created
if user
res.status(200).json
message: "User created successfully"
# user found
else
res.status(401).json
message: "Email already exists"
return
###
@api {post} /api/signin Authentification
@apiName SignIn
@apiGroup User
@apiPermission public
@apiDescription
Authentify a user with his email and password, and return a token.
@apiParam {String} email User email addresse.
@apiParam {String} password User password.
@apiSuccessExample {json} Success-Response
HTTP/1.1 200
{
"auth": true
"message": "User authenticated successfully"
"token": token
"user":
"firstName": user firstName
"lastName": user lastName
}
@apiErrorExample {json} Error-Response
HTTP/1.1 401
{
"auth": false
"message": "Email or password incorrect"
}
###
module.exports.signin = (req, res) ->
# find user
user = await User.findOne where: email: req.body.email
# user found
if user
avatar = await do user.getPharmacy
console.log avatar
# check user password
if PI:PASSWORD:<PASSWORD>END_PI.compareSync req.body.password, user.password
# create token
token = jwt.sign id:user.id, config.jwt.secret
# send response
res.status(200).json
auth: true
message: "User authenticated successfully"
token: token
user:
firstName: PI:NAME:<NAME>END_PI
lastName: PI:NAME:<NAME>END_PI
return
# if error occured
res.status(401).json
auth: false
message: "Email or password incorrect" |
[
{
"context": "# grunt-react-render\n# https://github.com/alexander/grunt-react-render\n\n# Copyright (c) 2014 AlexMost",
"end": 51,
"score": 0.9996994733810425,
"start": 42,
"tag": "USERNAME",
"value": "alexander"
},
{
"context": "alexander/grunt-react-render\n\n# Copyright (c) 2014 ... | src/lib.coffee | AlexMost/grunt-react-render | 1 | # grunt-react-render
# https://github.com/alexander/grunt-react-render
# Copyright (c) 2014 AlexMost
# Licensed under the MIT license.
cheerio = require 'cheerio'
fs = require 'fs'
React = require 'react'
path = require 'path'
renderComponent = (componentPath, componentProps) ->
component = require componentPath
props = componentProps or {}
React.renderToString(React.createElement(component, props))
processFile = (filePath, destPath, cb) ->
basedir = process.cwd()
fs.readFile filePath, (err, content) ->
cb(new Error err) if err
$ = cheerio.load content.toString()
$('*[data-rcomp]').each (index, comp) ->
comp_path = path.resolve(basedir, $(comp).data().rcomp)
$(comp).html(renderComponent(comp_path, $(comp).data().rprop))
fs.writeFile destPath, $.html(), cb
module.exports = {processFile} | 11388 | # grunt-react-render
# https://github.com/alexander/grunt-react-render
# Copyright (c) 2014 <NAME>
# Licensed under the MIT license.
cheerio = require 'cheerio'
fs = require 'fs'
React = require 'react'
path = require 'path'
renderComponent = (componentPath, componentProps) ->
component = require componentPath
props = componentProps or {}
React.renderToString(React.createElement(component, props))
processFile = (filePath, destPath, cb) ->
basedir = process.cwd()
fs.readFile filePath, (err, content) ->
cb(new Error err) if err
$ = cheerio.load content.toString()
$('*[data-rcomp]').each (index, comp) ->
comp_path = path.resolve(basedir, $(comp).data().rcomp)
$(comp).html(renderComponent(comp_path, $(comp).data().rprop))
fs.writeFile destPath, $.html(), cb
module.exports = {processFile} | true | # grunt-react-render
# https://github.com/alexander/grunt-react-render
# Copyright (c) 2014 PI:NAME:<NAME>END_PI
# Licensed under the MIT license.
cheerio = require 'cheerio'
fs = require 'fs'
React = require 'react'
path = require 'path'
renderComponent = (componentPath, componentProps) ->
component = require componentPath
props = componentProps or {}
React.renderToString(React.createElement(component, props))
processFile = (filePath, destPath, cb) ->
basedir = process.cwd()
fs.readFile filePath, (err, content) ->
cb(new Error err) if err
$ = cheerio.load content.toString()
$('*[data-rcomp]').each (index, comp) ->
comp_path = path.resolve(basedir, $(comp).data().rcomp)
$(comp).html(renderComponent(comp_path, $(comp).data().rprop))
fs.writeFile destPath, $.html(), cb
module.exports = {processFile} |
[
{
"context": "mon')\n\ntest 'escape-test', [\n {\n Name: 'Bob'\n Company: 'Big & Boring'\n }\n {\n ",
"end": 74,
"score": 0.9998717308044434,
"start": 71,
"tag": "NAME",
"value": "Bob"
},
{
"context": "ompany: 'Big & Boring'\n }\n {\n Name: 'Ali... | test/escape_test.coffee | SBeyeMHP/node-xlsx-writestream | 42 | test = require('./common')
test 'escape-test', [
{
Name: 'Bob'
Company: 'Big & Boring'
}
{
Name: 'Alice'
Company: 'The <special> company!'
}
{
Name: 'Clive'
Company: 'We "love" quotes.'
}
]
| 186403 | test = require('./common')
test 'escape-test', [
{
Name: '<NAME>'
Company: 'Big & Boring'
}
{
Name: '<NAME>'
Company: 'The <special> company!'
}
{
Name: '<NAME>'
Company: 'We "love" quotes.'
}
]
| true | test = require('./common')
test 'escape-test', [
{
Name: 'PI:NAME:<NAME>END_PI'
Company: 'Big & Boring'
}
{
Name: 'PI:NAME:<NAME>END_PI'
Company: 'The <special> company!'
}
{
Name: 'PI:NAME:<NAME>END_PI'
Company: 'We "love" quotes.'
}
]
|
[
{
"context": "描述\"\n\t\t\n\t\tpersonal_name:\n\t\t\ttype:\"text\"\n\t\t\tlabel:\"人名\"\n\t\t\tgroup:\"内容描述\"\n\t\t\n\t\tdocument_number:\n\t\t\ttype:\"t",
"end": 7477,
"score": 0.7247557044029236,
"start": 7476,
"tag": "NAME",
"value": "名"
},
{
"context": "\ttype:\"text\"\n\t\t\tlabel... | packages/steedos-app-archive/models/archive_manage/archive_wenshu.coffee | zonglu233/fuel-car | 0 | # 设置保管期限
set_retention = (doc)->
rules = Creator.Collections["archive_rules"].find({ fieldname: 'title'},{ fields:{ keywords: 1,retention:1 } } ).fetch()
if rules
rules_keywords = _.pluck rules, "keywords"
else
rules_keywords = []
# 所有规则关键词
i = 0
while i < rules_keywords.length
is_matched = true
j = 0
arrs = rules_keywords[i]
while j < arrs.length
if doc.title.indexOf(arrs[j])<0
is_matched = false
break;
j++
if is_matched
retention_id = rules[i].retention
break;
i++
# 保管期限表
if retention_id
retention = Creator.Collections["archive_retention"].findOne({_id:retention_id})
else
retention = Creator.Collections["archive_retention"].findOne({is_default:true})
# 设置保管期限和销毁日期
if retention?.years
# 没有文件日期默认为当前日期
if !doc.document_date
doc.document_date = new Date()
duration = retention?.years
year = doc.document_date?.getFullYear() + duration
month = doc.document_date?.getMonth()
day = doc.document_date?.getDate()
destroy_date = new Date(year,month,day)
destroy_date_timestamp = parseInt(destroy_date?.getTime())
Creator.Collections["archive_wenshu"].direct.update(doc._id,
{
$set:{
retention: retention,
destroy_date: destroy_date,
destroy_date_timestamp: destroy_date_timestamp
}
})
# 设置类别号
set_category_code = (doc)->
# 根据归档部门确定类别号
if doc?.archive_dept
keyword = doc?.archive_dept
classification = Creator.Collections["archive_classification"].findOne({keywords: keyword})
if classification?._id
Creator.Collections["archive_wenshu"].direct.update(doc._id,
{
$set:{
category_code:classification?._id
}
})
# 设置初始条件
set_init = (record_id)->
Creator.Collections["archive_wenshu"].direct.update(record_id,
{
$set:{
is_received: false
is_destroyed: false
is_borrowed: false
}
})
# 设置电子文件号
set_electronic_record_code = (record_id)->
record = Creator.Collections["archive_wenshu"].findOne(record_id,{fields:{fonds_name:1,year:1}})
if record?.fonds_name and record?.year
fonds_code = Creator.Collections["archive_fonds"].findOne(record.fonds_name,{fields:{code:1}})?.code
count = Creator.Collections["archive_wenshu"].find({year:record?.year}).count()
strcount = "0000000" + count
count_code = strcount.substr(strcount.length-6)
electronic_record_code = fonds_code + "WS" + record?.year + count_code
Creator.Collections["archive_wenshu"].direct.update(record_id,
{
$set:{
electronic_record_code: electronic_record_code
}
})
set_company = (record_id)->
record = Creator.Collections["archive_wenshu"].findOne(record_id,{fields:{fonds_name:1,retention_peroid:1,organizational_structure:1,year:1,item_number:1}})
if record?.fonds_name
fonds_company = Creator.Collections["archive_fonds"].findOne(record.fonds_name,{fields:{company:1}})?.company
if fonds_company
Creator.Collections["archive_wenshu"].direct.update(record_id,
{
$set:{
company: fonds_company
}
})
# 设置档号
set_archivecode = (record_id)->
console.log "修改档号"
record = Creator.Collections["archive_wenshu"].findOne(record_id,{fields:{fonds_name:1,retention_peroid:1,organizational_structure:1,year:1,item_number:1}})
if record?.item_number and record?.fonds_name and record?.retention_peroid and record?.year and record?.organizational_structure
fonds_code = Creator.Collections["archive_fonds"].findOne(record.fonds_name,{fields:{code:1}})?.code
retention_peroid_code = Creator.Collections["archive_retention"].findOne(record.retention_peroid,{fields:{code:1}})?.code
organizational_structure_code = Creator.Collections["archive_organization"].findOne(record.organizational_structure,{fields:{code:1}})?.code
year = record.year
item_number = (Array(6).join('0') + record.item_number).slice(-4)
if fonds_code and year and retention_peroid_code and item_number
if organizational_structure_code
archive_code = fonds_code + "-WS" + "-" + year + "-"+ retention_peroid_code + "-" + organizational_structure_code + "-"+item_number
else
archive_code = fonds_code + "-WS" + "-" + year + "-"+ retention_peroid_code + "-" + item_number
Creator.Collections["archive_wenshu"].direct.update(record_id,
{
$set:{
archival_code:archive_code
}
})
set_destory = (doc)->
if doc?.retention_peroid and doc?.document_date
duration = Creator.Collections["archive_retention"].findOne({_id:doc.retention_peroid})?.years
if duration
year = doc.document_date.getFullYear()+duration
month = doc.document_date.getMonth()
day = doc.document_date.getDate()
destroy_date = new Date(year,month,day)
destroy_date_timestamp = parseInt(destroy_date?.getTime())
Creator.Collections["archive_wenshu"].direct.update({_id:doc._id},
{
$set:{
destroy_date:destroy_date,
destroy_date_timestamp:destroy_date_timestamp
}
})
# 日志记录
set_audit = (record_id, space, userId)->
doc = {
business_status: "历史行为",
business_activity: "修改文书档案",
action_time: new Date(),
action_user: userId,
action_mandate: "",
action_description: "",
action_administrative_records_id: record_id,
created_by: userId,
created: new Date(),
owner: userId,
space: space
}
Creator.Collections["archive_audit"].insert(doc)
# 设置重新封装
set_hasXml = (record_id)->
Creator.Collections["archive_wenshu"].direct.update({_id:record_id},
{
$set:{
has_xml:false
}
})
Creator.Objects.archive_wenshu =
name: "archive_wenshu"
icon: "record"
label: "文书简化"
enable_search: true
enable_files: true
enable_api: true
enable_tree: false
filter_company: true
fields:
archival_category_code:
type: "text"
label:"档案门类代码"
defaultValue: "WS"
fonds_constituting_unit_name:
type:"text"
label:"立档单位名称"
defaultValue: "河北港口集团有限公司"
aggregation_level:
type: "select"
label:"聚合层次"
defaultValue: "文件"
options:[
{label:"案卷",value:"案卷"},
{label:"文件",value:"文件"}],
allowedValues:["案卷","文件"]
electronic_record_code:
type: "text"
label:"电子文件号"
omit:true
archival_code:
type:"text"
label:"档号"
is_wide:true
omit:true
group:"档号"
fonds_name:
type:"master_detail"
label:"全宗名称"
reference_to:"archive_fonds"
group:"档号"
year:
type: "text"
label:"年度"
sortable:true
group:"档号"
retention_peroid:
type:"master_detail"
label:"保管期限"
reference_to:"archive_retention"
sortable:true
group:"档号"
organizational_structure:
type:"master_detail"
label:"机构"
reference_to: "archive_organization"
group:"档号"
category_code:
type:"master_detail"
label:"类别号"
reference_to: "archive_classification"
group:"档号"
item_number:
type: "number"
label:"件号"
sortable:true
group:"档号"
document_sequence_number:
type: "number"
label:"文档序号"
group:"档号"
title:
type:"textarea"
label:"题名"
is_wide:true
is_name:true
required:true
searchable:true
group:"内容描述"
parallel_title:
type: "text"
label:"并列题名"
group:"内容描述"
other_title_information:
type:"text"
label:"说明题名文字"
group:"内容描述"
annex_title:
type:"textarea"
label:"附件题名"
group:"内容描述"
main_dept:
type:"text",
label:"主办部室"
group:"内容描述"
descriptor:
type:"text"
label:"主题词"
is_wide:true
group:"内容描述"
keyword:
type:"text"
label:"关键词"
omit:true
group:"内容描述"
abstract:
type:"text"
label:"摘要"
group:"内容描述"
personal_name:
type:"text"
label:"人名"
group:"内容描述"
document_number:
type:"text"
label:"文件编号"
group:"内容描述"
author:
type:"text"
label:"责任者"
group:"内容描述"
document_date:
type:"date"
label:"文件日期"
format:"YYYYMMDD"
sortable:true
group:"内容描述"
prinpipal_receiver:
type:"text",
label:"主送",
is_wide:true
group:"内容描述"
other_receivers:
type:"text",
label:"抄送",
group:"内容描述"
report:
type:"text",
label:"抄报",
group:"内容描述"
security_classification:
type:"select"
label:"密级"
defaultValue:"公开"
options: [
{label: "公开", value: "公开"},
{label: "限制", value: "限制"},
{label: "秘密", value: "秘密"},
{label: "机密", value: "机密"},
{label: "绝密", value: "绝密"},
{label: "非密", value: "非密"},
{label: "普通", value: "普通"}
]
allowedValues:["公开","限制","秘密","机密","绝密","非密","普通"]
group:"内容描述"
secrecy_period:
type:"select"
label:"保密期限"
options: [
{label: "10年", value: "10年"},
{label: "20年", value: "20年"},
{label: "30年", value: "30年"}
],
allowedValues:["10年","20年","30年"],
group:"内容描述"
applicant_organization_name:
type:"text"
label:"拟稿单位"
group:"内容描述"
applicant_name:
type:"text"
label:"拟稿人"
group:"内容描述"
reference:
type: "text"
label:"参见"
group:"内容描述"
destroy_date:
type:"date"
label:"销毁期限"
format:"YYYYMMDD"
omit:true
group:"内容描述"
destroy_date_timestamp:
type:"number"
label:"销毁期限时间戳"
hidden:true
group:"内容描述"
annotation:
type:"textarea",
label:"备注"
is_wide:true
group:"内容描述"
document_aggregation:
type:"select",
label:"文件组合类型",
defaultValue: "单件"
options: [
{label: "单件", value: "单件"},
{label: "组合文件", value: "组合文件"}
],
allowedValues:["单件","组合文件"],
group:"形式特征"
total_number_of_pages:
type:"number"
label:"页数"
group:"形式特征"
language:
type:"text"
label:"语种"
defaultValue: "汉语"
group:"形式特征"
document_type:
type:"text"
label:"文件类型"
group:"形式特征"
produce_flag:
type:"select",
label:"处理标志",
defaultValue: "在档"
options: [
{label: "在档", value: "在档"},
{label: "暂存", value: "暂存"},
{label: "移出", value: "移出"},
{label: "销毁", value: "销毁"},
{label: "出借", value: "出借"}
],
allowedValues:["在档","暂存","移出","销毁","出借"],
group:"形式特征"
orignal_document_creation_way:
type:"text"
label:"电子档案生成方式"
defaultValue: "原生"
options: [
{label: "数字化", value: "数字化"},
{label: "原生", value: "原生"}
],
allowedValues:["数字化","原生"],
group:"形式特征"
document_status:
type:"select",
label:"文件状态",
defaultValue: "电子归档"
options: [
{label: "不归档", value: "不归档"},
{label: "电子归档", value: "电子归档"},
{label: "暂存", value: "暂存"},
{label: "待归档", value: "待归档"},
{label: "实物归档", value: "实物归档"}
]
allowedValues:["不归档","电子归档","待归档","暂存","实物归档"]
group:"形式特征"
archive_dept:
type:"text"
label:"归档部门"
group:"形式特征"
archive_date:
type:"date"
label:"归档日期"
group:"形式特征"
signature_rules:
type:"text"
label:"签名规则"
omit:true
group:"电子签名"
signature_time:
type:"date"
label:"签名时间"
omit:true
group:"电子签名"
signer:
type:"text"
label:"签名人"
omit:true
group:"电子签名"
signature_algorithmidentifier:
type:"text"
label:"签名算法标识"
omit:true
group:"电子签名"
signature:
type:"text"
label:"签名结果"
omit:true
is_wide:true
group:"电子签名"
certificate:
type:"text"
label:"证书"
omit:true
is_wide:true
group:"电子签名"
certificate_reference:
type:"text"
label:"证书引证"
omit:true
group:"电子签名"
physical_record_characteristics:
type: "text"
label:"数字化对象形态"
defaultValue: "PDF"
group:"数字化属性"
scanning_resolution:
type: "text"
label:"扫描分辨率"
defaultValue: "220dpi"
group:"数字化属性"
scanning_color_model:
type: "text"
label:"扫描色彩模式"
defaultValue: "彩色"
group:"数字化属性"
image_compression_scheme:
type: "text"
label:"图像压缩方案"
defaultValue: "无损压缩"
group:"数字化属性"
device_type:
type: "text"
label:"设备类型"
defaultValue: ""
group:"数字化设备信息"
device_manufacturer:
type: "text"
label:"设备制造商"
defaultValue: ""
group:"数字化设备信息"
device_model_number:
type: "text"
label:"设备型号"
defaultValue: ""
group:"数字化设备信息"
device_model_serial_number:
type: "text"
label:"设备序列号"
defaultValue: ""
group:"数字化设备信息"
software_type:
type: "text"
label:"软件类型"
defaultValue: ""
group:"数字化设备信息"
software_name:
type: "text"
label:"软件名称"
defaultValue: ""
group:"数字化设备信息"
current_location:
type:"text"
label:"当前位置"
defaultValue:"\\\\192.168.0.151\\beta\\data\\oafile"
is_wide:true
group:"存储位置"
offline_medium_identifier:
type:"text"
label:"脱机载体编号"
group:"存储位置"
offline_medium_storage_location:
type:"text"
label:"脱机载体存址"
group:"存储位置"
intelligent_property_statement:
type: "text"
label:"知识产权说明"
group:"权限管理"
control_identifier:
type: "select"
label:"控制标识"
options: [
{label: "开放", value: "开放"},
{label: "控制", value: "控制"}
],
allowedValues:["开放","控制"],
group:"权限管理"
authorized_agent:
type: "text"
label:"授权对象"
group:"权限管理"
permission_assignment:
type: "select"
label:"授权行为"
options: [
{label: "公布", value: "公布"},
{label: "复制", value: "复制"},
{label: "浏览", value: "浏览"},
{label: "解密", value: "解密"}
],
allowedValues:["公布","复制","浏览","解密"],
group:"权限管理"
agent_type:
type: "select"
label:"机构人员类型"
defaultValue:"部门"
options: [
{label: "单位", value: "单位"},
{label: "部门", value: "部门"},
{label: "个人", value: "个人"}
],
allowedValues:["单位","部门","个人"],
group:"机构人员"
agent_name:
type: "text"
label:"机构人员名称"
group:"机构人员"
organization_code:
type: "text"
label:"组织机构代码"
group:"机构人员"
agent_belongs_to:
type: "text"
label:"机构人员隶属"
group:"机构人员"
related_archives:
label:'关联文件'
type:'lookup'
reference_to:'archive_wenshu'
multiple:true
is_wide:true
group:"关联文件"
old_id:
type:"text"
label:"老系统ID"
hidden: true
external_id:
type:"text"
label:'表单ID'
hidden: true
# 是否接收,默认是未接收
is_received:
type:"boolean"
label:"是否接收"
defaultValue:false
hidden: true
received:
type:"datetime"
label:"接收时间"
hidden: true
received_by:
type: "lookup"
label:"接收人"
reference_to: "users"
hidden: true
# 是否移交,默认是不存在,在“全部”视图下点击移交,进入“待移交”视图,此时is_transfer=false
# 审核通过之后,is_transfer = true
is_transfered:
type:"boolean"
label:"是否移交"
hidden: true
transfered:
type:"datetime"
label:"移交时间"
hidden: true
transfered_by:
type: "lookup"
label:"移交人"
reference_to: "users"
hidden: true
archive_transfer_id:
type:"master_detail"
label:"移交单"
reference_to:"archive_transfer"
group:"移交"
# 是否销毁,默认是不存在,在“全部”视图下点击销毁,进入“待销毁”视图,此时is_destroy=false
# 审核通过之后,is_transfer = true
is_destroyed:
type:"boolean"
label:'是否销毁'
hidden: true
destroyed:
type:"datetime"
label:'实际销毁时间'
hidden: true
destroyed_by:
type: "lookup"
label:"销毁人"
reference_to: "users"
hidden: true
archive_destroy_id:
type:"master_detail"
label:"销毁单"
filters:[["destroy_state", "$eq", "未销毁"]]
depend_on:["destroy_state"]
reference_to:"archive_destroy"
group:"销毁"
# 是否借阅
is_borrowed:
type:"boolean"
defaultValue:false
label:'是否借阅'
hidden: true
borrowed:
type:"datetime"
label:"借阅时间"
hidden: true
borrowed_by:
type: "lookup"
label:"借阅人"
reference_to: "users"
hidden: true
related_modified:
type:"datetime"
label:"附属更新时间"
hidden: true
has_xml:
type:"boolean"
label:"是否封装xml"
hidden: true
company:
type: "master_detail"
label: '所属公司'
reference_to: "organizations"
hidden: true
list_views:
# recent:
# label: "最近查看"
# filter_scope: "space"
all:
label: "全部"
filter_scope: "space"
filters: [["is_received", "=", true],["is_destroyed", "=", false]]
# columns:['item_number','archival_code',"author","title","electronic_record_code","total_number_of_pages","annotation",'archive_transfer_id']
columns:[
"year","retention_peroid","item_number",
"title","document_number","document_date",
"archive_dept","author"]
# borrow:
# label:"查看"
# filter_scope: "space"
# filters: [["is_received", "=", true]]
# columns:['document_sequence_number',"author","title","document_date","total_number_of_pages","annotation"]
receive:
label:"待接收"
filter_scope: "space"
filters: [["is_received", "=", false]]
# 已移交功能去掉===============
# transfered:
# label:"已移交"
# filter_scope: "space"
# filters: [["is_transfered", "=", true]]
# columns:["title","fonds_name","archive_transfer_id","transfered","transfered_by"]
destroy:
label:"待销毁"
filter_scope: "space"
filters: [["is_received", "=", true],["destroy_date_timestamp", "<=", new Date().getTime()],["is_destroyed", "=", false]]
columns:["year","title","document_date","destroy_date","archive_destroy_id"]
permission_set:
user:
allowCreate: false
allowDelete: false
allowEdit: false
allowRead: true
modifyAllRecords: false
viewAllRecords: true
list_views:["default","recent","all","borrow"]
actions:["borrow"]
admin:
allowCreate: true
allowDelete: true
allowEdit: true
allowRead: true
modifyAllRecords: true
viewAllRecords: true
list_views:["default","recent","all","borrow"]
actions:["borrow"]
  triggers:
    # Server-side hook, runs once after each insert: derives every
    # computed field for the fresh record.
    "after.insert.server.default":
      on: "server"
      when: "after.insert"
      todo: (userId, doc)->
        # Initialize workflow flags (not received / destroyed / borrowed)
        set_init(doc._id)
        # Derive the electronic record code
        set_electronic_record_code(doc._id)
        # Copy the owning company from the fonds
        set_company(doc._id)
        # Match retention rules and store retention + destroy date
        set_retention(doc)
        # Derive the category code from the archiving department
        set_category_code(doc)
        # Compute the destroy date from retention period
        set_destory(doc)
        # Flag the record for XML (re-)encapsulation
        set_hasXml(doc._id)
        return true
    # Server-side hook, runs after each update: inspects the $set
    # modifier and recomputes only the fields whose inputs changed.
    "after.update.server.default":
      on: "server"
      when: "after.update"
      todo: (userId, doc, fieldNames, modifier, options)->
        if modifier['$set']?.fonds_name
          set_company(doc._id)
        if modifier['$set']?.item_number or modifier['$set']?.organizational_structure or modifier['$set']?.retention_peroid or modifier['$set']?.fonds_name or modifier['$set']?.year
          set_archivecode(doc._id)
        if modifier['$set']?.retention_peroid || modifier['$set']?.document_date
          set_destory(doc)
        if modifier['$set']?.archive_dept # recompute the category code
          set_category_code(doc)
        # Flag the record for XML (re-)encapsulation
        set_hasXml(doc._id)
        # Append an audit-trail entry
        set_audit(doc?._id, doc?.space, userId)
actions:
number_adjuct:
label:'编号调整'
visible:true
on:'list'
todo:(object_name)->
if Creator.TabularSelectedIds?[object_name].length == 0
swal("请先选择要接收的档案")
return
init_num = prompt("输入初始件号值")
Meteor.call("archive_item_number",object_name,Creator.TabularSelectedIds?[object_name],init_num,
(error, result)->
if result
text = "编号已更新到" + result + "号"
swal(text)
)
receive:
label: "接收"
visible: true
on: "list"
todo:(object_name)->
if Session.get("list_view_id")== "receive"
if Creator.TabularSelectedIds?[object_name].length == 0
swal("请先选择要接收的档案")
return
space = Session.get("spaceId")
Meteor.call("archive_receive",object_name,Creator.TabularSelectedIds?[object_name],space,
(error,result) ->
if result
text = "共接收"+result[0]+"条,"+"成功"+result[1]+"条"
swal(text)
)
export2xml:
label:"导出XML"
visible:false
on: "list"
todo:(object_name, record_id)->
# 转为XML文件
Meteor.call("archive_export",object_name,
(error,result) ->
if result
text = "记录导出路径:"
swal(text + result)
)
borrow:
label:"借阅"
visible:true
on: "record"
todo:(object_name, record_id, fields)->
borrower = Creator.Collections[object_name].findOne({_id:record_id})?.borrowed_by
if borrower == Meteor.userId()
swal("您已借阅了此档案,归还之前无需重复借阅")
return
doc = Archive.createBorrowObject(object_name, record_id)
Creator.createObject("archive_borrow",doc)
viewxml:
label:"查看XML"
visible:true
on: "record"
todo:(object_name, record_id, fields)->
has_xml = Creator.Collections[object_name].findOne({_id:record_id})?.has_xml
if has_xml
window.location = Steedos.absoluteUrl "/view/encapsulation/xml?filename=#{record_id}.xml"
else
swal("该档案暂无XML封装文件") | 155209 | # 设置保管期限
# Determine the retention period for a document by matching archive rules
# against its title, then persist the retention and the computed destroy date.
# doc: the archive_wenshu record just inserted (must carry _id).
set_retention = (doc)->
  # Rules targeting the "title" field; each carries a keyword list and the
  # retention it maps to.
  rules = Creator.Collections["archive_rules"].find({ fieldname: 'title'},{ fields:{ keywords: 1,retention:1 } } ).fetch()
  if rules
    rules_keywords = _.pluck rules, "keywords"
  else
    rules_keywords = []
  # Guard: a record without a title matches no rule and falls through to the
  # default retention (previously this crashed on doc.title.indexOf).
  title = doc.title or ""
  # The first rule whose EVERY keyword occurs in the title wins.
  i = 0
  while i < rules_keywords.length
    is_matched = true
    j = 0
    arrs = rules_keywords[i]
    while j < arrs.length
      if title.indexOf(arrs[j]) < 0
        is_matched = false
        break
      j++
    if is_matched
      retention_id = rules[i].retention
      break
    i++
  # Fall back to the default retention when no rule matched.
  if retention_id
    retention = Creator.Collections["archive_retention"].findOne({_id:retention_id})
  else
    retention = Creator.Collections["archive_retention"].findOne({is_default:true})
  # Destroy date = document date + retention years.
  if retention?.years
    # A document without a date defaults to "now".
    if !doc.document_date
      doc.document_date = new Date()
    duration = retention?.years
    year = doc.document_date?.getFullYear() + duration
    month = doc.document_date?.getMonth()
    day = doc.document_date?.getDate()
    destroy_date = new Date(year,month,day)
    destroy_date_timestamp = parseInt(destroy_date?.getTime())
    # direct.update bypasses triggers, avoiding after.update recursion.
    Creator.Collections["archive_wenshu"].direct.update(doc._id,
      {
        $set:{
          retention: retention,
          destroy_date: destroy_date,
          destroy_date_timestamp: destroy_date_timestamp
        }
      })
# Derive the category code from the archiving department and store it.
set_category_code = (doc)->
  keyword = doc?.archive_dept
  # Without a department there is nothing to classify.
  return unless keyword
  classification = Creator.Collections["archive_classification"].findOne({keywords: keyword})
  return unless classification?._id
  # direct.update bypasses triggers so the after.update hook is not re-entered.
  Creator.Collections["archive_wenshu"].direct.update doc._id,
    $set:
      category_code: classification._id
# Reset the workflow flags of a freshly inserted record: it starts out
# not received, not destroyed and not borrowed.
set_init = (record_id)->
  initial_flags =
    is_received: false
    is_destroyed: false
    is_borrowed: false
  # direct.update skips triggers (no after.update recursion).
  Creator.Collections["archive_wenshu"].direct.update record_id, {$set: initial_flags}
# Build and persist the electronic record code:
#   <fonds code> + "WS" + <year> + <6-digit zero-padded sequence>
# The sequence is the count of records already stored for that year.
set_electronic_record_code = (record_id)->
  record = Creator.Collections["archive_wenshu"].findOne(record_id, {fields: {fonds_name: 1, year: 1}})
  return unless record?.fonds_name and record?.year
  fonds_code = Creator.Collections["archive_fonds"].findOne(record.fonds_name, {fields: {code: 1}})?.code
  count = Creator.Collections["archive_wenshu"].find({year: record.year}).count()
  # Zero-pad to 6 digits (last 6 chars of "0000000" + count).
  count_code = ("0000000" + count).slice(-6)
  code = fonds_code + "WS" + record.year + count_code
  Creator.Collections["archive_wenshu"].direct.update record_id,
    $set:
      electronic_record_code: code
# Copy the owning company from the record's fonds onto the record itself.
# (The field projection previously fetched retention_peroid / year /
# organizational_structure / item_number as well — copy-pasted from
# set_archivecode; only fonds_name is used here.)
set_company = (record_id)->
  record = Creator.Collections["archive_wenshu"].findOne(record_id,{fields:{fonds_name:1}})
  if record?.fonds_name
    fonds_company = Creator.Collections["archive_fonds"].findOne(record.fonds_name,{fields:{company:1}})?.company
    if fonds_company
      # direct.update bypasses triggers (no after.update recursion).
      Creator.Collections["archive_wenshu"].direct.update(record_id,
        {
          $set:{
            company: fonds_company
          }
        })
# Compose the archival code (档号) and store it on the record.
# Format: <fonds>-WS-<year>-<retention>[-<organization>]-<4-digit item number>
# All five source fields must be present; the organization code segment is
# optional (only included when the organization record carries a code).
set_archivecode = (record_id)->
  record = Creator.Collections["archive_wenshu"].findOne(record_id,{fields:{fonds_name:1,retention_peroid:1,organizational_structure:1,year:1,item_number:1}})
  if record?.item_number and record?.fonds_name and record?.retention_peroid and record?.year and record?.organizational_structure
    fonds_code = Creator.Collections["archive_fonds"].findOne(record.fonds_name,{fields:{code:1}})?.code
    retention_peroid_code = Creator.Collections["archive_retention"].findOne(record.retention_peroid,{fields:{code:1}})?.code
    organizational_structure_code = Creator.Collections["archive_organization"].findOne(record.organizational_structure,{fields:{code:1}})?.code
    year = record.year
    # Zero-pad the item number to 4 digits.
    # NOTE(review): slice(-4) truncates item numbers longer than 4 digits to
    # their last 4 digits — confirm item numbers are expected to stay <= 9999.
    item_number = (Array(6).join('0') + record.item_number).slice(-4)
    if fonds_code and year and retention_peroid_code and item_number
      if organizational_structure_code
        archive_code = fonds_code + "-WS" + "-" + year + "-"+ retention_peroid_code + "-" + organizational_structure_code + "-"+item_number
      else
        archive_code = fonds_code + "-WS" + "-" + year + "-"+ retention_peroid_code + "-" + item_number
      # direct.update bypasses triggers (no after.update recursion).
      # (A stray debug console.log was removed from this function.)
      Creator.Collections["archive_wenshu"].direct.update(record_id,
        {
          $set:{
            archival_code:archive_code
          }
        })
# Recompute the destroy date (document date + retention years) after the
# retention period or document date changed. The historical typo in the
# name ("destory") is kept because the object triggers call it by this name.
set_destory = (doc)->
  return unless doc?.retention_peroid and doc?.document_date
  duration = Creator.Collections["archive_retention"].findOne({_id: doc.retention_peroid})?.years
  return unless duration
  base = doc.document_date
  destroy_date = new Date(base.getFullYear() + duration, base.getMonth(), base.getDate())
  # direct.update bypasses triggers (no after.update recursion).
  Creator.Collections["archive_wenshu"].direct.update {_id: doc._id},
    $set:
      destroy_date: destroy_date
      destroy_date_timestamp: parseInt(destroy_date.getTime())
# Append an audit-trail entry recording that this document was modified.
# record_id: the modified archive record; space/userId: workspace and actor.
set_audit = (record_id, space, userId)->
  entry =
    business_status: "历史行为"
    business_activity: "修改文书档案"
    action_time: new Date()
    action_user: userId
    action_mandate: ""
    action_description: ""
    action_administrative_records_id: record_id
    created_by: userId
    created: new Date()
    owner: userId
    space: space
  Creator.Collections["archive_audit"].insert(entry)
# Mark the record as needing (re-)encapsulation into XML by clearing the
# has_xml flag; direct.update bypasses triggers.
set_hasXml = (record_id)->
  Creator.Collections["archive_wenshu"].direct.update {_id: record_id},
    $set:
      has_xml: false
Creator.Objects.archive_wenshu =
name: "archive_wenshu"
icon: "record"
label: "文书简化"
enable_search: true
enable_files: true
enable_api: true
enable_tree: false
filter_company: true
fields:
archival_category_code:
type: "text"
label:"档案门类代码"
defaultValue: "WS"
fonds_constituting_unit_name:
type:"text"
label:"立档单位名称"
defaultValue: "河北港口集团有限公司"
aggregation_level:
type: "select"
label:"聚合层次"
defaultValue: "文件"
options:[
{label:"案卷",value:"案卷"},
{label:"文件",value:"文件"}],
allowedValues:["案卷","文件"]
electronic_record_code:
type: "text"
label:"电子文件号"
omit:true
archival_code:
type:"text"
label:"档号"
is_wide:true
omit:true
group:"档号"
fonds_name:
type:"master_detail"
label:"全宗名称"
reference_to:"archive_fonds"
group:"档号"
year:
type: "text"
label:"年度"
sortable:true
group:"档号"
retention_peroid:
type:"master_detail"
label:"保管期限"
reference_to:"archive_retention"
sortable:true
group:"档号"
organizational_structure:
type:"master_detail"
label:"机构"
reference_to: "archive_organization"
group:"档号"
category_code:
type:"master_detail"
label:"类别号"
reference_to: "archive_classification"
group:"档号"
item_number:
type: "number"
label:"件号"
sortable:true
group:"档号"
document_sequence_number:
type: "number"
label:"文档序号"
group:"档号"
title:
type:"textarea"
label:"题名"
is_wide:true
is_name:true
required:true
searchable:true
group:"内容描述"
parallel_title:
type: "text"
label:"并列题名"
group:"内容描述"
other_title_information:
type:"text"
label:"说明题名文字"
group:"内容描述"
annex_title:
type:"textarea"
label:"附件题名"
group:"内容描述"
main_dept:
type:"text",
label:"主办部室"
group:"内容描述"
descriptor:
type:"text"
label:"主题词"
is_wide:true
group:"内容描述"
keyword:
type:"text"
label:"关键词"
omit:true
group:"内容描述"
abstract:
type:"text"
label:"摘要"
group:"内容描述"
personal_name:
type:"text"
      label:"人名"
group:"内容描述"
document_number:
type:"text"
label:"文件编号"
group:"内容描述"
author:
type:"text"
label:"责任者"
group:"内容描述"
document_date:
type:"date"
label:"文件日期"
format:"YYYYMMDD"
sortable:true
group:"内容描述"
prinpipal_receiver:
type:"text",
label:"主送",
is_wide:true
group:"内容描述"
other_receivers:
type:"text",
label:"抄送",
group:"内容描述"
report:
type:"text",
label:"抄报",
group:"内容描述"
security_classification:
type:"select"
label:"密级"
defaultValue:"公开"
options: [
{label: "公开", value: "公开"},
{label: "限制", value: "限制"},
{label: "秘密", value: "秘密"},
{label: "机密", value: "机密"},
{label: "绝密", value: "绝密"},
{label: "非密", value: "非密"},
{label: "普通", value: "普通"}
]
allowedValues:["公开","限制","秘密","机密","绝密","非密","普通"]
group:"内容描述"
secrecy_period:
type:"select"
label:"保密期限"
options: [
{label: "10年", value: "10年"},
{label: "20年", value: "20年"},
{label: "30年", value: "30年"}
],
allowedValues:["10年","20年","30年"],
group:"内容描述"
applicant_organization_name:
type:"text"
label:"拟稿单位"
group:"内容描述"
applicant_name:
type:"text"
label:"拟稿人"
group:"内容描述"
reference:
type: "text"
label:"参见"
group:"内容描述"
destroy_date:
type:"date"
label:"销毁期限"
format:"YYYYMMDD"
omit:true
group:"内容描述"
destroy_date_timestamp:
type:"number"
label:"销毁期限时间戳"
hidden:true
group:"内容描述"
annotation:
type:"textarea",
label:"备注"
is_wide:true
group:"内容描述"
document_aggregation:
type:"select",
label:"文件组合类型",
defaultValue: "单件"
options: [
{label: "单件", value: "单件"},
{label: "组合文件", value: "组合文件"}
],
allowedValues:["单件","组合文件"],
group:"形式特征"
total_number_of_pages:
type:"number"
label:"页数"
group:"形式特征"
language:
type:"text"
label:"语种"
defaultValue: "汉语"
group:"形式特征"
document_type:
type:"text"
label:"文件类型"
group:"形式特征"
produce_flag:
type:"select",
label:"处理标志",
defaultValue: "在档"
options: [
{label: "在档", value: "在档"},
{label: "暂存", value: "暂存"},
{label: "移出", value: "移出"},
{label: "销毁", value: "销毁"},
{label: "出借", value: "出借"}
],
allowedValues:["在档","暂存","移出","销毁","出借"],
group:"形式特征"
orignal_document_creation_way:
type:"text"
label:"电子档案生成方式"
defaultValue: "原生"
options: [
{label: "数字化", value: "数字化"},
{label: "原生", value: "原生"}
],
allowedValues:["数字化","原生"],
group:"形式特征"
document_status:
type:"select",
label:"文件状态",
defaultValue: "电子归档"
options: [
{label: "不归档", value: "不归档"},
{label: "电子归档", value: "电子归档"},
{label: "暂存", value: "暂存"},
{label: "待归档", value: "待归档"},
{label: "实物归档", value: "实物归档"}
]
allowedValues:["不归档","电子归档","待归档","暂存","实物归档"]
group:"形式特征"
archive_dept:
type:"text"
label:"归档部门"
group:"形式特征"
archive_date:
type:"date"
label:"归档日期"
group:"形式特征"
signature_rules:
type:"text"
label:"签名规则"
omit:true
group:"电子签名"
signature_time:
type:"date"
label:"签名时间"
omit:true
group:"电子签名"
signer:
type:"text"
label:"签名人"
omit:true
group:"电子签名"
signature_algorithmidentifier:
type:"text"
label:"签名算法标识"
omit:true
group:"电子签名"
signature:
type:"text"
label:"签名结果"
omit:true
is_wide:true
group:"电子签名"
certificate:
type:"text"
label:"证书"
omit:true
is_wide:true
group:"电子签名"
certificate_reference:
type:"text"
label:"证书引证"
omit:true
group:"电子签名"
physical_record_characteristics:
type: "text"
label:"数字化对象形态"
defaultValue: "PDF"
group:"数字化属性"
scanning_resolution:
type: "text"
label:"扫描分辨率"
defaultValue: "220dpi"
group:"数字化属性"
scanning_color_model:
type: "text"
label:"扫描色彩模式"
defaultValue: "彩色"
group:"数字化属性"
image_compression_scheme:
type: "text"
label:"图像压缩方案"
defaultValue: "无损压缩"
group:"数字化属性"
device_type:
type: "text"
label:"设备类型"
defaultValue: ""
group:"数字化设备信息"
device_manufacturer:
type: "text"
label:"设备制造商"
defaultValue: ""
group:"数字化设备信息"
device_model_number:
type: "text"
label:"设备型号"
defaultValue: ""
group:"数字化设备信息"
device_model_serial_number:
type: "text"
label:"设备序列号"
defaultValue: ""
group:"数字化设备信息"
software_type:
type: "text"
label:"软件类型"
defaultValue: ""
group:"数字化设备信息"
software_name:
type: "text"
label:"软件名称"
defaultValue: ""
group:"数字化设备信息"
current_location:
type:"text"
label:"当前位置"
defaultValue:"\\\\192.168.0.151\\beta\\data\\oafile"
is_wide:true
group:"存储位置"
offline_medium_identifier:
type:"text"
label:"脱机载体编号"
group:"存储位置"
offline_medium_storage_location:
type:"text"
label:"脱机载体存址"
group:"存储位置"
intelligent_property_statement:
type: "text"
label:"知识产权说明"
group:"权限管理"
control_identifier:
type: "select"
label:"控制标识"
options: [
{label: "开放", value: "开放"},
{label: "控制", value: "控制"}
],
allowedValues:["开放","控制"],
group:"权限管理"
authorized_agent:
type: "text"
label:"授权对象"
group:"权限管理"
permission_assignment:
type: "select"
label:"授权行为"
options: [
{label: "公布", value: "公布"},
{label: "复制", value: "复制"},
{label: "浏览", value: "浏览"},
{label: "解密", value: "解密"}
],
allowedValues:["公布","复制","浏览","解密"],
group:"权限管理"
agent_type:
type: "select"
label:"机构人员类型"
defaultValue:"部门"
options: [
{label: "单位", value: "单位"},
{label: "部门", value: "部门"},
{label: "个人", value: "个人"}
],
allowedValues:["单位","部门","个人"],
group:"机构人员"
agent_name:
type: "text"
label:"机构人员名称"
group:"机构人员"
organization_code:
type: "text"
label:"组织机构代码"
group:"机构人员"
agent_belongs_to:
type: "text"
label:"机构人员隶属"
group:"机构人员"
related_archives:
label:'关联文件'
type:'lookup'
reference_to:'archive_wenshu'
multiple:true
is_wide:true
group:"关联文件"
old_id:
type:"text"
label:"老系统ID"
hidden: true
external_id:
type:"text"
label:'表单ID'
hidden: true
# 是否接收,默认是未接收
is_received:
type:"boolean"
label:"是否接收"
defaultValue:false
hidden: true
received:
type:"datetime"
label:"接收时间"
hidden: true
received_by:
type: "lookup"
label:"接收人"
reference_to: "users"
hidden: true
# 是否移交,默认是不存在,在“全部”视图下点击移交,进入“待移交”视图,此时is_transfer=false
# 审核通过之后,is_transfer = true
is_transfered:
type:"boolean"
label:"是否移交"
hidden: true
transfered:
type:"datetime"
label:"移交时间"
hidden: true
transfered_by:
type: "lookup"
label:"移交人"
reference_to: "users"
hidden: true
archive_transfer_id:
type:"master_detail"
label:"移交单"
reference_to:"archive_transfer"
group:"移交"
# 是否销毁,默认是不存在,在“全部”视图下点击销毁,进入“待销毁”视图,此时is_destroy=false
# 审核通过之后,is_transfer = true
is_destroyed:
type:"boolean"
label:'是否销毁'
hidden: true
destroyed:
type:"datetime"
label:'实际销毁时间'
hidden: true
destroyed_by:
type: "lookup"
label:"销毁人"
reference_to: "users"
hidden: true
archive_destroy_id:
type:"master_detail"
label:"销毁单"
filters:[["destroy_state", "$eq", "未销毁"]]
depend_on:["destroy_state"]
reference_to:"archive_destroy"
group:"销毁"
# 是否借阅
is_borrowed:
type:"boolean"
defaultValue:false
label:'是否借阅'
hidden: true
borrowed:
type:"datetime"
label:"借阅时间"
hidden: true
borrowed_by:
type: "lookup"
label:"借阅人"
reference_to: "users"
hidden: true
related_modified:
type:"datetime"
label:"附属更新时间"
hidden: true
has_xml:
type:"boolean"
label:"是否封装xml"
hidden: true
company:
type: "master_detail"
label: '所属公司'
reference_to: "organizations"
hidden: true
list_views:
# recent:
# label: "最近查看"
# filter_scope: "space"
all:
label: "全部"
filter_scope: "space"
filters: [["is_received", "=", true],["is_destroyed", "=", false]]
# columns:['item_number','archival_code',"author","title","electronic_record_code","total_number_of_pages","annotation",'archive_transfer_id']
columns:[
"year","retention_peroid","item_number",
"title","document_number","document_date",
"archive_dept","author"]
# borrow:
# label:"查看"
# filter_scope: "space"
# filters: [["is_received", "=", true]]
# columns:['document_sequence_number',"author","title","document_date","total_number_of_pages","annotation"]
receive:
label:"待接收"
filter_scope: "space"
filters: [["is_received", "=", false]]
# 已移交功能去掉===============
# transfered:
# label:"已移交"
# filter_scope: "space"
# filters: [["is_transfered", "=", true]]
# columns:["title","fonds_name","archive_transfer_id","transfered","transfered_by"]
destroy:
label:"待销毁"
filter_scope: "space"
filters: [["is_received", "=", true],["destroy_date_timestamp", "<=", new Date().getTime()],["is_destroyed", "=", false]]
columns:["year","title","document_date","destroy_date","archive_destroy_id"]
permission_set:
user:
allowCreate: false
allowDelete: false
allowEdit: false
allowRead: true
modifyAllRecords: false
viewAllRecords: true
list_views:["default","recent","all","borrow"]
actions:["borrow"]
admin:
allowCreate: true
allowDelete: true
allowEdit: true
allowRead: true
modifyAllRecords: true
viewAllRecords: true
list_views:["default","recent","all","borrow"]
actions:["borrow"]
  triggers:
    # Server-side hook, runs once after each insert: derives every
    # computed field for the fresh record.
    "after.insert.server.default":
      on: "server"
      when: "after.insert"
      todo: (userId, doc)->
        # Initialize workflow flags (not received / destroyed / borrowed)
        set_init(doc._id)
        # Derive the electronic record code
        set_electronic_record_code(doc._id)
        # Copy the owning company from the fonds
        set_company(doc._id)
        # Match retention rules and store retention + destroy date
        set_retention(doc)
        # Derive the category code from the archiving department
        set_category_code(doc)
        # Compute the destroy date from retention period
        set_destory(doc)
        # Flag the record for XML (re-)encapsulation
        set_hasXml(doc._id)
        return true
    # Server-side hook, runs after each update: inspects the $set
    # modifier and recomputes only the fields whose inputs changed.
    "after.update.server.default":
      on: "server"
      when: "after.update"
      todo: (userId, doc, fieldNames, modifier, options)->
        if modifier['$set']?.fonds_name
          set_company(doc._id)
        if modifier['$set']?.item_number or modifier['$set']?.organizational_structure or modifier['$set']?.retention_peroid or modifier['$set']?.fonds_name or modifier['$set']?.year
          set_archivecode(doc._id)
        if modifier['$set']?.retention_peroid || modifier['$set']?.document_date
          set_destory(doc)
        if modifier['$set']?.archive_dept # recompute the category code
          set_category_code(doc)
        # Flag the record for XML (re-)encapsulation
        set_hasXml(doc._id)
        # Append an audit-trail entry
        set_audit(doc?._id, doc?.space, userId)
actions:
number_adjuct:
label:'编号调整'
visible:true
on:'list'
todo:(object_name)->
if Creator.TabularSelectedIds?[object_name].length == 0
swal("请先选择要接收的档案")
return
init_num = prompt("输入初始件号值")
Meteor.call("archive_item_number",object_name,Creator.TabularSelectedIds?[object_name],init_num,
(error, result)->
if result
text = "编号已更新到" + result + "号"
swal(text)
)
receive:
label: "接收"
visible: true
on: "list"
todo:(object_name)->
if Session.get("list_view_id")== "receive"
if Creator.TabularSelectedIds?[object_name].length == 0
swal("请先选择要接收的档案")
return
space = Session.get("spaceId")
Meteor.call("archive_receive",object_name,Creator.TabularSelectedIds?[object_name],space,
(error,result) ->
if result
text = "共接收"+result[0]+"条,"+"成功"+result[1]+"条"
swal(text)
)
export2xml:
label:"导出XML"
visible:false
on: "list"
todo:(object_name, record_id)->
# 转为XML文件
Meteor.call("archive_export",object_name,
(error,result) ->
if result
text = "记录导出路径:"
swal(text + result)
)
borrow:
label:"借阅"
visible:true
on: "record"
todo:(object_name, record_id, fields)->
borrower = Creator.Collections[object_name].findOne({_id:record_id})?.borrowed_by
if borrower == Meteor.userId()
swal("您已借阅了此档案,归还之前无需重复借阅")
return
doc = Archive.createBorrowObject(object_name, record_id)
Creator.createObject("archive_borrow",doc)
viewxml:
label:"查看XML"
visible:true
on: "record"
todo:(object_name, record_id, fields)->
has_xml = Creator.Collections[object_name].findOne({_id:record_id})?.has_xml
if has_xml
window.location = Steedos.absoluteUrl "/view/encapsulation/xml?filename=#{record_id}.xml"
else
swal("该档案暂无XML封装文件") | true | # 设置保管期限
# Determine the retention period for a document by matching archive rules
# against its title, then persist the retention and the computed destroy date.
# doc: the archive_wenshu record just inserted (must carry _id).
set_retention = (doc)->
  # Rules targeting the "title" field; each carries a keyword list and the
  # retention it maps to.
  rules = Creator.Collections["archive_rules"].find({ fieldname: 'title'},{ fields:{ keywords: 1,retention:1 } } ).fetch()
  if rules
    rules_keywords = _.pluck rules, "keywords"
  else
    rules_keywords = []
  # Guard: a record without a title matches no rule and falls through to the
  # default retention (previously this crashed on doc.title.indexOf).
  title = doc.title or ""
  # The first rule whose EVERY keyword occurs in the title wins.
  i = 0
  while i < rules_keywords.length
    is_matched = true
    j = 0
    arrs = rules_keywords[i]
    while j < arrs.length
      if title.indexOf(arrs[j]) < 0
        is_matched = false
        break
      j++
    if is_matched
      retention_id = rules[i].retention
      break
    i++
  # Fall back to the default retention when no rule matched.
  if retention_id
    retention = Creator.Collections["archive_retention"].findOne({_id:retention_id})
  else
    retention = Creator.Collections["archive_retention"].findOne({is_default:true})
  # Destroy date = document date + retention years.
  if retention?.years
    # A document without a date defaults to "now".
    if !doc.document_date
      doc.document_date = new Date()
    duration = retention?.years
    year = doc.document_date?.getFullYear() + duration
    month = doc.document_date?.getMonth()
    day = doc.document_date?.getDate()
    destroy_date = new Date(year,month,day)
    destroy_date_timestamp = parseInt(destroy_date?.getTime())
    # direct.update bypasses triggers, avoiding after.update recursion.
    Creator.Collections["archive_wenshu"].direct.update(doc._id,
      {
        $set:{
          retention: retention,
          destroy_date: destroy_date,
          destroy_date_timestamp: destroy_date_timestamp
        }
      })
# Derive the category code from the archiving department and store it.
set_category_code = (doc)->
  keyword = doc?.archive_dept
  # Without a department there is nothing to classify.
  return unless keyword
  classification = Creator.Collections["archive_classification"].findOne({keywords: keyword})
  return unless classification?._id
  # direct.update bypasses triggers so the after.update hook is not re-entered.
  Creator.Collections["archive_wenshu"].direct.update doc._id,
    $set:
      category_code: classification._id
# Reset the workflow flags of a freshly inserted record: it starts out
# not received, not destroyed and not borrowed.
set_init = (record_id)->
  initial_flags =
    is_received: false
    is_destroyed: false
    is_borrowed: false
  # direct.update skips triggers (no after.update recursion).
  Creator.Collections["archive_wenshu"].direct.update record_id, {$set: initial_flags}
# Build and persist the electronic record code:
#   <fonds code> + "WS" + <year> + <6-digit zero-padded sequence>
# The sequence is the count of records already stored for that year.
set_electronic_record_code = (record_id)->
  record = Creator.Collections["archive_wenshu"].findOne(record_id, {fields: {fonds_name: 1, year: 1}})
  return unless record?.fonds_name and record?.year
  fonds_code = Creator.Collections["archive_fonds"].findOne(record.fonds_name, {fields: {code: 1}})?.code
  count = Creator.Collections["archive_wenshu"].find({year: record.year}).count()
  # Zero-pad to 6 digits (last 6 chars of "0000000" + count).
  count_code = ("0000000" + count).slice(-6)
  code = fonds_code + "WS" + record.year + count_code
  Creator.Collections["archive_wenshu"].direct.update record_id,
    $set:
      electronic_record_code: code
# Copy the owning company from the record's fonds onto the record itself.
# (The field projection previously fetched retention_peroid / year /
# organizational_structure / item_number as well — copy-pasted from
# set_archivecode; only fonds_name is used here.)
set_company = (record_id)->
  record = Creator.Collections["archive_wenshu"].findOne(record_id,{fields:{fonds_name:1}})
  if record?.fonds_name
    fonds_company = Creator.Collections["archive_fonds"].findOne(record.fonds_name,{fields:{company:1}})?.company
    if fonds_company
      # direct.update bypasses triggers (no after.update recursion).
      Creator.Collections["archive_wenshu"].direct.update(record_id,
        {
          $set:{
            company: fonds_company
          }
        })
# Compose the archival code (档号) and store it on the record.
# Format: <fonds>-WS-<year>-<retention>[-<organization>]-<4-digit item number>
# All five source fields must be present; the organization code segment is
# optional (only included when the organization record carries a code).
set_archivecode = (record_id)->
  record = Creator.Collections["archive_wenshu"].findOne(record_id,{fields:{fonds_name:1,retention_peroid:1,organizational_structure:1,year:1,item_number:1}})
  if record?.item_number and record?.fonds_name and record?.retention_peroid and record?.year and record?.organizational_structure
    fonds_code = Creator.Collections["archive_fonds"].findOne(record.fonds_name,{fields:{code:1}})?.code
    retention_peroid_code = Creator.Collections["archive_retention"].findOne(record.retention_peroid,{fields:{code:1}})?.code
    organizational_structure_code = Creator.Collections["archive_organization"].findOne(record.organizational_structure,{fields:{code:1}})?.code
    year = record.year
    # Zero-pad the item number to 4 digits.
    # NOTE(review): slice(-4) truncates item numbers longer than 4 digits to
    # their last 4 digits — confirm item numbers are expected to stay <= 9999.
    item_number = (Array(6).join('0') + record.item_number).slice(-4)
    if fonds_code and year and retention_peroid_code and item_number
      if organizational_structure_code
        archive_code = fonds_code + "-WS" + "-" + year + "-"+ retention_peroid_code + "-" + organizational_structure_code + "-"+item_number
      else
        archive_code = fonds_code + "-WS" + "-" + year + "-"+ retention_peroid_code + "-" + item_number
      # direct.update bypasses triggers (no after.update recursion).
      # (A stray debug console.log was removed from this function.)
      Creator.Collections["archive_wenshu"].direct.update(record_id,
        {
          $set:{
            archival_code:archive_code
          }
        })
# Recompute the destroy date (document date + retention years) after the
# retention period or document date changed. The historical typo in the
# name ("destory") is kept because the object triggers call it by this name.
set_destory = (doc)->
  return unless doc?.retention_peroid and doc?.document_date
  duration = Creator.Collections["archive_retention"].findOne({_id: doc.retention_peroid})?.years
  return unless duration
  base = doc.document_date
  destroy_date = new Date(base.getFullYear() + duration, base.getMonth(), base.getDate())
  # direct.update bypasses triggers (no after.update recursion).
  Creator.Collections["archive_wenshu"].direct.update {_id: doc._id},
    $set:
      destroy_date: destroy_date
      destroy_date_timestamp: parseInt(destroy_date.getTime())
# Append an audit-trail entry recording that this document was modified.
# record_id: the modified archive record; space/userId: workspace and actor.
set_audit = (record_id, space, userId)->
  entry =
    business_status: "历史行为"
    business_activity: "修改文书档案"
    action_time: new Date()
    action_user: userId
    action_mandate: ""
    action_description: ""
    action_administrative_records_id: record_id
    created_by: userId
    created: new Date()
    owner: userId
    space: space
  Creator.Collections["archive_audit"].insert(entry)
# Mark the record as needing (re-)encapsulation into XML by clearing the
# has_xml flag; direct.update bypasses triggers.
set_hasXml = (record_id)->
  Creator.Collections["archive_wenshu"].direct.update {_id: record_id},
    $set:
      has_xml: false
Creator.Objects.archive_wenshu =
name: "archive_wenshu"
icon: "record"
label: "文书简化"
enable_search: true
enable_files: true
enable_api: true
enable_tree: false
filter_company: true
fields:
archival_category_code:
type: "text"
label:"档案门类代码"
defaultValue: "WS"
fonds_constituting_unit_name:
type:"text"
label:"立档单位名称"
defaultValue: "河北港口集团有限公司"
aggregation_level:
type: "select"
label:"聚合层次"
defaultValue: "文件"
options:[
{label:"案卷",value:"案卷"},
{label:"文件",value:"文件"}],
allowedValues:["案卷","文件"]
electronic_record_code:
type: "text"
label:"电子文件号"
omit:true
archival_code:
type:"text"
label:"档号"
is_wide:true
omit:true
group:"档号"
fonds_name:
type:"master_detail"
label:"全宗名称"
reference_to:"archive_fonds"
group:"档号"
year:
type: "text"
label:"年度"
sortable:true
group:"档号"
retention_peroid:
type:"master_detail"
label:"保管期限"
reference_to:"archive_retention"
sortable:true
group:"档号"
organizational_structure:
type:"master_detail"
label:"机构"
reference_to: "archive_organization"
group:"档号"
category_code:
type:"master_detail"
label:"类别号"
reference_to: "archive_classification"
group:"档号"
item_number:
type: "number"
label:"件号"
sortable:true
group:"档号"
document_sequence_number:
type: "number"
label:"文档序号"
group:"档号"
title:
type:"textarea"
label:"题名"
is_wide:true
is_name:true
required:true
searchable:true
group:"内容描述"
parallel_title:
type: "text"
label:"并列题名"
group:"内容描述"
other_title_information:
type:"text"
label:"说明题名文字"
group:"内容描述"
annex_title:
type:"textarea"
label:"附件题名"
group:"内容描述"
main_dept:
type:"text",
label:"主办部室"
group:"内容描述"
descriptor:
type:"text"
label:"主题词"
is_wide:true
group:"内容描述"
keyword:
type:"text"
label:"关键词"
omit:true
group:"内容描述"
abstract:
type:"text"
label:"摘要"
group:"内容描述"
personal_name:
type:"text"
      label:"人名"
group:"内容描述"
document_number:
type:"text"
label:"文件编号"
group:"内容描述"
author:
type:"text"
label:"责任者"
group:"内容描述"
document_date:
type:"date"
label:"文件日期"
format:"YYYYMMDD"
sortable:true
group:"内容描述"
prinpipal_receiver:
type:"text",
label:"主送",
is_wide:true
group:"内容描述"
other_receivers:
type:"text",
label:"抄送",
group:"内容描述"
report:
type:"text",
label:"抄报",
group:"内容描述"
security_classification:
type:"select"
label:"密级"
defaultValue:"公开"
options: [
{label: "公开", value: "公开"},
{label: "限制", value: "限制"},
{label: "秘密", value: "秘密"},
{label: "机密", value: "机密"},
{label: "绝密", value: "绝密"},
{label: "非密", value: "非密"},
{label: "普通", value: "普通"}
]
allowedValues:["公开","限制","秘密","机密","绝密","非密","普通"]
group:"内容描述"
secrecy_period:
type:"select"
label:"保密期限"
options: [
{label: "10年", value: "10年"},
{label: "20年", value: "20年"},
{label: "30年", value: "30年"}
],
allowedValues:["10年","20年","30年"],
group:"内容描述"
applicant_organization_name:
type:"text"
label:"拟稿单位"
group:"内容描述"
applicant_name:
type:"text"
label:"拟稿人"
group:"内容描述"
reference:
type: "text"
label:"参见"
group:"内容描述"
destroy_date:
type:"date"
label:"销毁期限"
format:"YYYYMMDD"
omit:true
group:"内容描述"
destroy_date_timestamp:
type:"number"
label:"销毁期限时间戳"
hidden:true
group:"内容描述"
annotation:
type:"textarea",
label:"备注"
is_wide:true
group:"内容描述"
document_aggregation:
type:"select",
label:"文件组合类型",
defaultValue: "单件"
options: [
{label: "单件", value: "单件"},
{label: "组合文件", value: "组合文件"}
],
allowedValues:["单件","组合文件"],
group:"形式特征"
total_number_of_pages:
type:"number"
label:"页数"
group:"形式特征"
language:
type:"text"
label:"语种"
defaultValue: "汉语"
group:"形式特征"
document_type:
type:"text"
label:"文件类型"
group:"形式特征"
produce_flag:
type:"select",
label:"处理标志",
defaultValue: "在档"
options: [
{label: "在档", value: "在档"},
{label: "暂存", value: "暂存"},
{label: "移出", value: "移出"},
{label: "销毁", value: "销毁"},
{label: "出借", value: "出借"}
],
allowedValues:["在档","暂存","移出","销毁","出借"],
group:"形式特征"
orignal_document_creation_way:
type:"text"
label:"电子档案生成方式"
defaultValue: "原生"
options: [
{label: "数字化", value: "数字化"},
{label: "原生", value: "原生"}
],
allowedValues:["数字化","原生"],
group:"形式特征"
document_status:
type:"select",
label:"文件状态",
defaultValue: "电子归档"
options: [
{label: "不归档", value: "不归档"},
{label: "电子归档", value: "电子归档"},
{label: "暂存", value: "暂存"},
{label: "待归档", value: "待归档"},
{label: "实物归档", value: "实物归档"}
]
allowedValues:["不归档","电子归档","待归档","暂存","实物归档"]
group:"形式特征"
archive_dept:
type:"text"
label:"归档部门"
group:"形式特征"
archive_date:
type:"date"
label:"归档日期"
group:"形式特征"
signature_rules:
type:"text"
label:"签名规则"
omit:true
group:"电子签名"
signature_time:
type:"date"
label:"签名时间"
omit:true
group:"电子签名"
signer:
type:"text"
label:"签名人"
omit:true
group:"电子签名"
signature_algorithmidentifier:
type:"text"
label:"签名算法标识"
omit:true
group:"电子签名"
signature:
type:"text"
label:"签名结果"
omit:true
is_wide:true
group:"电子签名"
certificate:
type:"text"
label:"证书"
omit:true
is_wide:true
group:"电子签名"
certificate_reference:
type:"text"
label:"证书引证"
omit:true
group:"电子签名"
physical_record_characteristics:
type: "text"
label:"数字化对象形态"
defaultValue: "PDF"
group:"数字化属性"
scanning_resolution:
type: "text"
label:"扫描分辨率"
defaultValue: "220dpi"
group:"数字化属性"
scanning_color_model:
type: "text"
label:"扫描色彩模式"
defaultValue: "彩色"
group:"数字化属性"
image_compression_scheme:
type: "text"
label:"图像压缩方案"
defaultValue: "无损压缩"
group:"数字化属性"
device_type:
type: "text"
label:"设备类型"
defaultValue: ""
group:"数字化设备信息"
device_manufacturer:
type: "text"
label:"设备制造商"
defaultValue: ""
group:"数字化设备信息"
device_model_number:
type: "text"
label:"设备型号"
defaultValue: ""
group:"数字化设备信息"
device_model_serial_number:
type: "text"
label:"设备序列号"
defaultValue: ""
group:"数字化设备信息"
software_type:
type: "text"
label:"软件类型"
defaultValue: ""
group:"数字化设备信息"
software_name:
type: "text"
label:"软件名称"
defaultValue: ""
group:"数字化设备信息"
current_location:
type:"text"
label:"当前位置"
defaultValue:"\\\\192.168.0.151\\beta\\data\\oafile"
is_wide:true
group:"存储位置"
offline_medium_identifier:
type:"text"
label:"脱机载体编号"
group:"存储位置"
offline_medium_storage_location:
type:"text"
label:"脱机载体存址"
group:"存储位置"
intelligent_property_statement:
type: "text"
label:"知识产权说明"
group:"权限管理"
control_identifier:
type: "select"
label:"控制标识"
options: [
{label: "开放", value: "开放"},
{label: "控制", value: "控制"}
],
allowedValues:["开放","控制"],
group:"权限管理"
authorized_agent:
type: "text"
label:"授权对象"
group:"权限管理"
permission_assignment:
type: "select"
label:"授权行为"
options: [
{label: "公布", value: "公布"},
{label: "复制", value: "复制"},
{label: "浏览", value: "浏览"},
{label: "解密", value: "解密"}
],
allowedValues:["公布","复制","浏览","解密"],
group:"权限管理"
agent_type:
type: "select"
label:"机构人员类型"
defaultValue:"部门"
options: [
{label: "单位", value: "单位"},
{label: "部门", value: "部门"},
{label: "个人", value: "个人"}
],
allowedValues:["单位","部门","个人"],
group:"机构人员"
agent_name:
type: "text"
label:"机构人员名称"
group:"机构人员"
organization_code:
type: "text"
label:"组织机构代码"
group:"机构人员"
agent_belongs_to:
type: "text"
label:"机构人员隶属"
group:"机构人员"
related_archives:
label:'关联文件'
type:'lookup'
reference_to:'archive_wenshu'
multiple:true
is_wide:true
group:"关联文件"
old_id:
type:"text"
label:"老系统ID"
hidden: true
external_id:
type:"text"
label:'表单ID'
hidden: true
# 是否接收,默认是未接收
is_received:
type:"boolean"
label:"是否接收"
defaultValue:false
hidden: true
received:
type:"datetime"
label:"接收时间"
hidden: true
received_by:
type: "lookup"
label:"接收人"
reference_to: "users"
hidden: true
# 是否移交,默认是不存在,在“全部”视图下点击移交,进入“待移交”视图,此时is_transfer=false
# 审核通过之后,is_transfer = true
is_transfered:
type:"boolean"
label:"是否移交"
hidden: true
transfered:
type:"datetime"
label:"移交时间"
hidden: true
transfered_by:
type: "lookup"
label:"移交人"
reference_to: "users"
hidden: true
archive_transfer_id:
type:"master_detail"
label:"移交单"
reference_to:"archive_transfer"
group:"移交"
# 是否销毁,默认是不存在,在“全部”视图下点击销毁,进入“待销毁”视图,此时is_destroy=false
# 审核通过之后,is_transfer = true
is_destroyed:
type:"boolean"
label:'是否销毁'
hidden: true
destroyed:
type:"datetime"
label:'实际销毁时间'
hidden: true
destroyed_by:
type: "lookup"
label:"销毁人"
reference_to: "users"
hidden: true
archive_destroy_id:
type:"master_detail"
label:"销毁单"
filters:[["destroy_state", "$eq", "未销毁"]]
depend_on:["destroy_state"]
reference_to:"archive_destroy"
group:"销毁"
# 是否借阅
is_borrowed:
type:"boolean"
defaultValue:false
label:'是否借阅'
hidden: true
borrowed:
type:"datetime"
label:"借阅时间"
hidden: true
borrowed_by:
type: "lookup"
label:"借阅人"
reference_to: "users"
hidden: true
related_modified:
type:"datetime"
label:"附属更新时间"
hidden: true
has_xml:
type:"boolean"
label:"是否封装xml"
hidden: true
company:
type: "master_detail"
label: '所属公司'
reference_to: "organizations"
hidden: true
list_views:
# recent:
# label: "最近查看"
# filter_scope: "space"
all:
label: "全部"
filter_scope: "space"
filters: [["is_received", "=", true],["is_destroyed", "=", false]]
# columns:['item_number','archival_code',"author","title","electronic_record_code","total_number_of_pages","annotation",'archive_transfer_id']
columns:[
"year","retention_peroid","item_number",
"title","document_number","document_date",
"archive_dept","author"]
# borrow:
# label:"查看"
# filter_scope: "space"
# filters: [["is_received", "=", true]]
# columns:['document_sequence_number',"author","title","document_date","total_number_of_pages","annotation"]
receive:
label:"待接收"
filter_scope: "space"
filters: [["is_received", "=", false]]
# 已移交功能去掉===============
# transfered:
# label:"已移交"
# filter_scope: "space"
# filters: [["is_transfered", "=", true]]
# columns:["title","fonds_name","archive_transfer_id","transfered","transfered_by"]
destroy:
label:"待销毁"
filter_scope: "space"
filters: [["is_received", "=", true],["destroy_date_timestamp", "<=", new Date().getTime()],["is_destroyed", "=", false]]
columns:["year","title","document_date","destroy_date","archive_destroy_id"]
permission_set:
user:
allowCreate: false
allowDelete: false
allowEdit: false
allowRead: true
modifyAllRecords: false
viewAllRecords: true
list_views:["default","recent","all","borrow"]
actions:["borrow"]
admin:
allowCreate: true
allowDelete: true
allowEdit: true
allowRead: true
modifyAllRecords: true
viewAllRecords: true
list_views:["default","recent","all","borrow"]
actions:["borrow"]
triggers:
"after.insert.server.default":
on: "server"
when: "after.insert"
todo: (userId, doc)->
# 保存初始条件
set_init(doc._id)
# 设置电子文件号
set_electronic_record_code(doc._id)
# 设置公司
set_company(doc._id)
# 设置保管期限
set_retention(doc)
# 设置分类号
set_category_code(doc)
# 设置销毁期限
set_destory(doc)
# 设置重新封装
set_hasXml(doc._id)
return true
"after.update.server.default":
on: "server"
when: "after.update"
todo: (userId, doc, fieldNames, modifier, options)->
if modifier['$set']?.fonds_name
set_company(doc._id)
if modifier['$set']?.item_number or modifier['$set']?.organizational_structure or modifier['$set']?.retention_peroid or modifier['$set']?.fonds_name or modifier['$set']?.year
set_archivecode(doc._id)
if modifier['$set']?.retention_peroid || modifier['$set']?.document_date
set_destory(doc)
if modifier['$set']?.archive_dept # 设置分类号
set_category_code(doc)
# 设置重新封装
set_hasXml(doc._id)
# 日志记录
set_audit(doc?._id, doc?.space, userId)
actions:
number_adjuct:
label:'编号调整'
visible:true
on:'list'
todo:(object_name)->
if Creator.TabularSelectedIds?[object_name].length == 0
swal("请先选择要接收的档案")
return
init_num = prompt("输入初始件号值")
Meteor.call("archive_item_number",object_name,Creator.TabularSelectedIds?[object_name],init_num,
(error, result)->
if result
text = "编号已更新到" + result + "号"
swal(text)
)
receive:
label: "接收"
visible: true
on: "list"
todo:(object_name)->
if Session.get("list_view_id")== "receive"
if Creator.TabularSelectedIds?[object_name].length == 0
swal("请先选择要接收的档案")
return
space = Session.get("spaceId")
Meteor.call("archive_receive",object_name,Creator.TabularSelectedIds?[object_name],space,
(error,result) ->
if result
text = "共接收"+result[0]+"条,"+"成功"+result[1]+"条"
swal(text)
)
export2xml:
label:"导出XML"
visible:false
on: "list"
todo:(object_name, record_id)->
# 转为XML文件
Meteor.call("archive_export",object_name,
(error,result) ->
if result
text = "记录导出路径:"
swal(text + result)
)
borrow:
label:"借阅"
visible:true
on: "record"
todo:(object_name, record_id, fields)->
borrower = Creator.Collections[object_name].findOne({_id:record_id})?.borrowed_by
if borrower == Meteor.userId()
swal("您已借阅了此档案,归还之前无需重复借阅")
return
doc = Archive.createBorrowObject(object_name, record_id)
Creator.createObject("archive_borrow",doc)
viewxml:
label:"查看XML"
visible:true
on: "record"
todo:(object_name, record_id, fields)->
has_xml = Creator.Collections[object_name].findOne({_id:record_id})?.has_xml
if has_xml
window.location = Steedos.absoluteUrl "/view/encapsulation/xml?filename=#{record_id}.xml"
else
swal("该档案暂无XML封装文件") |
[
{
"context": "###*\n * 多选开关\n * @author vfasky <vfasky@gmail.com>\n###\n'use strict'\n\n{Template} =",
"end": 30,
"score": 0.9996113777160645,
"start": 24,
"tag": "USERNAME",
"value": "vfasky"
},
{
"context": "###*\n * 多选开关\n * @author vfasky <vfasky@gmail.com>\n###\n'use strict'\n\n... | src/coffee/cellsSwitch.coffee | vfasky/mcore-weui | 0 | ###*
* 多选开关
* @author vfasky <vfasky@gmail.com>
###
'use strict'
{Template} = require 'mcore'
CellsCheckbox = require './cellsCheckbox'
class CellsSwitch extends CellsCheckbox
init: ->
@render require('../tpl/cellsSwitch.html')
Template.components['cells-switch'] = CellsSwitch
module.exports = CellsSwitch
| 185451 | ###*
* 多选开关
* @author vfasky <<EMAIL>>
###
'use strict'
{Template} = require 'mcore'
CellsCheckbox = require './cellsCheckbox'
class CellsSwitch extends CellsCheckbox
init: ->
@render require('../tpl/cellsSwitch.html')
Template.components['cells-switch'] = CellsSwitch
module.exports = CellsSwitch
| true | ###*
* 多选开关
* @author vfasky <PI:EMAIL:<EMAIL>END_PI>
###
'use strict'
{Template} = require 'mcore'
CellsCheckbox = require './cellsCheckbox'
class CellsSwitch extends CellsCheckbox
init: ->
@render require('../tpl/cellsSwitch.html')
Template.components['cells-switch'] = CellsSwitch
module.exports = CellsSwitch
|
[
{
"context": "bre\"\n contact: \"Contacto\"\n twitter_follow: \"Seguir\"\n\n forms:\n name: \"Nombre\"\n email: \"Email\"\n",
"end": 571,
"score": 0.9989833831787109,
"start": 565,
"tag": "USERNAME",
"value": "Seguir"
},
{
"context": "ulty: \"Dificultad: \"\n\n contact:... | app/locale/es-419.coffee | cochee/codecombat | 1 | module.exports = nativeDescription: "español (América Latina)", englishDescription: "Spanish (Latin America)", translation:
common:
loading: "Cargando..."
modal:
close: "Cerrar"
okay: "OK"
not_found:
page_not_found: "Pagina no encontrada"
nav:
sign_up: "Crear Cuenta"
log_in: "Entrar"
log_out: "Salir"
play: "Jugar"
editor: "Editor"
blog: "Blog"
forum: "Foro"
admin: "Admin"
home: "Inicio"
contribute: "Contribuir"
legal: "Legal"
about: "Sobre"
contact: "Contacto"
twitter_follow: "Seguir"
forms:
name: "Nombre"
email: "Email"
message: "Mensaje"
cancel: "Cancelar"
login:
log_in: "Iniciar sesión"
sign_up: "crear nueva cuenta"
or: ", o "
recover: "recuperar cuenta"
signup:
description: "Es gratis. Solo necesitas un par de cosas y estarás listo para comenzar:"
email_announcements: "Recibe noticias por email"
coppa: "más de 13 años o fuera de los Estados Unidos"
coppa_why: "(¿Por qué?)"
creating: "Creando Cuenta..."
sign_up: "Registrarse"
or: "o "
log_in: "Inicia sesión con tu contraseña"
home:
slogan: "Aprende a programar en JavaScript jugando"
no_ie: "¡Lo sentimos! CodeCombat no funciona en Internet Explorer 9 o versiones anteriores."
no_mobile: "¡CodeCombat no fue diseñado para dispositivos móviles y quizás no funcione!"
play: "Jugar"
play:
choose_your_level: "Elige tu nivel"
adventurer_prefix: "Puedes saltar a cualquier nivel de abajo, o discutir los niveles en "
adventurer_forum: "el foro del aventurero"
adventurer_suffix: "."
campaign_beginner: "Campaña para principiantes"
campaign_beginner_description: "... en la que aprendes la hechicería de la programación."
campaign_dev: "Niveles aleatorios más difíciles"
campaign_dev_description: "... en los que aprendes sobre la interfaz mientras haces algo un poco más difícil."
campaign_multiplayer: "Arenas Multijugador"
campaign_multiplayer_description: "... en las que programas cara-a-cara contra otros jugadores."
campaign_player_created: "Creados-Por-Jugadores"
campaign_player_created_description: "... en los que luchas contra la creatividad de tus compañeros <a href=\"/contribute#artisan\">Hechiceros Artesanales</a>."
level_difficulty: "Dificultad: "
contact:
contact_us: "Contacta a CodeCombat"
welcome: "¡Qué bueno es escucharte! Usa este formulario para enviarnos un mensaje"
contribute_prefix: "¡Si estas interesado en contribuir, chequea nuestra "
contribute_page: "página de contribución"
contribute_suffix: "!"
forum_prefix: "Para cualquier cosa pública, por favor prueba "
forum_page: "nuestro foro"
forum_suffix: "en su lugar."
sending: "Enviando..."
send: "Enviar Comentario"
diplomat_suggestion:
title: "¡Ayuda a traducir CodeCombat!"
sub_heading: "Necesitamos tus habilidades de idioma."
pitch_body: "Desarrollamos CodeCombat en inglés, pero ya tenemos jugadores por todo el mundo. Muchos de ellos quieren jugar en español pero no hablan inglés, así que si puedes hablar ambos, por favor considera registrarte pare ser un Diplomático y ayudar a traducir tanto el sitio de CodeCombat como todos los niveles al español."
missing_translations: "Hasta que podamos traducir todo al español, verás inglés cuando el español no esté disponible."
learn_more: "Aprende más sobre ser un Diplomático"
subscribe_as_diplomat: "Suscribete como un Diplomático"
account_settings:
title: "Configuración de la Cuenta"
not_logged_in: "Inicia sesión o crea una cuenta para cambiar tu configuración."
autosave: "Cambios Guardados Automáticamente"
me_tab: "Yo"
picture_tab: "Imagen"
wizard_tab: "Hechicero"
password_tab: "Contraseña"
emails_tab: "Correos"
language_tab: "Lenguaje"
gravatar_select: "Seleccione que foto de Gravatar usar"
gravatar_add_photos: "Añadir imágenes en miniatura y fotos a una cuenta de Gravatar para su correo electrónico para elegir una imagen."
gravatar_add_more_photos: "Añada más fotos a su cuenta de Gravatar para accederlas aquí."
wizard_color: "Color de Ropas del Hechicero"
new_password: "Nueva Contraseña"
new_password_verify: "Verificar"
email_subscriptions: "Suscripciones de Email"
email_announcements: "Noticias"
email_announcements_description: "Recibe correos electrónicos con las últimas noticias y desarrollos de CodeCombat."
contributor_emails: "Emails Clase Contribuyente"
contribute_prefix: "¡Estamos buscando gente que se una a nuestro grupo! Echa un vistazo a la "
contribute_page: "página de contribución"
contribute_suffix: "para averiguar más."
email_toggle: "Activar Todo"
language: "Lenguaje"
saving: "Guardando..."
error_saving: "Error al Guardar"
saved: "Cambios Guardados"
password_mismatch: "La contraseña no coincide."
account_profile:
edit_settings: "Editar Configuración"
profile_for_prefix: "Perfil para "
profile_for_suffix: ""
profile: "Perfil"
user_not_found: "Usuario no encontrado. ¿URL correcta?"
gravatar_not_found_mine: "No hemos podido encontrar tu perfil asociado con "
gravatar_signup_prefix: "Registratre en"
gravatar_signup_suffix: "¡Para ponerte en marcha!"
gravatar_not_found_other: "Por desgracia, no hay ningún perfil asociado con la dirección de correo electrónico de esta persona."
gravatar_contact: "Contacto"
gravatar_websites: "Sitios Web"
gravatar_accounts: "Como puede verse en"
gravatar_profile_link: "Perfil Gravatar Completo"
play_level:
level_load_error: "El nivel no puede ser cargado."
done: "Listo"
grid: "Cuadricula"
customize_wizard: "Personalizar Hechicero"
home: "Inicio"
guide: "Guia"
multiplayer: "Multijugador"
restart: "Reiniciar"
goals: "Objetivos"
action_timeline: "Cronologia de Accion"
click_to_select: "Has click en una unidad para seleccionarla."
reload_title: "¿Recargar Todo el Codigo?"
reload_really: "¿Estas seguro de que quieres empezar este nivel desde el principio?"
reload_confirm: "Recargar Todo"
victory_title_prefix: "¡"
victory_title_suffix: " Completo!"
victory_sign_up: "Registrate para recibir actualizaciones"
victory_sign_up_poke: "¿Quieres recibir las ultimas noticias por correo? ¡Crea una cuenta gratuita y te mantendremos informado!"
victory_rate_the_level: "Valora el nivel: "
victory_play_next_level: "Jugar Próximo Nivel"
victory_go_home: "Ir al Inicio"
victory_review: "¡Cuéntanos más!"
victory_hour_of_code_done: "¿Has acabado?"
victory_hour_of_code_done_yes: "¡Si, he terminado con mi Hora de Código!"
multiplayer_title: "Configuración de Multijugador"
multiplayer_link_description: "Da este enlace a cualquiera para que se te una."
multiplayer_hint_label: "Consejo:"
multiplayer_hint: " Cliquea el enlace para seleccionar todo, luego presiona ⌘-C o Ctrl-C para copiar el enlace."
multiplayer_coming_soon: "¡Más características de multijugador por venir!"
guide_title: "Guía"
tome_minion_spells: "Hechizos de tus Secuaces"
tome_read_only_spells: "Hechizos de Sólo Lectura"
tome_other_units: "Otras Unidades"
tome_cast_button_castable: "Invocable"
tome_cast_button_casting: "Invocando"
tome_cast_button_cast: "Invocar"
tome_autocast_delay: "Demora de Autolanzamiento"
tome_autocast_1: "1 segundo"
tome_autocast_3: "3 segundos"
tome_autocast_5: "5 segundos"
tome_autocast_manual: "Manual"
tome_select_spell: "Selecciona un Hechizo"
tome_select_a_thang: "Selecciona Alguien para "
tome_available_spells: "Hechizos Disponibles"
hud_continue: "Continuar (presionar shift+space)"
| 70229 | module.exports = nativeDescription: "español (América Latina)", englishDescription: "Spanish (Latin America)", translation:
common:
loading: "Cargando..."
modal:
close: "Cerrar"
okay: "OK"
not_found:
page_not_found: "Pagina no encontrada"
nav:
sign_up: "Crear Cuenta"
log_in: "Entrar"
log_out: "Salir"
play: "Jugar"
editor: "Editor"
blog: "Blog"
forum: "Foro"
admin: "Admin"
home: "Inicio"
contribute: "Contribuir"
legal: "Legal"
about: "Sobre"
contact: "Contacto"
twitter_follow: "Seguir"
forms:
name: "Nombre"
email: "Email"
message: "Mensaje"
cancel: "Cancelar"
login:
log_in: "Iniciar sesión"
sign_up: "crear nueva cuenta"
or: ", o "
recover: "recuperar cuenta"
signup:
description: "Es gratis. Solo necesitas un par de cosas y estarás listo para comenzar:"
email_announcements: "Recibe noticias por email"
coppa: "más de 13 años o fuera de los Estados Unidos"
coppa_why: "(¿Por qué?)"
creating: "Creando Cuenta..."
sign_up: "Registrarse"
or: "o "
log_in: "Inicia sesión con tu contraseña"
home:
slogan: "Aprende a programar en JavaScript jugando"
no_ie: "¡Lo sentimos! CodeCombat no funciona en Internet Explorer 9 o versiones anteriores."
no_mobile: "¡CodeCombat no fue diseñado para dispositivos móviles y quizás no funcione!"
play: "Jugar"
play:
choose_your_level: "Elige tu nivel"
adventurer_prefix: "Puedes saltar a cualquier nivel de abajo, o discutir los niveles en "
adventurer_forum: "el foro del aventurero"
adventurer_suffix: "."
campaign_beginner: "Campaña para principiantes"
campaign_beginner_description: "... en la que aprendes la hechicería de la programación."
campaign_dev: "Niveles aleatorios más difíciles"
campaign_dev_description: "... en los que aprendes sobre la interfaz mientras haces algo un poco más difícil."
campaign_multiplayer: "Arenas Multijugador"
campaign_multiplayer_description: "... en las que programas cara-a-cara contra otros jugadores."
campaign_player_created: "Creados-Por-Jugadores"
campaign_player_created_description: "... en los que luchas contra la creatividad de tus compañeros <a href=\"/contribute#artisan\">Hechiceros Artesanales</a>."
level_difficulty: "Dificultad: "
contact:
contact_us: "<NAME>a a CodeCombat"
welcome: "¡Qué bueno es escucharte! Usa este formulario para enviarnos un mensaje"
contribute_prefix: "¡Si estas interesado en contribuir, chequea nuestra "
contribute_page: "página de contribución"
contribute_suffix: "!"
forum_prefix: "Para cualquier cosa pública, por favor prueba "
forum_page: "nuestro foro"
forum_suffix: "en su lugar."
sending: "Enviando..."
send: "Enviar Comentario"
diplomat_suggestion:
title: "¡Ayuda a traducir CodeCombat!"
sub_heading: "Necesitamos tus habilidades de idioma."
pitch_body: "Desarrollamos CodeCombat en inglés, pero ya tenemos jugadores por todo el mundo. Muchos de ellos quieren jugar en español pero no hablan inglés, así que si puedes hablar ambos, por favor considera registrarte pare ser un Diplomático y ayudar a traducir tanto el sitio de CodeCombat como todos los niveles al español."
missing_translations: "Hasta que podamos traducir todo al español, verás inglés cuando el español no esté disponible."
learn_more: "Aprende más sobre ser un Diplomático"
subscribe_as_diplomat: "Suscribete como un Diplomático"
account_settings:
title: "Configuración de la Cuenta"
not_logged_in: "Inicia sesión o crea una cuenta para cambiar tu configuración."
autosave: "Cambios Guardados Automáticamente"
me_tab: "Yo"
picture_tab: "Imagen"
wizard_tab: "Hechicero"
password_tab: "Contraseña"
emails_tab: "Correos"
language_tab: "Lenguaje"
gravatar_select: "Seleccione que foto de Gravatar usar"
gravatar_add_photos: "Añadir imágenes en miniatura y fotos a una cuenta de Gravatar para su correo electrónico para elegir una imagen."
gravatar_add_more_photos: "Añada más fotos a su cuenta de Gravatar para accederlas aquí."
wizard_color: "Color de Ropas del Hechicero"
new_password: "<PASSWORD>"
new_password_verify: "<PASSWORD>"
email_subscriptions: "Suscripciones de Email"
email_announcements: "Noticias"
email_announcements_description: "Recibe correos electrónicos con las últimas noticias y desarrollos de CodeCombat."
contributor_emails: "Emails Clase Contribuyente"
contribute_prefix: "¡Estamos buscando gente que se una a nuestro grupo! Echa un vistazo a la "
contribute_page: "página de contribución"
contribute_suffix: "para averiguar más."
email_toggle: "Activar Todo"
language: "Lenguaje"
saving: "Guardando..."
error_saving: "Error al Guardar"
saved: "Cambios Guardados"
password_mismatch: "La contraseña no coincide."
account_profile:
edit_settings: "Editar Configuración"
profile_for_prefix: "Perfil para "
profile_for_suffix: ""
profile: "Perfil"
user_not_found: "Usuario no encontrado. ¿URL correcta?"
gravatar_not_found_mine: "No hemos podido encontrar tu perfil asociado con "
gravatar_signup_prefix: "Registratre en"
gravatar_signup_suffix: "¡Para ponerte en marcha!"
gravatar_not_found_other: "Por desgracia, no hay ningún perfil asociado con la dirección de correo electrónico de esta persona."
gravatar_contact: "Contacto"
gravatar_websites: "Sitios Web"
gravatar_accounts: "Como puede verse en"
gravatar_profile_link: "Perfil Gravatar Completo"
play_level:
level_load_error: "El nivel no puede ser cargado."
done: "Listo"
grid: "Cuadricula"
customize_wizard: "Personalizar Hechicero"
home: "Inicio"
guide: "Guia"
multiplayer: "Multijugador"
restart: "Reiniciar"
goals: "Objetivos"
action_timeline: "Cronologia de Accion"
click_to_select: "Has click en una unidad para seleccionarla."
reload_title: "¿Recargar Todo el Codigo?"
reload_really: "¿Estas seguro de que quieres empezar este nivel desde el principio?"
reload_confirm: "Recargar Todo"
victory_title_prefix: "¡"
victory_title_suffix: " Completo!"
victory_sign_up: "Registrate para recibir actualizaciones"
victory_sign_up_poke: "¿Quieres recibir las ultimas noticias por correo? ¡Crea una cuenta gratuita y te mantendremos informado!"
victory_rate_the_level: "Valora el nivel: "
victory_play_next_level: "Jugar Próximo Nivel"
victory_go_home: "Ir al Inicio"
victory_review: "¡Cuéntanos más!"
victory_hour_of_code_done: "¿Has acabado?"
victory_hour_of_code_done_yes: "¡Si, he terminado con mi Hora de Código!"
multiplayer_title: "Configuración de Multijugador"
multiplayer_link_description: "Da este enlace a cualquiera para que se te una."
multiplayer_hint_label: "Consejo:"
multiplayer_hint: " Cliquea el enlace para seleccionar todo, luego presiona ⌘-C o Ctrl-C para copiar el enlace."
multiplayer_coming_soon: "¡Más características de multijugador por venir!"
guide_title: "Guía"
tome_minion_spells: "Hechizos de tus Secuaces"
tome_read_only_spells: "Hechizos de Sólo Lectura"
tome_other_units: "Otras Unidades"
tome_cast_button_castable: "Invocable"
tome_cast_button_casting: "Invocando"
tome_cast_button_cast: "Invocar"
tome_autocast_delay: "Demora de Autolanzamiento"
tome_autocast_1: "1 segundo"
tome_autocast_3: "3 segundos"
tome_autocast_5: "5 segundos"
tome_autocast_manual: "Manual"
tome_select_spell: "Selecciona un Hechizo"
tome_select_a_thang: "Selecciona Alguien para "
tome_available_spells: "Hechizos Disponibles"
hud_continue: "Continuar (presionar shift+space)"
| true | module.exports = nativeDescription: "español (América Latina)", englishDescription: "Spanish (Latin America)", translation:
common:
loading: "Cargando..."
modal:
close: "Cerrar"
okay: "OK"
not_found:
page_not_found: "Pagina no encontrada"
nav:
sign_up: "Crear Cuenta"
log_in: "Entrar"
log_out: "Salir"
play: "Jugar"
editor: "Editor"
blog: "Blog"
forum: "Foro"
admin: "Admin"
home: "Inicio"
contribute: "Contribuir"
legal: "Legal"
about: "Sobre"
contact: "Contacto"
twitter_follow: "Seguir"
forms:
name: "Nombre"
email: "Email"
message: "Mensaje"
cancel: "Cancelar"
login:
log_in: "Iniciar sesión"
sign_up: "crear nueva cuenta"
or: ", o "
recover: "recuperar cuenta"
signup:
description: "Es gratis. Solo necesitas un par de cosas y estarás listo para comenzar:"
email_announcements: "Recibe noticias por email"
coppa: "más de 13 años o fuera de los Estados Unidos"
coppa_why: "(¿Por qué?)"
creating: "Creando Cuenta..."
sign_up: "Registrarse"
or: "o "
log_in: "Inicia sesión con tu contraseña"
home:
slogan: "Aprende a programar en JavaScript jugando"
no_ie: "¡Lo sentimos! CodeCombat no funciona en Internet Explorer 9 o versiones anteriores."
no_mobile: "¡CodeCombat no fue diseñado para dispositivos móviles y quizás no funcione!"
play: "Jugar"
play:
choose_your_level: "Elige tu nivel"
adventurer_prefix: "Puedes saltar a cualquier nivel de abajo, o discutir los niveles en "
adventurer_forum: "el foro del aventurero"
adventurer_suffix: "."
campaign_beginner: "Campaña para principiantes"
campaign_beginner_description: "... en la que aprendes la hechicería de la programación."
campaign_dev: "Niveles aleatorios más difíciles"
campaign_dev_description: "... en los que aprendes sobre la interfaz mientras haces algo un poco más difícil."
campaign_multiplayer: "Arenas Multijugador"
campaign_multiplayer_description: "... en las que programas cara-a-cara contra otros jugadores."
campaign_player_created: "Creados-Por-Jugadores"
campaign_player_created_description: "... en los que luchas contra la creatividad de tus compañeros <a href=\"/contribute#artisan\">Hechiceros Artesanales</a>."
level_difficulty: "Dificultad: "
contact:
contact_us: "PI:NAME:<NAME>END_PIa a CodeCombat"
welcome: "¡Qué bueno es escucharte! Usa este formulario para enviarnos un mensaje"
contribute_prefix: "¡Si estas interesado en contribuir, chequea nuestra "
contribute_page: "página de contribución"
contribute_suffix: "!"
forum_prefix: "Para cualquier cosa pública, por favor prueba "
forum_page: "nuestro foro"
forum_suffix: "en su lugar."
sending: "Enviando..."
send: "Enviar Comentario"
diplomat_suggestion:
title: "¡Ayuda a traducir CodeCombat!"
sub_heading: "Necesitamos tus habilidades de idioma."
pitch_body: "Desarrollamos CodeCombat en inglés, pero ya tenemos jugadores por todo el mundo. Muchos de ellos quieren jugar en español pero no hablan inglés, así que si puedes hablar ambos, por favor considera registrarte pare ser un Diplomático y ayudar a traducir tanto el sitio de CodeCombat como todos los niveles al español."
missing_translations: "Hasta que podamos traducir todo al español, verás inglés cuando el español no esté disponible."
learn_more: "Aprende más sobre ser un Diplomático"
subscribe_as_diplomat: "Suscribete como un Diplomático"
account_settings:
title: "Configuración de la Cuenta"
not_logged_in: "Inicia sesión o crea una cuenta para cambiar tu configuración."
autosave: "Cambios Guardados Automáticamente"
me_tab: "Yo"
picture_tab: "Imagen"
wizard_tab: "Hechicero"
password_tab: "Contraseña"
emails_tab: "Correos"
language_tab: "Lenguaje"
gravatar_select: "Seleccione que foto de Gravatar usar"
gravatar_add_photos: "Añadir imágenes en miniatura y fotos a una cuenta de Gravatar para su correo electrónico para elegir una imagen."
gravatar_add_more_photos: "Añada más fotos a su cuenta de Gravatar para accederlas aquí."
wizard_color: "Color de Ropas del Hechicero"
new_password: "PI:PASSWORD:<PASSWORD>END_PI"
new_password_verify: "PI:PASSWORD:<PASSWORD>END_PI"
email_subscriptions: "Suscripciones de Email"
email_announcements: "Noticias"
email_announcements_description: "Recibe correos electrónicos con las últimas noticias y desarrollos de CodeCombat."
contributor_emails: "Emails Clase Contribuyente"
contribute_prefix: "¡Estamos buscando gente que se una a nuestro grupo! Echa un vistazo a la "
contribute_page: "página de contribución"
contribute_suffix: "para averiguar más."
email_toggle: "Activar Todo"
language: "Lenguaje"
saving: "Guardando..."
error_saving: "Error al Guardar"
saved: "Cambios Guardados"
password_mismatch: "La contraseña no coincide."
account_profile:
edit_settings: "Editar Configuración"
profile_for_prefix: "Perfil para "
profile_for_suffix: ""
profile: "Perfil"
user_not_found: "Usuario no encontrado. ¿URL correcta?"
gravatar_not_found_mine: "No hemos podido encontrar tu perfil asociado con "
gravatar_signup_prefix: "Registratre en"
gravatar_signup_suffix: "¡Para ponerte en marcha!"
gravatar_not_found_other: "Por desgracia, no hay ningún perfil asociado con la dirección de correo electrónico de esta persona."
gravatar_contact: "Contacto"
gravatar_websites: "Sitios Web"
gravatar_accounts: "Como puede verse en"
gravatar_profile_link: "Perfil Gravatar Completo"
play_level:
level_load_error: "El nivel no puede ser cargado."
done: "Listo"
grid: "Cuadricula"
customize_wizard: "Personalizar Hechicero"
home: "Inicio"
guide: "Guia"
multiplayer: "Multijugador"
restart: "Reiniciar"
goals: "Objetivos"
action_timeline: "Cronologia de Accion"
click_to_select: "Has click en una unidad para seleccionarla."
reload_title: "¿Recargar Todo el Codigo?"
reload_really: "¿Estas seguro de que quieres empezar este nivel desde el principio?"
reload_confirm: "Recargar Todo"
victory_title_prefix: "¡"
victory_title_suffix: " Completo!"
victory_sign_up: "Registrate para recibir actualizaciones"
victory_sign_up_poke: "¿Quieres recibir las ultimas noticias por correo? ¡Crea una cuenta gratuita y te mantendremos informado!"
victory_rate_the_level: "Valora el nivel: "
victory_play_next_level: "Jugar Próximo Nivel"
victory_go_home: "Ir al Inicio"
victory_review: "¡Cuéntanos más!"
victory_hour_of_code_done: "¿Has acabado?"
victory_hour_of_code_done_yes: "¡Si, he terminado con mi Hora de Código!"
multiplayer_title: "Configuración de Multijugador"
multiplayer_link_description: "Da este enlace a cualquiera para que se te una."
multiplayer_hint_label: "Consejo:"
multiplayer_hint: " Cliquea el enlace para seleccionar todo, luego presiona ⌘-C o Ctrl-C para copiar el enlace."
multiplayer_coming_soon: "¡Más características de multijugador por venir!"
guide_title: "Guía"
tome_minion_spells: "Hechizos de tus Secuaces"
tome_read_only_spells: "Hechizos de Sólo Lectura"
tome_other_units: "Otras Unidades"
tome_cast_button_castable: "Invocable"
tome_cast_button_casting: "Invocando"
tome_cast_button_cast: "Invocar"
tome_autocast_delay: "Demora de Autolanzamiento"
tome_autocast_1: "1 segundo"
tome_autocast_3: "3 segundos"
tome_autocast_5: "5 segundos"
tome_autocast_manual: "Manual"
tome_select_spell: "Selecciona un Hechizo"
tome_select_a_thang: "Selecciona Alguien para "
tome_available_spells: "Hechizos Disponibles"
hud_continue: "Continuar (presionar shift+space)"
|
[
{
"context": "Config:\n riak_core:\n http:\n \"127.0.0.1\": {atom: 9000}\n handoff_port: 9001\n ",
"end": 433,
"score": 0.9997694492340088,
"start": 424,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": "d: {atom: \"riak_kv_test_backend\"}\n... | src/test_server.coffee | geeklist/riak-js2 | 1 | sys = require 'sys'
{spawn} = require 'child_process'
fs = require 'fs'
path = require 'path'
EventEmitter = require('events').EventEmitter
Utils = require './utils'
erlangPath = path.normalize("#{__dirname}/../erl_src")
tempPath = path.normalize("#{process.cwd()}/.riaktest")
# Ported from the Ruby riak-client
class TestServer extends EventEmitter
@defaults =
appConfig:
riak_core:
http:
"127.0.0.1": {atom: 9000}
handoff_port: 9001
ring_creation_size: 64
riak_kv:
storage_backend: {atom: "riak_kv_test_backend"}
pb_ip: "127.0.0.1"
pb_port: 9002
map_js_vm_count: 8
reduce_js_vm_count: 6
hook_js_vm_count: 2
js_max_vm_mem: 8
js_thread_stack: 16
riak_kv_stat: true
luwak:
enabled: false
sasl:
errlog_type: {atom: "error"}
vmArgs:
"-name": "riaktest#{Math.floor(Math.random()*100000000000)}@127.0.0.1"
"-setcookie": "riak-js-test"
"+K": true
"+A": 64
"-smp": "enable"
"-env ERL_MAX_PORTS": 4096
"-env ERL_FULLSWEEP_AFTER": 0
"-pa": erlangPath
tempDir: tempPath
constructor: (options) ->
@options = Utils.mixin true, {}, TestServer.defaults, options
@options.appConfig.riak_core.ring_state_dir = "#{@options.tempDir}/data/ring"
@options.binDir = path.normalize(@options.binDir)
@erlangPrompt = new RegExp("^.#{@options.vmArgs['-name']}.\\d+>", "m")
prepare: (callback) ->
if @prepared
callback() if callback
else
@createTempDirectories =>
@riakScript = "#{@temp_bin}/riak"
@writeRiakScript =>
@writeVmArgs =>
@writeAppConfig =>
@prepared = true
callback() if callback
start: (callback) ->
if @started
callback() if callback
else if @prepared and @listeners('erlangPrompt').length is 0
setStarted = =>
@started = true
callback() if callback
@once 'erlangPrompt', setStarted
@console = spawn(@riakScript, ["console"])
@console.stdout.setEncoding("ascii")
@console.stderr.setEncoding("ascii")
# do the work of what we get from expect() in Ruby
@console.stdout.on 'data', (data) =>
unless data.search(@erlangPrompt) is -1
@emit('erlangPrompt')
if @options.debug
@console.stderr.on 'data', sys.debug
@console.stdout.on 'data', sys.debug
process.on 'exit', =>
@console.kill('SIGKILL') if @console
@registerStop()
stop: (callback) ->
if not @started and callback
callback()
if @started and @listeners('erlangPrompt').length is 0
@console.on 'exit', callback if callback
@console.kill('SIGHUP')
@registerStop()
clear: (callback) ->
if @started and @listeners('erlangPrompt').length is 0
setStarted = =>
@started = true
callback() if callback
sendReset = =>
@once 'erlangPrompt', setStarted
@started = false
@console.stdin.write("riak_kv_test_backend:reset().\n", "ascii")
@once 'erlangPrompt', sendReset
@console.stdin.write("ok.\n", "ascii")
registerStop: ->
@removeAllListeners('erlangPrompt')
delete @console
@started = false
createTempDirectories: (callback) ->
subdirs = for dir in ['bin', 'etc', 'log', 'data', 'pipe']
this["temp_#{dir}"] = path.normalize("#{@options.tempDir}/#{dir}")
subdirs.unshift @options.tempDir
createDir = =>
if subdirs.length is 0
callback()
else
currDir = subdirs.shift()
fs.mkdir currDir, 0o0700, createDir
rmrf = spawn("rm", ["-rf", @options.tempDir])
rmrf.on 'exit', createDir
writeRiakScript: (callback) ->
outScript = fs.createWriteStream @riakScript, {encoding: 'utf8', mode: 0o0700}
inScript = fs.createReadStream "#{@options.binDir}/riak", encoding: 'utf8'
inScript.on 'error', (err) ->
sys.debug "error reading from #{inScript.path}:\n#{sys.inspect(err, true, null)}"
throw err
outScript.on 'error', (err) ->
sys.debug "error writing to #{outScript.path} script:\n#{sys.inspect(err, true, null)}"
throw err
outScript.on 'drain', -> inScript.resume()
inScript.on 'data', (data) =>
data = data.toString('utf8') if Buffer.isBuffer(data)
data = data.replace(/(RUNNER_SCRIPT_DIR=)(.*)$/m, "$1#{@temp_bin}")
data = data.replace(/(RUNNER_ETC_DIR=)(.*)$/m, "$1#{@temp_etc}")
data = data.replace(/(RUNNER_USER=)(.*)$/m, "$1")
data = data.replace(/(RUNNER_LOG_DIR=)(.*)$/m, "$1#{@temp_log}")
data = data.replace(/(PIPE_DIR=)(.*)$/m, "$1#{@temp_pipe}")
data = data.replace("RUNNER_BASE_DIR=${RUNNER_SCRIPT_DIR%/*}", "RUNNER_BASE_DIR=#{path.normalize(@options.binDir + '/..')}")
outScript.write data
inScript.pause()
inScript.on 'end', ->
outScript.end()
callback() if callback
writeVmArgs: (callback) ->
vmArgs = for own option, value of @options.vmArgs
"#{option} #{value}"
vmArgs = vmArgs.join("\n")
fs.writeFile("#{@temp_etc}/vm.args", vmArgs, callback)
writeAppConfig: (callback) ->
appConfig = @toErlangConfig(@options.appConfig) + "."
fs.writeFile("#{@temp_etc}/app.config", appConfig, callback)
# Converts an object into app.config-compatible Erlang terms
toErlangConfig: (object, depth = 1) ->
padding = (' ' for num in [1..depth]).join ""
parentPadding = if depth <= 1
''
else
(' ' for num in [1..(depth-1)]).join ""
values = for own key, value of object
if value.atom?
printable = value.atom
else if typeof value is 'string'
printable = "\"#{value}\""
else if value instanceof Object
printable = @toErlangConfig(value, depth+1)
else
printable = value.toString()
if !key.match(/^[a-z][a-zA-Z0-9@_]*$/)
key = "\"#{key}\""
"{#{key}, #{printable}}"
values = values.join(",\n#{padding}")
"[\n#{padding}#{values}\n#{parentPadding}]"
# Node v0.2.6 doesn't have EventEmitter.once
once: (type, listener) ->
callback = =>
@removeListener(type, callback)
listener.apply(this, arguments)
@on type, callback
this
module.exports = TestServer
| 168862 | sys = require 'sys'
{spawn} = require 'child_process'
fs = require 'fs'
path = require 'path'
EventEmitter = require('events').EventEmitter
Utils = require './utils'
erlangPath = path.normalize("#{__dirname}/../erl_src")
tempPath = path.normalize("#{process.cwd()}/.riaktest")
# Ported from the Ruby riak-client
class TestServer extends EventEmitter
@defaults =
appConfig:
riak_core:
http:
"127.0.0.1": {atom: 9000}
handoff_port: 9001
ring_creation_size: 64
riak_kv:
storage_backend: {atom: "riak_kv_test_backend"}
pb_ip: "127.0.0.1"
pb_port: 9002
map_js_vm_count: 8
reduce_js_vm_count: 6
hook_js_vm_count: 2
js_max_vm_mem: 8
js_thread_stack: 16
riak_kv_stat: true
luwak:
enabled: false
sasl:
errlog_type: {atom: "error"}
vmArgs:
"-name": "riaktest#{Math.floor(Math.random()*100000000000)}@127.0.0.1"
"-setcookie": "riak-js-test"
"+K": true
"+A": 64
"-smp": "enable"
"-env ERL_MAX_PORTS": 4096
"-env ERL_FULLSWEEP_AFTER": 0
"-pa": erlangPath
tempDir: tempPath
constructor: (options) ->
@options = Utils.mixin true, {}, TestServer.defaults, options
@options.appConfig.riak_core.ring_state_dir = "#{@options.tempDir}/data/ring"
@options.binDir = path.normalize(@options.binDir)
@erlangPrompt = new RegExp("^.#{@options.vmArgs['-name']}.\\d+>", "m")
prepare: (callback) ->
if @prepared
callback() if callback
else
@createTempDirectories =>
@riakScript = "#{@temp_bin}/riak"
@writeRiakScript =>
@writeVmArgs =>
@writeAppConfig =>
@prepared = true
callback() if callback
start: (callback) ->
if @started
callback() if callback
else if @prepared and @listeners('erlangPrompt').length is 0
setStarted = =>
@started = true
callback() if callback
@once 'erlangPrompt', setStarted
@console = spawn(@riakScript, ["console"])
@console.stdout.setEncoding("ascii")
@console.stderr.setEncoding("ascii")
# do the work of what we get from expect() in Ruby
@console.stdout.on 'data', (data) =>
unless data.search(@erlangPrompt) is -1
@emit('erlangPrompt')
if @options.debug
@console.stderr.on 'data', sys.debug
@console.stdout.on 'data', sys.debug
process.on 'exit', =>
@console.kill('SIGKILL') if @console
@registerStop()
stop: (callback) ->
if not @started and callback
callback()
if @started and @listeners('erlangPrompt').length is 0
@console.on 'exit', callback if callback
@console.kill('SIGHUP')
@registerStop()
clear: (callback) ->
if @started and @listeners('erlangPrompt').length is 0
setStarted = =>
@started = true
callback() if callback
sendReset = =>
@once 'erlangPrompt', setStarted
@started = false
@console.stdin.write("riak_kv_test_backend:reset().\n", "ascii")
@once 'erlangPrompt', sendReset
@console.stdin.write("ok.\n", "ascii")
registerStop: ->
@removeAllListeners('erlangPrompt')
delete @console
@started = false
createTempDirectories: (callback) ->
subdirs = for dir in ['bin', 'etc', 'log', 'data', 'pipe']
this["temp_#{dir}"] = path.normalize("#{@options.tempDir}/#{dir}")
subdirs.unshift @options.tempDir
createDir = =>
if subdirs.length is 0
callback()
else
currDir = subdirs.shift()
fs.mkdir currDir, 0o0700, createDir
rmrf = spawn("rm", ["-rf", @options.tempDir])
rmrf.on 'exit', createDir
writeRiakScript: (callback) ->
outScript = fs.createWriteStream @riakScript, {encoding: 'utf8', mode: 0o0700}
inScript = fs.createReadStream "#{@options.binDir}/riak", encoding: 'utf8'
inScript.on 'error', (err) ->
sys.debug "error reading from #{inScript.path}:\n#{sys.inspect(err, true, null)}"
throw err
outScript.on 'error', (err) ->
sys.debug "error writing to #{outScript.path} script:\n#{sys.inspect(err, true, null)}"
throw err
outScript.on 'drain', -> inScript.resume()
inScript.on 'data', (data) =>
data = data.toString('utf8') if Buffer.isBuffer(data)
data = data.replace(/(RUNNER_SCRIPT_DIR=)(.*)$/m, "$1#{@temp_bin}")
data = data.replace(/(RUNNER_ETC_DIR=)(.*)$/m, "$1#{@temp_etc}")
data = data.replace(/(RUNNER_USER=)(.*)$/m, "$1")
data = data.replace(/(RUNNER_LOG_DIR=)(.*)$/m, "$1#{@temp_log}")
data = data.replace(/(PIPE_DIR=)(.*)$/m, "$1#{@temp_pipe}")
data = data.replace("RUNNER_BASE_DIR=${RUNNER_SCRIPT_DIR%/*}", "RUNNER_BASE_DIR=#{path.normalize(@options.binDir + '/..')}")
outScript.write data
inScript.pause()
inScript.on 'end', ->
outScript.end()
callback() if callback
writeVmArgs: (callback) ->
vmArgs = for own option, value of @options.vmArgs
"#{option} #{value}"
vmArgs = vmArgs.join("\n")
fs.writeFile("#{@temp_etc}/vm.args", vmArgs, callback)
writeAppConfig: (callback) ->
appConfig = @toErlangConfig(@options.appConfig) + "."
fs.writeFile("#{@temp_etc}/app.config", appConfig, callback)
# Converts an object into app.config-compatible Erlang terms
toErlangConfig: (object, depth = 1) ->
padding = (' ' for num in [1..depth]).join ""
parentPadding = if depth <= 1
''
else
(' ' for num in [1..(depth-1)]).join ""
values = for own key, value of object
if value.atom?
printable = value.atom
else if typeof value is 'string'
printable = "\"#{value}\""
else if value instanceof Object
printable = @toErlangConfig(value, depth+1)
else
printable = value.toString()
if !key.match(/^[a-z][a-zA-Z0-9@_]*$/)
key = "\"#{key<KEY>}\""
"{#{key}, #{printable}}"
values = values.join(",\n#{padding}")
"[\n#{padding}#{values}\n#{parentPadding}]"
# Node v0.2.6 doesn't have EventEmitter.once
once: (type, listener) ->
callback = =>
@removeListener(type, callback)
listener.apply(this, arguments)
@on type, callback
this
module.exports = TestServer
| true | sys = require 'sys'
{spawn} = require 'child_process'
fs = require 'fs'
path = require 'path'
EventEmitter = require('events').EventEmitter
Utils = require './utils'
erlangPath = path.normalize("#{__dirname}/../erl_src")
tempPath = path.normalize("#{process.cwd()}/.riaktest")
# Ported from the Ruby riak-client
class TestServer extends EventEmitter
@defaults =
appConfig:
riak_core:
http:
"127.0.0.1": {atom: 9000}
handoff_port: 9001
ring_creation_size: 64
riak_kv:
storage_backend: {atom: "riak_kv_test_backend"}
pb_ip: "127.0.0.1"
pb_port: 9002
map_js_vm_count: 8
reduce_js_vm_count: 6
hook_js_vm_count: 2
js_max_vm_mem: 8
js_thread_stack: 16
riak_kv_stat: true
luwak:
enabled: false
sasl:
errlog_type: {atom: "error"}
vmArgs:
"-name": "riaktest#{Math.floor(Math.random()*100000000000)}@127.0.0.1"
"-setcookie": "riak-js-test"
"+K": true
"+A": 64
"-smp": "enable"
"-env ERL_MAX_PORTS": 4096
"-env ERL_FULLSWEEP_AFTER": 0
"-pa": erlangPath
tempDir: tempPath
constructor: (options) ->
@options = Utils.mixin true, {}, TestServer.defaults, options
@options.appConfig.riak_core.ring_state_dir = "#{@options.tempDir}/data/ring"
@options.binDir = path.normalize(@options.binDir)
@erlangPrompt = new RegExp("^.#{@options.vmArgs['-name']}.\\d+>", "m")
prepare: (callback) ->
if @prepared
callback() if callback
else
@createTempDirectories =>
@riakScript = "#{@temp_bin}/riak"
@writeRiakScript =>
@writeVmArgs =>
@writeAppConfig =>
@prepared = true
callback() if callback
start: (callback) ->
if @started
callback() if callback
else if @prepared and @listeners('erlangPrompt').length is 0
setStarted = =>
@started = true
callback() if callback
@once 'erlangPrompt', setStarted
@console = spawn(@riakScript, ["console"])
@console.stdout.setEncoding("ascii")
@console.stderr.setEncoding("ascii")
# do the work of what we get from expect() in Ruby
@console.stdout.on 'data', (data) =>
unless data.search(@erlangPrompt) is -1
@emit('erlangPrompt')
if @options.debug
@console.stderr.on 'data', sys.debug
@console.stdout.on 'data', sys.debug
process.on 'exit', =>
@console.kill('SIGKILL') if @console
@registerStop()
stop: (callback) ->
if not @started and callback
callback()
if @started and @listeners('erlangPrompt').length is 0
@console.on 'exit', callback if callback
@console.kill('SIGHUP')
@registerStop()
clear: (callback) ->
if @started and @listeners('erlangPrompt').length is 0
setStarted = =>
@started = true
callback() if callback
sendReset = =>
@once 'erlangPrompt', setStarted
@started = false
@console.stdin.write("riak_kv_test_backend:reset().\n", "ascii")
@once 'erlangPrompt', sendReset
@console.stdin.write("ok.\n", "ascii")
registerStop: ->
@removeAllListeners('erlangPrompt')
delete @console
@started = false
createTempDirectories: (callback) ->
subdirs = for dir in ['bin', 'etc', 'log', 'data', 'pipe']
this["temp_#{dir}"] = path.normalize("#{@options.tempDir}/#{dir}")
subdirs.unshift @options.tempDir
createDir = =>
if subdirs.length is 0
callback()
else
currDir = subdirs.shift()
fs.mkdir currDir, 0o0700, createDir
rmrf = spawn("rm", ["-rf", @options.tempDir])
rmrf.on 'exit', createDir
writeRiakScript: (callback) ->
outScript = fs.createWriteStream @riakScript, {encoding: 'utf8', mode: 0o0700}
inScript = fs.createReadStream "#{@options.binDir}/riak", encoding: 'utf8'
inScript.on 'error', (err) ->
sys.debug "error reading from #{inScript.path}:\n#{sys.inspect(err, true, null)}"
throw err
outScript.on 'error', (err) ->
sys.debug "error writing to #{outScript.path} script:\n#{sys.inspect(err, true, null)}"
throw err
outScript.on 'drain', -> inScript.resume()
inScript.on 'data', (data) =>
data = data.toString('utf8') if Buffer.isBuffer(data)
data = data.replace(/(RUNNER_SCRIPT_DIR=)(.*)$/m, "$1#{@temp_bin}")
data = data.replace(/(RUNNER_ETC_DIR=)(.*)$/m, "$1#{@temp_etc}")
data = data.replace(/(RUNNER_USER=)(.*)$/m, "$1")
data = data.replace(/(RUNNER_LOG_DIR=)(.*)$/m, "$1#{@temp_log}")
data = data.replace(/(PIPE_DIR=)(.*)$/m, "$1#{@temp_pipe}")
data = data.replace("RUNNER_BASE_DIR=${RUNNER_SCRIPT_DIR%/*}", "RUNNER_BASE_DIR=#{path.normalize(@options.binDir + '/..')}")
outScript.write data
inScript.pause()
inScript.on 'end', ->
outScript.end()
callback() if callback
writeVmArgs: (callback) ->
vmArgs = for own option, value of @options.vmArgs
"#{option} #{value}"
vmArgs = vmArgs.join("\n")
fs.writeFile("#{@temp_etc}/vm.args", vmArgs, callback)
writeAppConfig: (callback) ->
appConfig = @toErlangConfig(@options.appConfig) + "."
fs.writeFile("#{@temp_etc}/app.config", appConfig, callback)
# Converts an object into app.config-compatible Erlang terms
toErlangConfig: (object, depth = 1) ->
padding = (' ' for num in [1..depth]).join ""
parentPadding = if depth <= 1
''
else
(' ' for num in [1..(depth-1)]).join ""
values = for own key, value of object
if value.atom?
printable = value.atom
else if typeof value is 'string'
printable = "\"#{value}\""
else if value instanceof Object
printable = @toErlangConfig(value, depth+1)
else
printable = value.toString()
if !key.match(/^[a-z][a-zA-Z0-9@_]*$/)
key = "\"#{keyPI:KEY:<KEY>END_PI}\""
"{#{key}, #{printable}}"
values = values.join(",\n#{padding}")
"[\n#{padding}#{values}\n#{parentPadding}]"
# Node v0.2.6 doesn't have EventEmitter.once
once: (type, listener) ->
callback = =>
@removeListener(type, callback)
listener.apply(this, arguments)
@on type, callback
this
module.exports = TestServer
|
[
{
"context": "cookie = find cookies, (cookie) -> cookie.key == 'SAPISID'\n return cookie?.value\n\nMAX_RETRIES = 5\n\n\nmodu",
"end": 2439,
"score": 0.9773540496826172,
"start": 2432,
"tag": "KEY",
"value": "SAPISID"
},
{
"context": "en ->\n wait(1000) # https://github.... | Program Files/yakyak-win32-x64/resources/app/node_modules/hangupsjs/src/channel.coffee | Austcool-Walker/YakYak-Installer-amd64 | 0 | require('fnuc').expose global
{CookieJar} = require 'tough-cookie'
request = require 'request'
crypto = require 'crypto'
log = require 'bog'
Q = require 'q'
{req, find, wait, NetworkError, fmterr} = require './util'
PushDataParser = require './pushdataparser'
ORIGIN_URL = 'https://talkgadget.google.com'
CHANNEL_URL_PREFIX = 'https://0.client-channel.google.com/client-channel'
UA = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36
(KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36'
op = (o) -> "#{CHANNEL_URL_PREFIX}/#{o}"
isUnknownSID = (res) -> res.statusCode == 400 and res.statusMessage == 'Unknown SID'
# error token
ABORT = {}
# typical long poll
#
# 2015-05-02 14:44:19 DEBUG found sid/gsid 5ECBB7A224ED4276 XOqP3EYTfy6z0eGEr9OD5A
# 2015-05-02 14:44:19 DEBUG long poll req
# 2015-05-02 14:44:19 DEBUG long poll response 200 OK
# 2015-05-02 14:44:19 DEBUG got msg [[[2,["noop"]]]]
# 2015-05-02 14:44:19 DEBUG got msg [[[3,[{"p":"{\"1\":{\"1\":{\"1\":{\"1\":1,\"2\":1}},\"4\":\"1430570659159\",\"5\":\"S1\"},\"3\":{\"1\":{\"1\":1},\"2\":\"lcsw_hangouts881CED94\"}}"}]]]]
# 2015-05-02 14:44:49 DEBUG got msg [[[4,["noop"]]]]
# 2015-05-02 14:45:14 DEBUG got msg [[[5,["noop"]]]]
# ...
# 2015-05-02 14:47:56 DEBUG got msg [[[11,["noop"]]]]
# 2015-05-02 14:48:21 DEBUG got msg [[[12,["noop"]]]]
# 2015-05-02 14:48:21 DEBUG long poll end
# 2015-05-02 14:48:21 DEBUG long poll req
# 2015-05-02 14:48:21 DEBUG long poll response 200 OK
# 2015-05-02 14:48:21 DEBUG got msg [[[13,["noop"]]]]
# ...
# 2015-05-02 15:31:39 DEBUG long poll error { [Error: ESOCKETTIMEDOUT] code: 'ESOCKETTIMEDOUT' }
# 2015-05-02 15:31:39 DEBUG poll error { [Error: ESOCKETTIMEDOUT] code: 'ESOCKETTIMEDOUT' }
# 2015-05-02 15:31:39 DEBUG backing off for 2 ms
# 2015-05-02 15:31:39 DEBUG long poll end
# 2015-05-02 15:31:39 DEBUG long poll req
# 2015-05-02 15:31:39 DEBUG long poll response 200 OK
# 2015-05-02 15:31:39 DEBUG got msg [[[121,["noop"]]]]
authhead = (sapisid, msec, origin) ->
auth_string = "#{msec} #{sapisid} #{origin}"
auth_hash = crypto.createHash('sha1').update(auth_string).digest 'hex'
return {
authorization: "SAPISIDHASH #{msec}_#{auth_hash}"
'x-origin': origin
'x-goog-authuser': '0'
}
sapisidof = (jarstore) ->
jar = new CookieJar jarstore
cookies = jar.getCookiesSync ORIGIN_URL
cookie = find cookies, (cookie) -> cookie.key == 'SAPISID'
return cookie?.value
MAX_RETRIES = 5
module.exports = class Channel
constructor: (@jarstore, @proxy) ->
@pushParser = new PushDataParser()
fetchPvt: =>
log.debug 'fetching pvt'
opts =
method: 'GET'
uri: "#{ORIGIN_URL}/talkgadget/_/extension-start"
jar: request.jar @jarstore
withCredentials: true
req(opts).then (res) =>
data = JSON.parse res.body
log.debug 'found pvt token', data[1]
data[1]
.fail (err) ->
log.info 'fetchPvt failed', fmterr(err)
Q.reject err
authHeaders: ->
sapisid = sapisidof @jarstore
unless sapisid
log.warn 'no SAPISID cookie'
return null
authhead sapisid, Date.now(), ORIGIN_URL
fetchSid: =>
auth = @authHeaders()
return Q.reject new Error("No auth headers") unless auth
Q().then =>
opts =
method: 'POST'
uri: op 'channel/bind'
jar: request.jar @jarstore
qs:
VER: 8
RID: 81187
ctype: 'hangouts'
form:
count: 0
headers: auth
encoding: null # get body as buffer
withCredentials: true
req(opts).then (res) ->
# Example format (after parsing JS):
# [ [0,["c","SID_HERE","",8]],
# [1,[{"gsid":"GSESSIONID_HERE"}]]]
if res.statusCode == 200
p = new PushDataParser(res.body)
line = p.pop()
[_,[_,sid]] = line[0]
[_,[{gsid}]] = line[1]
log.debug 'found sid/gsid', sid, gsid
return {sid,gsid}
else
log.warn 'failed to get sid', res.statusCode, res.body
.fail (err) ->
log.info 'fetchSid failed', fmterr(err)
Q.reject err
# get next messages from channel
getLines: =>
@start() unless @running
@pushParser.allLines()
# start polling
start: =>
retries = MAX_RETRIES
@running = true
@sid = null # ensures we get a new sid
@gsid = null
@subscribed = false
run = =>
# graceful stop of polling
return unless @running
@poll(retries).then ->
# XXX we only reset to MAX_RETRIES after a full ended
# poll. this means in bad network conditions we get an
# edge case where retries never reset despite getting
# (interrupted) good polls. perhaps move retries to
# instance var?
retries = MAX_RETRIES # reset on success
run()
.fail (err) =>
# abort token is not an error
return if err == ABORT
retries--
log.debug 'poll error', err
if retries > 0
run()
else
@running = false
# resetting with error makes pushParser.allLines()
# resolve with that error, which in turn makes
# @getLines() propagate the error out.
@pushParser.reset(err)
run()
return null
# gracefully stop polling
stop: =>
log.debug 'channel stop'
# stop looping
@running = false
# this releases the @getLines() promise
@pushParser?.reset?()
# abort current request
@currentReq?.abort?()
poll: (retries) =>
Q().then ->
backoffTime = 2 * (MAX_RETRIES - retries) * 1000
log.debug 'backing off for', backoffTime, 'ms' if backoffTime
wait backoffTime
.then =>
Q.reject ABORT unless @running
.then =>
unless @sid
@fetchSid().then (o) =>
merge this, o # set on this
@pushParser.reset() # ensure no half data
.then =>
@reqpoll()
# long polling
reqpoll: => Q.Promise (rs, rj) =>
log.debug 'long poll req'
opts =
method: 'GET'
uri: op 'channel/bind'
jar: request.jar @jarstore
qs:
VER: 8
gsessionid: @gsid
RID: 'rpc'
t: 1
SID: @sid
CI: 0
ctype: 'hangouts'
TYPE: 'xmlhttp'
headers: @authHeaders()
encoding: null # get body as buffer
timeout: 30000 # 30 seconds timeout in connect attempt
withCredentials: true
ok = false
@currentReq = request(opts).on 'response', (res) =>
log.debug 'long poll response', res.statusCode, res.statusMessage
if res.statusCode == 200
return ok = true
else if isUnknownSID(res)
ok = false
log.debug 'sid became invalid'
@sid = null
@gsid = null
@subscribed = false
rj NetworkError.forRes(res)
.on 'data', (chunk) =>
if ok
# log.debug 'long poll chunk\n' + require('hexy').hexy(chunk)
@pushParser.parse chunk
# subscribe on first data received
@subscribe() unless @subscribed
.on 'error', (err) =>
log.debug 'long poll error', err
rj err
.on 'end', ->
log.debug 'long poll end'
rs()
# Subscribes the channel to receive relevant events. Only needs to
# be called when a new channel (SID/gsessionid) is opened.
subscribe: =>
return if @subscribed
@subscribed = true
Q().then ->
wait(1000) # https://github.com/tdryer/hangups/issues/58
.then =>
timestamp = Date.now() * 1000
services = ['babel', 'babel_presence_last_seen']
mapList = for service in services
JSON.stringify({"3": {"1": {"1": service}}})
formMap = {count: mapList.length, ofs: 0}
for el, ix in mapList
formMap["req#{ix}_p"] = el
opts =
method: 'POST'
uri: op 'channel/bind'
jar: request.jar @jarstore
proxy: @proxy
qs:
VER: 8
RID: 81188
ctype: 'hangouts'
gsessionid: @gsid
SID: @sid
headers: @authHeaders()
timeout: 30000 # 30 seconds timeout in connect attempt
form: formMap
withCredentials: true
req(opts)
.then (res) ->
if res.statusCode == 200
return log.debug 'subscribed channel'
else if isUnknownSID(res)
ok = false
log.debug 'sid became invalid'
@sid = null
@gsid = null
@subscribed = false
Q.reject NetworkError.forRes(res)
.fail (err) =>
log.info 'subscribe failed', fmterr(err)
@subscribed = false
Q.reject err
| 105207 | require('fnuc').expose global
{CookieJar} = require 'tough-cookie'
request = require 'request'
crypto = require 'crypto'
log = require 'bog'
Q = require 'q'
{req, find, wait, NetworkError, fmterr} = require './util'
PushDataParser = require './pushdataparser'
ORIGIN_URL = 'https://talkgadget.google.com'
CHANNEL_URL_PREFIX = 'https://0.client-channel.google.com/client-channel'
UA = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36
(KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36'
op = (o) -> "#{CHANNEL_URL_PREFIX}/#{o}"
isUnknownSID = (res) -> res.statusCode == 400 and res.statusMessage == 'Unknown SID'
# error token
ABORT = {}
# typical long poll
#
# 2015-05-02 14:44:19 DEBUG found sid/gsid 5ECBB7A224ED4276 XOqP3EYTfy6z0eGEr9OD5A
# 2015-05-02 14:44:19 DEBUG long poll req
# 2015-05-02 14:44:19 DEBUG long poll response 200 OK
# 2015-05-02 14:44:19 DEBUG got msg [[[2,["noop"]]]]
# 2015-05-02 14:44:19 DEBUG got msg [[[3,[{"p":"{\"1\":{\"1\":{\"1\":{\"1\":1,\"2\":1}},\"4\":\"1430570659159\",\"5\":\"S1\"},\"3\":{\"1\":{\"1\":1},\"2\":\"lcsw_hangouts881CED94\"}}"}]]]]
# 2015-05-02 14:44:49 DEBUG got msg [[[4,["noop"]]]]
# 2015-05-02 14:45:14 DEBUG got msg [[[5,["noop"]]]]
# ...
# 2015-05-02 14:47:56 DEBUG got msg [[[11,["noop"]]]]
# 2015-05-02 14:48:21 DEBUG got msg [[[12,["noop"]]]]
# 2015-05-02 14:48:21 DEBUG long poll end
# 2015-05-02 14:48:21 DEBUG long poll req
# 2015-05-02 14:48:21 DEBUG long poll response 200 OK
# 2015-05-02 14:48:21 DEBUG got msg [[[13,["noop"]]]]
# ...
# 2015-05-02 15:31:39 DEBUG long poll error { [Error: ESOCKETTIMEDOUT] code: 'ESOCKETTIMEDOUT' }
# 2015-05-02 15:31:39 DEBUG poll error { [Error: ESOCKETTIMEDOUT] code: 'ESOCKETTIMEDOUT' }
# 2015-05-02 15:31:39 DEBUG backing off for 2 ms
# 2015-05-02 15:31:39 DEBUG long poll end
# 2015-05-02 15:31:39 DEBUG long poll req
# 2015-05-02 15:31:39 DEBUG long poll response 200 OK
# 2015-05-02 15:31:39 DEBUG got msg [[[121,["noop"]]]]
authhead = (sapisid, msec, origin) ->
auth_string = "#{msec} #{sapisid} #{origin}"
auth_hash = crypto.createHash('sha1').update(auth_string).digest 'hex'
return {
authorization: "SAPISIDHASH #{msec}_#{auth_hash}"
'x-origin': origin
'x-goog-authuser': '0'
}
sapisidof = (jarstore) ->
jar = new CookieJar jarstore
cookies = jar.getCookiesSync ORIGIN_URL
cookie = find cookies, (cookie) -> cookie.key == '<KEY>'
return cookie?.value
MAX_RETRIES = 5
module.exports = class Channel
constructor: (@jarstore, @proxy) ->
@pushParser = new PushDataParser()
fetchPvt: =>
log.debug 'fetching pvt'
opts =
method: 'GET'
uri: "#{ORIGIN_URL}/talkgadget/_/extension-start"
jar: request.jar @jarstore
withCredentials: true
req(opts).then (res) =>
data = JSON.parse res.body
log.debug 'found pvt token', data[1]
data[1]
.fail (err) ->
log.info 'fetchPvt failed', fmterr(err)
Q.reject err
authHeaders: ->
sapisid = sapisidof @jarstore
unless sapisid
log.warn 'no SAPISID cookie'
return null
authhead sapisid, Date.now(), ORIGIN_URL
fetchSid: =>
auth = @authHeaders()
return Q.reject new Error("No auth headers") unless auth
Q().then =>
opts =
method: 'POST'
uri: op 'channel/bind'
jar: request.jar @jarstore
qs:
VER: 8
RID: 81187
ctype: 'hangouts'
form:
count: 0
headers: auth
encoding: null # get body as buffer
withCredentials: true
req(opts).then (res) ->
# Example format (after parsing JS):
# [ [0,["c","SID_HERE","",8]],
# [1,[{"gsid":"GSESSIONID_HERE"}]]]
if res.statusCode == 200
p = new PushDataParser(res.body)
line = p.pop()
[_,[_,sid]] = line[0]
[_,[{gsid}]] = line[1]
log.debug 'found sid/gsid', sid, gsid
return {sid,gsid}
else
log.warn 'failed to get sid', res.statusCode, res.body
.fail (err) ->
log.info 'fetchSid failed', fmterr(err)
Q.reject err
# get next messages from channel
getLines: =>
@start() unless @running
@pushParser.allLines()
# start polling
start: =>
retries = MAX_RETRIES
@running = true
@sid = null # ensures we get a new sid
@gsid = null
@subscribed = false
run = =>
# graceful stop of polling
return unless @running
@poll(retries).then ->
# XXX we only reset to MAX_RETRIES after a full ended
# poll. this means in bad network conditions we get an
# edge case where retries never reset despite getting
# (interrupted) good polls. perhaps move retries to
# instance var?
retries = MAX_RETRIES # reset on success
run()
.fail (err) =>
# abort token is not an error
return if err == ABORT
retries--
log.debug 'poll error', err
if retries > 0
run()
else
@running = false
# resetting with error makes pushParser.allLines()
# resolve with that error, which in turn makes
# @getLines() propagate the error out.
@pushParser.reset(err)
run()
return null
# gracefully stop polling
stop: =>
log.debug 'channel stop'
# stop looping
@running = false
# this releases the @getLines() promise
@pushParser?.reset?()
# abort current request
@currentReq?.abort?()
poll: (retries) =>
Q().then ->
backoffTime = 2 * (MAX_RETRIES - retries) * 1000
log.debug 'backing off for', backoffTime, 'ms' if backoffTime
wait backoffTime
.then =>
Q.reject ABORT unless @running
.then =>
unless @sid
@fetchSid().then (o) =>
merge this, o # set on this
@pushParser.reset() # ensure no half data
.then =>
@reqpoll()
# long polling
reqpoll: => Q.Promise (rs, rj) =>
log.debug 'long poll req'
opts =
method: 'GET'
uri: op 'channel/bind'
jar: request.jar @jarstore
qs:
VER: 8
gsessionid: @gsid
RID: 'rpc'
t: 1
SID: @sid
CI: 0
ctype: 'hangouts'
TYPE: 'xmlhttp'
headers: @authHeaders()
encoding: null # get body as buffer
timeout: 30000 # 30 seconds timeout in connect attempt
withCredentials: true
ok = false
@currentReq = request(opts).on 'response', (res) =>
log.debug 'long poll response', res.statusCode, res.statusMessage
if res.statusCode == 200
return ok = true
else if isUnknownSID(res)
ok = false
log.debug 'sid became invalid'
@sid = null
@gsid = null
@subscribed = false
rj NetworkError.forRes(res)
.on 'data', (chunk) =>
if ok
# log.debug 'long poll chunk\n' + require('hexy').hexy(chunk)
@pushParser.parse chunk
# subscribe on first data received
@subscribe() unless @subscribed
.on 'error', (err) =>
log.debug 'long poll error', err
rj err
.on 'end', ->
log.debug 'long poll end'
rs()
# Subscribes the channel to receive relevant events. Only needs to
# be called when a new channel (SID/gsessionid) is opened.
subscribe: =>
return if @subscribed
@subscribed = true
Q().then ->
wait(1000) # https://github.com/tdryer/hangups/issues/58
.then =>
timestamp = Date.now() * 1000
services = ['babel', 'babel_presence_last_seen']
mapList = for service in services
JSON.stringify({"3": {"1": {"1": service}}})
formMap = {count: mapList.length, ofs: 0}
for el, ix in mapList
formMap["req#{ix}_p"] = el
opts =
method: 'POST'
uri: op 'channel/bind'
jar: request.jar @jarstore
proxy: @proxy
qs:
VER: 8
RID: 81188
ctype: 'hangouts'
gsessionid: @gsid
SID: @sid
headers: @authHeaders()
timeout: 30000 # 30 seconds timeout in connect attempt
form: formMap
withCredentials: true
req(opts)
.then (res) ->
if res.statusCode == 200
return log.debug 'subscribed channel'
else if isUnknownSID(res)
ok = false
log.debug 'sid became invalid'
@sid = null
@gsid = null
@subscribed = false
Q.reject NetworkError.forRes(res)
.fail (err) =>
log.info 'subscribe failed', fmterr(err)
@subscribed = false
Q.reject err
| true | require('fnuc').expose global
{CookieJar} = require 'tough-cookie'
request = require 'request'
crypto = require 'crypto'
log = require 'bog'
Q = require 'q'
{req, find, wait, NetworkError, fmterr} = require './util'
PushDataParser = require './pushdataparser'
ORIGIN_URL = 'https://talkgadget.google.com'
CHANNEL_URL_PREFIX = 'https://0.client-channel.google.com/client-channel'
UA = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36
(KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36'
op = (o) -> "#{CHANNEL_URL_PREFIX}/#{o}"
isUnknownSID = (res) -> res.statusCode == 400 and res.statusMessage == 'Unknown SID'
# error token
ABORT = {}
# typical long poll
#
# 2015-05-02 14:44:19 DEBUG found sid/gsid 5ECBB7A224ED4276 XOqP3EYTfy6z0eGEr9OD5A
# 2015-05-02 14:44:19 DEBUG long poll req
# 2015-05-02 14:44:19 DEBUG long poll response 200 OK
# 2015-05-02 14:44:19 DEBUG got msg [[[2,["noop"]]]]
# 2015-05-02 14:44:19 DEBUG got msg [[[3,[{"p":"{\"1\":{\"1\":{\"1\":{\"1\":1,\"2\":1}},\"4\":\"1430570659159\",\"5\":\"S1\"},\"3\":{\"1\":{\"1\":1},\"2\":\"lcsw_hangouts881CED94\"}}"}]]]]
# 2015-05-02 14:44:49 DEBUG got msg [[[4,["noop"]]]]
# 2015-05-02 14:45:14 DEBUG got msg [[[5,["noop"]]]]
# ...
# 2015-05-02 14:47:56 DEBUG got msg [[[11,["noop"]]]]
# 2015-05-02 14:48:21 DEBUG got msg [[[12,["noop"]]]]
# 2015-05-02 14:48:21 DEBUG long poll end
# 2015-05-02 14:48:21 DEBUG long poll req
# 2015-05-02 14:48:21 DEBUG long poll response 200 OK
# 2015-05-02 14:48:21 DEBUG got msg [[[13,["noop"]]]]
# ...
# 2015-05-02 15:31:39 DEBUG long poll error { [Error: ESOCKETTIMEDOUT] code: 'ESOCKETTIMEDOUT' }
# 2015-05-02 15:31:39 DEBUG poll error { [Error: ESOCKETTIMEDOUT] code: 'ESOCKETTIMEDOUT' }
# 2015-05-02 15:31:39 DEBUG backing off for 2 ms
# 2015-05-02 15:31:39 DEBUG long poll end
# 2015-05-02 15:31:39 DEBUG long poll req
# 2015-05-02 15:31:39 DEBUG long poll response 200 OK
# 2015-05-02 15:31:39 DEBUG got msg [[[121,["noop"]]]]
authhead = (sapisid, msec, origin) ->
auth_string = "#{msec} #{sapisid} #{origin}"
auth_hash = crypto.createHash('sha1').update(auth_string).digest 'hex'
return {
authorization: "SAPISIDHASH #{msec}_#{auth_hash}"
'x-origin': origin
'x-goog-authuser': '0'
}
sapisidof = (jarstore) ->
jar = new CookieJar jarstore
cookies = jar.getCookiesSync ORIGIN_URL
cookie = find cookies, (cookie) -> cookie.key == 'PI:KEY:<KEY>END_PI'
return cookie?.value
MAX_RETRIES = 5
module.exports = class Channel
constructor: (@jarstore, @proxy) ->
@pushParser = new PushDataParser()
fetchPvt: =>
log.debug 'fetching pvt'
opts =
method: 'GET'
uri: "#{ORIGIN_URL}/talkgadget/_/extension-start"
jar: request.jar @jarstore
withCredentials: true
req(opts).then (res) =>
data = JSON.parse res.body
log.debug 'found pvt token', data[1]
data[1]
.fail (err) ->
log.info 'fetchPvt failed', fmterr(err)
Q.reject err
authHeaders: ->
sapisid = sapisidof @jarstore
unless sapisid
log.warn 'no SAPISID cookie'
return null
authhead sapisid, Date.now(), ORIGIN_URL
fetchSid: =>
auth = @authHeaders()
return Q.reject new Error("No auth headers") unless auth
Q().then =>
opts =
method: 'POST'
uri: op 'channel/bind'
jar: request.jar @jarstore
qs:
VER: 8
RID: 81187
ctype: 'hangouts'
form:
count: 0
headers: auth
encoding: null # get body as buffer
withCredentials: true
req(opts).then (res) ->
# Example format (after parsing JS):
# [ [0,["c","SID_HERE","",8]],
# [1,[{"gsid":"GSESSIONID_HERE"}]]]
if res.statusCode == 200
p = new PushDataParser(res.body)
line = p.pop()
[_,[_,sid]] = line[0]
[_,[{gsid}]] = line[1]
log.debug 'found sid/gsid', sid, gsid
return {sid,gsid}
else
log.warn 'failed to get sid', res.statusCode, res.body
.fail (err) ->
log.info 'fetchSid failed', fmterr(err)
Q.reject err
# get next messages from channel
getLines: =>
@start() unless @running
@pushParser.allLines()
# start polling
start: =>
retries = MAX_RETRIES
@running = true
@sid = null # ensures we get a new sid
@gsid = null
@subscribed = false
run = =>
# graceful stop of polling
return unless @running
@poll(retries).then ->
# XXX we only reset to MAX_RETRIES after a full ended
# poll. this means in bad network conditions we get an
# edge case where retries never reset despite getting
# (interrupted) good polls. perhaps move retries to
# instance var?
retries = MAX_RETRIES # reset on success
run()
.fail (err) =>
# abort token is not an error
return if err == ABORT
retries--
log.debug 'poll error', err
if retries > 0
run()
else
@running = false
# resetting with error makes pushParser.allLines()
# resolve with that error, which in turn makes
# @getLines() propagate the error out.
@pushParser.reset(err)
run()
return null
# gracefully stop polling
stop: =>
log.debug 'channel stop'
# stop looping
@running = false
# this releases the @getLines() promise
@pushParser?.reset?()
# abort current request
@currentReq?.abort?()
poll: (retries) =>
Q().then ->
backoffTime = 2 * (MAX_RETRIES - retries) * 1000
log.debug 'backing off for', backoffTime, 'ms' if backoffTime
wait backoffTime
.then =>
Q.reject ABORT unless @running
.then =>
unless @sid
@fetchSid().then (o) =>
merge this, o # set on this
@pushParser.reset() # ensure no half data
.then =>
@reqpoll()
# long polling
reqpoll: => Q.Promise (rs, rj) =>
log.debug 'long poll req'
opts =
method: 'GET'
uri: op 'channel/bind'
jar: request.jar @jarstore
qs:
VER: 8
gsessionid: @gsid
RID: 'rpc'
t: 1
SID: @sid
CI: 0
ctype: 'hangouts'
TYPE: 'xmlhttp'
headers: @authHeaders()
encoding: null # get body as buffer
timeout: 30000 # 30 seconds timeout in connect attempt
withCredentials: true
ok = false
@currentReq = request(opts).on 'response', (res) =>
log.debug 'long poll response', res.statusCode, res.statusMessage
if res.statusCode == 200
return ok = true
else if isUnknownSID(res)
ok = false
log.debug 'sid became invalid'
@sid = null
@gsid = null
@subscribed = false
rj NetworkError.forRes(res)
.on 'data', (chunk) =>
if ok
# log.debug 'long poll chunk\n' + require('hexy').hexy(chunk)
@pushParser.parse chunk
# subscribe on first data received
@subscribe() unless @subscribed
.on 'error', (err) =>
log.debug 'long poll error', err
rj err
.on 'end', ->
log.debug 'long poll end'
rs()
# Subscribes the channel to receive relevant events. Only needs to
# be called when a new channel (SID/gsessionid) is opened.
subscribe: =>
return if @subscribed
@subscribed = true
Q().then ->
wait(1000) # https://github.com/tdryer/hangups/issues/58
.then =>
timestamp = Date.now() * 1000
services = ['babel', 'babel_presence_last_seen']
mapList = for service in services
JSON.stringify({"3": {"1": {"1": service}}})
formMap = {count: mapList.length, ofs: 0}
for el, ix in mapList
formMap["req#{ix}_p"] = el
opts =
method: 'POST'
uri: op 'channel/bind'
jar: request.jar @jarstore
proxy: @proxy
qs:
VER: 8
RID: 81188
ctype: 'hangouts'
gsessionid: @gsid
SID: @sid
headers: @authHeaders()
timeout: 30000 # 30 seconds timeout in connect attempt
form: formMap
withCredentials: true
req(opts)
.then (res) ->
if res.statusCode == 200
return log.debug 'subscribed channel'
else if isUnknownSID(res)
ok = false
log.debug 'sid became invalid'
@sid = null
@gsid = null
@subscribed = false
Q.reject NetworkError.forRes(res)
.fail (err) =>
log.info 'subscribe failed', fmterr(err)
@subscribed = false
Q.reject err
|
[
{
"context": "erviceModel\n) ->\n\n ###*\n # @author David Bouman\n # @module App\n # @submodule ",
"end": 463,
"score": 0.9998357892036438,
"start": 451,
"tag": "NAME",
"value": "David Bouman"
}
] | generators/app/templates/src/collections/api-services.coffee | marviq/generator-bat | 3 | 'use strict'
( ( factory ) ->
if typeof exports is 'object'
module.exports = factory(
require( 'backbone' )
require( './../models/api-service.coffee' )
)
else if typeof define is 'function' and define.amd
define( [
'backbone'
'./../models/api-service.coffee'
], factory )
return
)((
Backbone
ApiServiceModel
) ->
###*
# @author David Bouman
# @module App
# @submodule Collections
###
###*
# A collection of services available on an API.
#
# @class ApiServicesCollection
# @extends Backbone.Collection
# @constructor
###
class ApiServicesCollection extends Backbone.Collection
###*
# The collection's `{{#crossLink 'ApiServiceModel'}}{{/crossLink}}` constructor.
#
# @property model
# @type Function
# @protected
# @final
#
# @default ApiServiceModel
###
model: ApiServiceModel
###*
# The API's base url.
#
# This property will be initialized from the `options.url` constructor argument.
#
# @property url
# @type String
###
url: undefined
###*
# Initialize the `@url` property from `options`.
#
# @method initialize
# @protected
#
# @param {Array} [models] An initial array of models for the collection.
# @param {Object} [options]
# @param {String} [options.url] The base url for the API.
###
initialize: ( models, options ) ->
@url = options?.url
return
)
| 119258 | 'use strict'
( ( factory ) ->
if typeof exports is 'object'
module.exports = factory(
require( 'backbone' )
require( './../models/api-service.coffee' )
)
else if typeof define is 'function' and define.amd
define( [
'backbone'
'./../models/api-service.coffee'
], factory )
return
)((
Backbone
ApiServiceModel
) ->
###*
# @author <NAME>
# @module App
# @submodule Collections
###
###*
# A collection of services available on an API.
#
# @class ApiServicesCollection
# @extends Backbone.Collection
# @constructor
###
class ApiServicesCollection extends Backbone.Collection
###*
# The collection's `{{#crossLink 'ApiServiceModel'}}{{/crossLink}}` constructor.
#
# @property model
# @type Function
# @protected
# @final
#
# @default ApiServiceModel
###
model: ApiServiceModel
###*
# The API's base url.
#
# This property will be initialized from the `options.url` constructor argument.
#
# @property url
# @type String
###
url: undefined
###*
# Initialize the `@url` property from `options`.
#
# @method initialize
# @protected
#
# @param {Array} [models] An initial array of models for the collection.
# @param {Object} [options]
# @param {String} [options.url] The base url for the API.
###
initialize: ( models, options ) ->
@url = options?.url
return
)
| true | 'use strict'
( ( factory ) ->
if typeof exports is 'object'
module.exports = factory(
require( 'backbone' )
require( './../models/api-service.coffee' )
)
else if typeof define is 'function' and define.amd
define( [
'backbone'
'./../models/api-service.coffee'
], factory )
return
)((
Backbone
ApiServiceModel
) ->
###*
# @author PI:NAME:<NAME>END_PI
# @module App
# @submodule Collections
###
###*
# A collection of services available on an API.
#
# @class ApiServicesCollection
# @extends Backbone.Collection
# @constructor
###
class ApiServicesCollection extends Backbone.Collection
###*
# The collection's `{{#crossLink 'ApiServiceModel'}}{{/crossLink}}` constructor.
#
# @property model
# @type Function
# @protected
# @final
#
# @default ApiServiceModel
###
model: ApiServiceModel
###*
# The API's base url.
#
# This property will be initialized from the `options.url` constructor argument.
#
# @property url
# @type String
###
url: undefined
###*
# Initialize the `@url` property from `options`.
#
# @method initialize
# @protected
#
# @param {Array} [models] An initial array of models for the collection.
# @param {Object} [options]
# @param {String} [options.url] The base url for the API.
###
initialize: ( models, options ) ->
@url = options?.url
return
)
|
[
{
"context": "ode\n constructor: (@i, @j, @status) ->\n\n key: -> \"#{@i} - #{@j}\"\n\n getAdjacentNodes: (selector = \"FREE\") ->\n i",
"end": 73,
"score": 0.9558624029159546,
"start": 58,
"tag": "KEY",
"value": "\"#{@i} - #{@j}\""
},
{
"context": "----------------------------... | data/coffeescript/ea6cb5496eae8b1a4783795b598b637f_quora.coffee | maxim5/code-inspector | 5 | class Node
constructor: (@i, @j, @status) ->
key: -> "#{@i} - #{@j}"
getAdjacentNodes: (selector = "FREE") ->
if selector is "ALL" then return @adjascents
else
choices = selector.split " "
return (node for node in @adjascents when node.status in choices)
setAdjacentNodes: (@adjascents) ->
minDistanceTo: (target) ->
distX = @i - target.i
if distX < 0 then distX = 0 - distX
distY = @j - target.j
if distY < 0 then distY = 0 - distY
return distX + distY
equals: (node) ->
return if @i == node.i and @j == node.j then true else false
class Board
constructor: (@statusArray) ->
@boardArray = []
@height = statusArray.length
@width = statusArray[0].length
for line, i in statusArray
@boardArray[i]=[]
for status, j in line
@boardArray[i][j]=new Node(i,j,status)
if status == 'START' then @startingNode = @boardArray[i][j]
if status == 'END' then @endingNode = @boardArray[i][j]
for i in [0..(@height-1)]
for j in [0..(@width-1)]
adjacentNodes = (node for node in [this.getNode(i-1,j), this.getNode(i+1,j), this.getNode(i,j-1), this.getNode(i,j+1)] when node?)
this.getNode(i,j).setAdjacentNodes(adjacentNodes)
getNode: (i,j) ->
if i >=0 and i < @height and j >=0 and j < @width then return @boardArray[i][j]
return null
prettyPrint: ->
string = ""
for i in [0..(@height-1)]
string+="\n"
for j in [0..(@width-1)]
switch this.getNode(i,j).status
when 'START' then string+="B"
when 'FREE' then string+=" "
when 'END' then string+='E'
else string+='X'
return string
key: ->
string = ""
for i in [0..(@height-1)]
for j in [0..(@width-1)]
switch this.getNode(i,j).status
when 'START' then string+="B"
when 'FREE' then string+="F"
when 'END' then string+='E'
else string+='X'
return string
advanceStartingNode: (newStart, mode = "ADVANCE") ->
newStart.status='START'
if mode == "ADVANCE" then @startingNode.status="VISITED"
if mode == "ROLLBACK" then @startingNode.status="FREE"
@startingNode=newStart
#------------------------------------------------------------
# AStar by Ben Nolan
# http://bennolan.com/2011/04/11/astar-in-coffeescript.html
# Computes the shortest path between 2 nodes
# Usage :
# aStarCalculator = new AStar
# shortestPath = aStarCalculator.findPath(startNode, endNode)
# shortestPath is then an array of nodes or null if none is found
#------------------------------------------------------------
class AStar
constructor: (@maxHeuristic) ->
@openNodes = {} # List of openNodes nodes (nodes to be inspected)
@closedNodes = {} # List of closedNodes nodes (nodes we've already inspected)
findPath: (start, destination) ->
# g = 0 #Cost from start to current node
# h = heuristic(start, destination) #Cost from current node to destination
# var f = g+h #Cost from start to destination going through the current node
start.f = @heuristic(start, destination)
# Push the start node onto the list of openNodes nodes
# openNodes.push(start)
@openNodes[start.key()] = start
#Keep going while there's nodes in our openNodes list
while @openNodes
#console.log @openNodes
#Find the best openNodes node (lowest f value)
#Alternately, you could simply keep the openNodes list sorted by f value lowest to highest,
#in which case you always use the first node
node = { f : Infinity }
for key, n of @openNodes
if n.f < node.f
node = n
# No nodes remain in openNodes
if node.f == Infinity
# No path could be found...
#console.log "No path could be found"
return null
# console.log @closedNodes
# Check if we've reached our destination
if node.equals(destination)
path = [destination]
while (node != start) # && (node.parentKey)
node = @closedNodes[node.parentKey]
path.push node
path.reverse()
return path
# Remove the current node from our openNodes list
delete @openNodes[node.key()]
# Push it onto the closedNodes list
@closedNodes[node.key()] = node
# Expand our current node
for n in node.getAdjacentNodes('FREE END') when (!@closedNodes[n.key()]) && (!@openNodes[n.key()])
# console.log(n.key())
n.f = @heuristic(n, destination)
n.parentKey = node.key()
@openNodes[n.key()] = n
# Prevent really long paths
###
if n.f < @maxHeuristic
@openNodes[n.key()] = n
###
# else
# @closedNodes[n.key()] = n
# An A* heurisitic must be admissible, meaning it must never overestimate the
# distance to the goal. In other words, it must either underestimate or return
# exactly the distance to the goal.
heuristic: (a, b) ->
a.minDistanceTo(b)
#------------------------------------------------------------
# ResultKeeper : simplest key/value store ever
#------------------------------------------------------------
class ResultKeeper
constructor: ->
@store = []
keep: (key, value) ->
@store[key] = value
get: (key) ->
return @store[key]
#------------------------------------------------------------
# Starting of the Quora resolution algorithm
#------------------------------------------------------------
steps = 0
resultKeeper = new ResultKeeper
getQuoraScore= (board) ->
if isQuoraSolution(board) then return 1;
if resultKeeper.get(board.key())? then return resultKeeper.get(board.key())
if isQuoraImpossible(board) then return 0;
steps = steps+1
score = 0
# If one of the adjascent nodes has only 1 exit, we have to enter
# it and we cant go into any other node
forcedEntry = false
for newStart in board.startingNode.getAdjacentNodes('FREE')
if newStart.getAdjacentNodes('FREE END').length == 1
curStart = board.startingNode
board.advanceStartingNode(newStart)
score = score+getQuoraScore(board)
board.advanceStartingNode(curStart,'ROLLBACK')
forcedEntry = true
break
if not forcedEntry
for newStart in board.startingNode.getAdjacentNodes('FREE')
curStart = board.startingNode
board.advanceStartingNode(newStart)
score = score+getQuoraScore(board)
board.advanceStartingNode(curStart,'ROLLBACK')
resultKeeper.keep(board.key(), score)
return score
isQuoraSolution= (board) ->
for i in [0..(board.height-1)]
for j in [0..(board.width-1)]
if board.getNode(i,j).status == 'FREE' then return false
if board.startingNode.getAdjacentNodes('END').length == 1 then return true
else return false
isQuoraImpossible= (board) ->
if board.startingNode.getAdjacentNodes()?.length == 0 then return true
# Apply A* algorithm to all adjascent nodes, to check that each of them can still
# reach the exit.
for node in board.startingNode.getAdjacentNodes('FREE')
pathToEnd = new AStar(board.width * board.heigth).findPath(node, board.endingNode)
if not pathToEnd? then return true
return false
#-----------------------------
# This is where the fun begins
#-----------------------------
myBoardArray=[["START", "FREE", "FREE", "FREE", "FREE", "FREE", "FREE"],
["FREE", "FREE", "FREE", "FREE", "FREE", "FREE", "FREE"],
["FREE", "FREE", "FREE", "FREE", "FREE", "FREE", "FREE"],
["FREE", "FREE", "FREE", "FREE", "FREE", "FREE", "FREE"],
["FREE", "FREE", "FREE", "FREE", "FREE", "FREE", "FREE"],
["FREE", "FREE", "FREE", "FREE", "FREE", "FREE", "FREE"],
["FREE", "FREE", "FREE", "FREE", "FREE", "FREE", "FREE"],
["END", "FREE", "FREE", "FREE", "FREE", "CLOSED", "CLOSED"]]
firstBoard = new Board(myBoardArray)
time1 = new Date().getTime()
score = getQuoraScore(firstBoard)
time2 = new Date().getTime()
durationMS = (time2 - time1)
durationS = durationMS /1000
console.log score+" solutions in "+steps+" steps computed in "+durationS+" seconds ("+(durationMS / steps)+"ms / step)" | 160095 | class Node
constructor: (@i, @j, @status) ->
key: -> <KEY>
getAdjacentNodes: (selector = "FREE") ->
if selector is "ALL" then return @adjascents
else
choices = selector.split " "
return (node for node in @adjascents when node.status in choices)
setAdjacentNodes: (@adjascents) ->
minDistanceTo: (target) ->
distX = @i - target.i
if distX < 0 then distX = 0 - distX
distY = @j - target.j
if distY < 0 then distY = 0 - distY
return distX + distY
equals: (node) ->
return if @i == node.i and @j == node.j then true else false
class Board
constructor: (@statusArray) ->
@boardArray = []
@height = statusArray.length
@width = statusArray[0].length
for line, i in statusArray
@boardArray[i]=[]
for status, j in line
@boardArray[i][j]=new Node(i,j,status)
if status == 'START' then @startingNode = @boardArray[i][j]
if status == 'END' then @endingNode = @boardArray[i][j]
for i in [0..(@height-1)]
for j in [0..(@width-1)]
adjacentNodes = (node for node in [this.getNode(i-1,j), this.getNode(i+1,j), this.getNode(i,j-1), this.getNode(i,j+1)] when node?)
this.getNode(i,j).setAdjacentNodes(adjacentNodes)
getNode: (i,j) ->
if i >=0 and i < @height and j >=0 and j < @width then return @boardArray[i][j]
return null
prettyPrint: ->
string = ""
for i in [0..(@height-1)]
string+="\n"
for j in [0..(@width-1)]
switch this.getNode(i,j).status
when 'START' then string+="B"
when 'FREE' then string+=" "
when 'END' then string+='E'
else string+='X'
return string
key: ->
string = ""
for i in [0..(@height-1)]
for j in [0..(@width-1)]
switch this.getNode(i,j).status
when 'START' then string+="B"
when 'FREE' then string+="F"
when 'END' then string+='E'
else string+='X'
return string
advanceStartingNode: (newStart, mode = "ADVANCE") ->
newStart.status='START'
if mode == "ADVANCE" then @startingNode.status="VISITED"
if mode == "ROLLBACK" then @startingNode.status="FREE"
@startingNode=newStart
#------------------------------------------------------------
# AStar by <NAME>
# http://bennolan.com/2011/04/11/astar-in-coffeescript.html
# Computes the shortest path between 2 nodes
# Usage :
# aStarCalculator = new AStar
# shortestPath = aStarCalculator.findPath(startNode, endNode)
# shortestPath is then an array of nodes or null if none is found
#------------------------------------------------------------
class AStar
constructor: (@maxHeuristic) ->
@openNodes = {} # List of openNodes nodes (nodes to be inspected)
@closedNodes = {} # List of closedNodes nodes (nodes we've already inspected)
findPath: (start, destination) ->
# g = 0 #Cost from start to current node
# h = heuristic(start, destination) #Cost from current node to destination
# var f = g+h #Cost from start to destination going through the current node
start.f = @heuristic(start, destination)
# Push the start node onto the list of openNodes nodes
# openNodes.push(start)
@openNodes[start.key()] = start
#Keep going while there's nodes in our openNodes list
while @openNodes
#console.log @openNodes
#Find the best openNodes node (lowest f value)
#Alternately, you could simply keep the openNodes list sorted by f value lowest to highest,
#in which case you always use the first node
node = { f : Infinity }
for key, n of @openNodes
if n.f < node.f
node = n
# No nodes remain in openNodes
if node.f == Infinity
# No path could be found...
#console.log "No path could be found"
return null
# console.log @closedNodes
# Check if we've reached our destination
if node.equals(destination)
path = [destination]
while (node != start) # && (node.parentKey)
node = @closedNodes[node.parentKey]
path.push node
path.reverse()
return path
# Remove the current node from our openNodes list
delete @openNodes[node.key()]
# Push it onto the closedNodes list
@closedNodes[node.key()] = node
# Expand our current node
for n in node.getAdjacentNodes('FREE END') when (!@closedNodes[n.key()]) && (!@openNodes[n.key()])
# console.log(n.key())
n.f = @heuristic(n, destination)
n.parentKey = node.key()
@openNodes[n.key()] = n
# Prevent really long paths
###
if n.f < @maxHeuristic
@openNodes[n.key()] = n
###
# else
# @closedNodes[n.key()] = n
# An A* heurisitic must be admissible, meaning it must never overestimate the
# distance to the goal. In other words, it must either underestimate or return
# exactly the distance to the goal.
heuristic: (a, b) ->
a.minDistanceTo(b)
#------------------------------------------------------------
# ResultKeeper : simplest key/value store ever
#------------------------------------------------------------
class ResultKeeper
constructor: ->
@store = []
keep: (key, value) ->
@store[key] = value
get: (key) ->
return @store[key]
#------------------------------------------------------------
# Starting of the Quora resolution algorithm
#------------------------------------------------------------
steps = 0
resultKeeper = new ResultKeeper
getQuoraScore= (board) ->
if isQuoraSolution(board) then return 1;
if resultKeeper.get(board.key())? then return resultKeeper.get(board.key())
if isQuoraImpossible(board) then return 0;
steps = steps+1
score = 0
# If one of the adjascent nodes has only 1 exit, we have to enter
# it and we cant go into any other node
forcedEntry = false
for newStart in board.startingNode.getAdjacentNodes('FREE')
if newStart.getAdjacentNodes('FREE END').length == 1
curStart = board.startingNode
board.advanceStartingNode(newStart)
score = score+getQuoraScore(board)
board.advanceStartingNode(curStart,'ROLLBACK')
forcedEntry = true
break
if not forcedEntry
for newStart in board.startingNode.getAdjacentNodes('FREE')
curStart = board.startingNode
board.advanceStartingNode(newStart)
score = score+getQuoraScore(board)
board.advanceStartingNode(curStart,'ROLLBACK')
resultKeeper.keep(board.key(), score)
return score
isQuoraSolution= (board) ->
for i in [0..(board.height-1)]
for j in [0..(board.width-1)]
if board.getNode(i,j).status == 'FREE' then return false
if board.startingNode.getAdjacentNodes('END').length == 1 then return true
else return false
isQuoraImpossible= (board) ->
if board.startingNode.getAdjacentNodes()?.length == 0 then return true
# Apply A* algorithm to all adjascent nodes, to check that each of them can still
# reach the exit.
for node in board.startingNode.getAdjacentNodes('FREE')
pathToEnd = new AStar(board.width * board.heigth).findPath(node, board.endingNode)
if not pathToEnd? then return true
return false
#-----------------------------
# This is where the fun begins
#-----------------------------
myBoardArray=[["START", "FREE", "FREE", "FREE", "FREE", "FREE", "FREE"],
["FREE", "FREE", "FREE", "FREE", "FREE", "FREE", "FREE"],
["FREE", "FREE", "FREE", "FREE", "FREE", "FREE", "FREE"],
["FREE", "FREE", "FREE", "FREE", "FREE", "FREE", "FREE"],
["FREE", "FREE", "FREE", "FREE", "FREE", "FREE", "FREE"],
["FREE", "FREE", "FREE", "FREE", "FREE", "FREE", "FREE"],
["FREE", "FREE", "FREE", "FREE", "FREE", "FREE", "FREE"],
["END", "FREE", "FREE", "FREE", "FREE", "CLOSED", "CLOSED"]]
firstBoard = new Board(myBoardArray)
time1 = new Date().getTime()
score = getQuoraScore(firstBoard)
time2 = new Date().getTime()
durationMS = (time2 - time1)
durationS = durationMS /1000
console.log score+" solutions in "+steps+" steps computed in "+durationS+" seconds ("+(durationMS / steps)+"ms / step)" | true | class Node
constructor: (@i, @j, @status) ->
key: -> PI:KEY:<KEY>END_PI
getAdjacentNodes: (selector = "FREE") ->
if selector is "ALL" then return @adjascents
else
choices = selector.split " "
return (node for node in @adjascents when node.status in choices)
setAdjacentNodes: (@adjascents) ->
minDistanceTo: (target) ->
distX = @i - target.i
if distX < 0 then distX = 0 - distX
distY = @j - target.j
if distY < 0 then distY = 0 - distY
return distX + distY
equals: (node) ->
return if @i == node.i and @j == node.j then true else false
class Board
constructor: (@statusArray) ->
@boardArray = []
@height = statusArray.length
@width = statusArray[0].length
for line, i in statusArray
@boardArray[i]=[]
for status, j in line
@boardArray[i][j]=new Node(i,j,status)
if status == 'START' then @startingNode = @boardArray[i][j]
if status == 'END' then @endingNode = @boardArray[i][j]
for i in [0..(@height-1)]
for j in [0..(@width-1)]
adjacentNodes = (node for node in [this.getNode(i-1,j), this.getNode(i+1,j), this.getNode(i,j-1), this.getNode(i,j+1)] when node?)
this.getNode(i,j).setAdjacentNodes(adjacentNodes)
getNode: (i,j) ->
if i >=0 and i < @height and j >=0 and j < @width then return @boardArray[i][j]
return null
prettyPrint: ->
string = ""
for i in [0..(@height-1)]
string+="\n"
for j in [0..(@width-1)]
switch this.getNode(i,j).status
when 'START' then string+="B"
when 'FREE' then string+=" "
when 'END' then string+='E'
else string+='X'
return string
key: ->
string = ""
for i in [0..(@height-1)]
for j in [0..(@width-1)]
switch this.getNode(i,j).status
when 'START' then string+="B"
when 'FREE' then string+="F"
when 'END' then string+='E'
else string+='X'
return string
advanceStartingNode: (newStart, mode = "ADVANCE") ->
newStart.status='START'
if mode == "ADVANCE" then @startingNode.status="VISITED"
if mode == "ROLLBACK" then @startingNode.status="FREE"
@startingNode=newStart
#------------------------------------------------------------
# AStar by PI:NAME:<NAME>END_PI
# http://bennolan.com/2011/04/11/astar-in-coffeescript.html
# Computes the shortest path between 2 nodes
# Usage :
# aStarCalculator = new AStar
# shortestPath = aStarCalculator.findPath(startNode, endNode)
# shortestPath is then an array of nodes or null if none is found
#------------------------------------------------------------
class AStar
constructor: (@maxHeuristic) ->
@openNodes = {} # List of openNodes nodes (nodes to be inspected)
@closedNodes = {} # List of closedNodes nodes (nodes we've already inspected)
findPath: (start, destination) ->
# g = 0 #Cost from start to current node
# h = heuristic(start, destination) #Cost from current node to destination
# var f = g+h #Cost from start to destination going through the current node
start.f = @heuristic(start, destination)
# Push the start node onto the list of openNodes nodes
# openNodes.push(start)
@openNodes[start.key()] = start
#Keep going while there's nodes in our openNodes list
while @openNodes
#console.log @openNodes
#Find the best openNodes node (lowest f value)
#Alternately, you could simply keep the openNodes list sorted by f value lowest to highest,
#in which case you always use the first node
node = { f : Infinity }
for key, n of @openNodes
if n.f < node.f
node = n
# No nodes remain in openNodes
if node.f == Infinity
# No path could be found...
#console.log "No path could be found"
return null
# console.log @closedNodes
# Check if we've reached our destination
if node.equals(destination)
path = [destination]
while (node != start) # && (node.parentKey)
node = @closedNodes[node.parentKey]
path.push node
path.reverse()
return path
# Remove the current node from our openNodes list
delete @openNodes[node.key()]
# Push it onto the closedNodes list
@closedNodes[node.key()] = node
# Expand our current node
for n in node.getAdjacentNodes('FREE END') when (!@closedNodes[n.key()]) && (!@openNodes[n.key()])
# console.log(n.key())
n.f = @heuristic(n, destination)
n.parentKey = node.key()
@openNodes[n.key()] = n
# Prevent really long paths
###
if n.f < @maxHeuristic
@openNodes[n.key()] = n
###
# else
# @closedNodes[n.key()] = n
# An A* heurisitic must be admissible, meaning it must never overestimate the
# distance to the goal. In other words, it must either underestimate or return
# exactly the distance to the goal.
heuristic: (a, b) ->
a.minDistanceTo(b)
#------------------------------------------------------------
# ResultKeeper : simplest key/value store ever
#------------------------------------------------------------
class ResultKeeper
constructor: ->
@store = []
keep: (key, value) ->
@store[key] = value
get: (key) ->
return @store[key]
#------------------------------------------------------------
# Starting of the Quora resolution algorithm
#------------------------------------------------------------
steps = 0
resultKeeper = new ResultKeeper
getQuoraScore= (board) ->
if isQuoraSolution(board) then return 1;
if resultKeeper.get(board.key())? then return resultKeeper.get(board.key())
if isQuoraImpossible(board) then return 0;
steps = steps+1
score = 0
# If one of the adjascent nodes has only 1 exit, we have to enter
# it and we cant go into any other node
forcedEntry = false
for newStart in board.startingNode.getAdjacentNodes('FREE')
if newStart.getAdjacentNodes('FREE END').length == 1
curStart = board.startingNode
board.advanceStartingNode(newStart)
score = score+getQuoraScore(board)
board.advanceStartingNode(curStart,'ROLLBACK')
forcedEntry = true
break
if not forcedEntry
for newStart in board.startingNode.getAdjacentNodes('FREE')
curStart = board.startingNode
board.advanceStartingNode(newStart)
score = score+getQuoraScore(board)
board.advanceStartingNode(curStart,'ROLLBACK')
resultKeeper.keep(board.key(), score)
return score
isQuoraSolution= (board) ->
for i in [0..(board.height-1)]
for j in [0..(board.width-1)]
if board.getNode(i,j).status == 'FREE' then return false
if board.startingNode.getAdjacentNodes('END').length == 1 then return true
else return false
isQuoraImpossible= (board) ->
if board.startingNode.getAdjacentNodes()?.length == 0 then return true
# Apply A* algorithm to all adjascent nodes, to check that each of them can still
# reach the exit.
for node in board.startingNode.getAdjacentNodes('FREE')
pathToEnd = new AStar(board.width * board.heigth).findPath(node, board.endingNode)
if not pathToEnd? then return true
return false
#-----------------------------
# This is where the fun begins
#-----------------------------
myBoardArray=[["START", "FREE", "FREE", "FREE", "FREE", "FREE", "FREE"],
["FREE", "FREE", "FREE", "FREE", "FREE", "FREE", "FREE"],
["FREE", "FREE", "FREE", "FREE", "FREE", "FREE", "FREE"],
["FREE", "FREE", "FREE", "FREE", "FREE", "FREE", "FREE"],
["FREE", "FREE", "FREE", "FREE", "FREE", "FREE", "FREE"],
["FREE", "FREE", "FREE", "FREE", "FREE", "FREE", "FREE"],
["FREE", "FREE", "FREE", "FREE", "FREE", "FREE", "FREE"],
["END", "FREE", "FREE", "FREE", "FREE", "CLOSED", "CLOSED"]]
firstBoard = new Board(myBoardArray)
time1 = new Date().getTime()
score = getQuoraScore(firstBoard)
time2 = new Date().getTime()
durationMS = (time2 - time1)
durationS = durationMS /1000
console.log score+" solutions in "+steps+" steps computed in "+durationS+" seconds ("+(durationMS / steps)+"ms / step)" |
[
{
"context": "\nBundle: Account (assets)\nProject: Waaave\nAuthors: Julien Le Coupanec, Valerian Saliou\nCopyright: 2014, Waaave\n###\n\n__ ",
"end": 72,
"score": 0.9998700022697449,
"start": 54,
"tag": "NAME",
"value": "Julien Le Coupanec"
},
{
"context": "sets)\nProject: Waaave\nAu... | static/src/assets/account/javascripts/account.coffee | valeriansaliou/waaave-web | 1 | ###
Bundle: Account (assets)
Project: Waaave
Authors: Julien Le Coupanec, Valerian Saliou
Copyright: 2014, Waaave
###
__ = window
class Account
init: ->
try
@_input_all = $ 'input'
@_body = $ '#body'
@_form_invalid_sel = $ 'form.is_invalid'
catch error
Console.error 'Account.init', error
shake_invalid_form: ->
try
@_form_invalid_sel.effect(
'shake',
times: 2,
350
)
catch error
Console.error 'Account.shake_invalid_form', error
focus_required_input: ->
try
@_body.find('input.input-error:visible, input:not([value]):visible').filter(':first').focus()
catch error
Console.error 'Account.focus_required_input', error
@Account = new Account
$(document).ready ->
__.Account.init()
__.Account.shake_invalid_form()
__.Account.focus_required_input()
LayoutRegistry.register_bundle 'Account'
| 103650 | ###
Bundle: Account (assets)
Project: Waaave
Authors: <NAME>, <NAME>
Copyright: 2014, Waaave
###
__ = window
class Account
init: ->
try
@_input_all = $ 'input'
@_body = $ '#body'
@_form_invalid_sel = $ 'form.is_invalid'
catch error
Console.error 'Account.init', error
shake_invalid_form: ->
try
@_form_invalid_sel.effect(
'shake',
times: 2,
350
)
catch error
Console.error 'Account.shake_invalid_form', error
focus_required_input: ->
try
@_body.find('input.input-error:visible, input:not([value]):visible').filter(':first').focus()
catch error
Console.error 'Account.focus_required_input', error
@Account = new Account
$(document).ready ->
__.Account.init()
__.Account.shake_invalid_form()
__.Account.focus_required_input()
LayoutRegistry.register_bundle 'Account'
| true | ###
Bundle: Account (assets)
Project: Waaave
Authors: PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI
Copyright: 2014, Waaave
###
__ = window
class Account
init: ->
try
@_input_all = $ 'input'
@_body = $ '#body'
@_form_invalid_sel = $ 'form.is_invalid'
catch error
Console.error 'Account.init', error
shake_invalid_form: ->
try
@_form_invalid_sel.effect(
'shake',
times: 2,
350
)
catch error
Console.error 'Account.shake_invalid_form', error
focus_required_input: ->
try
@_body.find('input.input-error:visible, input:not([value]):visible').filter(':first').focus()
catch error
Console.error 'Account.focus_required_input', error
@Account = new Account
$(document).ready ->
__.Account.init()
__.Account.shake_invalid_form()
__.Account.focus_required_input()
LayoutRegistry.register_bundle 'Account'
|
[
{
"context": "\n location: [ 480, 28 ]\n pedestal:\n name: 'Master Sword Pedestal'\n requires: [ 'red_pendant', 'blue_pendant', '",
"end": 15701,
"score": 0.7791166305541992,
"start": 15680,
"tag": "NAME",
"value": "Master Sword Pedestal"
},
{
"context": " location: [ 61, ... | src/data.cson | Xenoveritas/lttp-tracker | 0 | # LTTP database. Note that this is "compiled" into a JavaScript file that is
# loaded.
# The items list is basically a list of names for collectibles. Other data is
# listed for them, but it's never used. These define the "base states" that
# rules are based on.
items:
sword_1:
name: "Fighter's Sword"
slot: "sword"
type: "equipment"
sword_2:
name: "Master Sword"
slot: "sword"
type: "equipment"
upgrades: "sword_1"
sword_3:
name: "Tempered Sword"
slot: "sword"
type: "equipment"
upgrades: "sword_2"
sword_4:
name: "Golden Sword"
slot: "sword"
type: "equipment"
upgrades: "sword_3"
shield_1:
name: "Fighter's Shield"
slot: "shield"
type: "equipment"
shield_2:
name: "Red Shield"
slot: "shield"
type: "equipment"
upgrades: "shield_1"
shield_3:
name: "Mirror Shield"
slot: "shield"
type: "equipment"
upgrades: "shield_2"
green_jerkin:
name: "Green Jerkin"
slot: "armor"
type: "equipment"
"default": true
blue_mail:
name: "Blue Mail"
slot: "armor"
type: "equipment"
upgrades: "green_jerkin"
red_mail:
name: "Red Mail"
slot: "armor"
type: "equipment"
upgrades: "blue_mail"
pegasus_boots:
name: "Pegasus Shoes"
type: "equipment"
power_glove:
name: "Power Glove"
slot: "glove"
type: "equipment"
titans_mitt:
name: "Titan's Mitt"
slot: "glove"
type: "equipment"
upgrades: "power_glove"
flippers:
name: "Zora's Flippers"
type: "equipment"
moon_pearl:
name: "Moon Pearl"
type: "equipment"
# Just the bow - necessary for retro mode
bow:
name: "Bow"
# For retro mode:
rupee_quiver:
name: "Rupee Quiver"
# The bow with wooden arrows
bow_and_wooden_arrows:
name: "Bow & Wooden Arrows"
# The bow with silver arrows
bow_and_silver_arrows:
name: "Bow & Silver Arrows"
boomerang:
name: "Boomerang"
magic_boomerang:
name: "Magical Boomerang"
upgrades: "boomerang"
hookshot:
name: "Hookshot"
bombs:
name: "Bomb"
mushroom:
name: "Mushroom"
magic_powder:
name: "Magic Powder"
fire_rod:
name: "Fire Rod"
ice_rod:
name: "Ice Rod"
bombos:
name: "Bombos Medallion"
ether:
name: "Ether Medallion"
quake:
name: "Quake Medallion"
lamp:
name: "Lantern"
hammer:
name: "Magic Hammer"
shovel:
name: "Shovel"
flute:
name: "Flute"
upgrades: "shovel"
net:
name: "Bug-Catching Net"
book:
name: "Book of Mudora"
bottle:
name: "Bottle"
stacks: 4
# This isn't used but is listed anyway on the off chance it ever gets used:
contains: [
"Medicine of Life", "Medicine of Magic", "Medicine of Life and Magic",
"Fairy", "Bee", "Golden Bee"
]
red_cane:
name: "Cane of Somaria"
blue_cane:
name: "Cane of Byrna"
cape:
name: "Magic Cape"
mirror:
name: "Magic Mirror"
green_pendant:
name: "Pendant of Courage"
type: "pendant"
red_pendant:
name: "Pendant of Wisdom"
type: "pendant"
blue_pendant:
name: "Pendant of Power"
type: "pendant"
crystal_1:
name: "Crystal 1"
type: "crystal"
crystal_2:
name: "Crystal 2"
type: "crystal"
crystal_3:
name: "Crystal 3"
type: "crystal"
crystal_4:
name: "Crystal 4"
type: "crystal"
crystal_5:
name: "Crystal 5"
type: "crystal"
superbomb: true
crystal_6:
name: "Crystal 6"
type: "crystal"
superbomb: true
crystal_7:
name: "Crystal 7"
type: "crystal"
rules:
# RULES ######################################################################
# These define basic rules.
#
# Rule definitions are used just about everywhere, so here's the basics:
#
# * All rules exist in an environment. Each key for each rule and each key for
# each item and location exists in this environment and can be used in a
# rule.
# * A rule definition can contain any of the following:
# - A boolean (true, false) that sets the rule to always be that value
# - A string, that evaluates to whatever that string is in the environment
# - An object, that contains the following fields:
# * any: a list of things to logical OR together
# * all: a list of things to logical AND together
# The evaluated value of any and all will be ANDed together to create
# the final evaluated value of the rule.
# Each "thing" in that list can either be a string (to look up a
# variable) or itself a rule using the same definition.
# - An array, which is treated as if it were under the "all" key above.
#
# Each rule has a name and description. Currently the description is ignored.
# Names, if provided, are used when generating tooltips that reference the
# rule. Instead of reporting the subrule, the rule's name is used instead.
light:
# Really this is a meta-item. It exists to allow "light source" to be
# flagged separately from "fire source" for anyone who wants to attempt
# dark room navigation.
name: 'Light Source'
description: 'A source of light.'
requires: 'lamp'
sword:
name: "Any Sword"
requires: any: [ 'sword_1', 'sword_2', 'sword_3', 'sword_4' ]
master_sword:
name: "Master Sword (or better)"
# Also technically full health
requires: any: [ 'sword_2', 'sword_3', 'sword_4' ]
bow_and_arrows:
name: "Bow & Arrows"
requires: any: [ 'bow_and_wooden_arrows', 'bow_and_silver_arrows' ]
# Something that can light a torch.
light_torch:
name: 'Light Torch'
description: 'The ability to light a torch.'
requires: any: [ 'lamp', 'fire_rod' ]
weapon:
name: 'Any Weapon'
description: 'Access to a weapon'
requires: any: [ 'sword', 'bow_and_arrows', 'bombs', 'fire_rod', 'hammer',
'red_cane', 'blue_cane' ]
swimming:
name: 'Swimming'
description: 'Either Flippers or accessible via Fake Flippers'
requires: 'flippers'
# Something that can melt ice, required for the Ice Palace.
melt_ice:
name: 'Melt Ice (Fire Rod/Usable Bombos)'
description: 'The ability to melt ice: specifically Freezors and Kholdstare.'
requires: any: [ 'fire_rod', 'use_bombos' ]
defeat_popo:
name: "Defeat Popo"
description: "Ability to defeat Popos"
requires: any: [ 'weapon', 'hookshot' ]
defeat_penguins:
name: "Defeat Penguins"
description: "Ability to defeat Penguins"
# Literally the same as above, but :shrug:
requires: any: [ 'weapon', 'hookshot' ]
avoid_laser:
name: "Avoid Eye Lasers"
description: "Please do not look directly at the eye laser."
requires: any: [ 'cape', 'blue_cane', 'shield_3' ]
# -- Use Medallions --
# These rules are to indicate a medallion may be used.
use_medallion:
name: "Use Medallion"
description: 'The ability to use a medallion.'
# In Swordless, this becomes just the hammer.
requires: any: [ 'sword_1', 'sword_2', 'sword_3', 'sword_4' ]
# This rule is effectively for replacing in Swordless.
cut_drapes:
name: "Cut Drapes"
requires: 'sword'
use_bombos:
name: "Use Bombos Medallion"
requires: [ 'use_medallion', 'bombos' ]
use_ether:
name: "Use Ether Medallion"
requires: [ 'use_medallion', 'ether' ]
use_quake:
name: "Use Quake Medallion"
requires: [ 'use_medallion', 'quake' ]
mire_medallion:
name: "Misery Mire Medallion"
# The Misery Mire medallion is randomized. This rule is the "default": if
# you have all, you can use it.
requires: [ 'bombos', 'ether', 'quake' ]
turtle_medallion:
name: "Turtle Rock Medallion"
# The Turtle Rock medallion is randomized. This rule is the "default": if
# you have all, you can use it.
requires: [ 'bombos', 'ether', 'quake' ]
# NOTE: You do NOT need anything to use the medallions for dungeon unlock
# purposes in swordless mode.
use_mire_medallion:
name: "Use Misery Mire Medallion"
requires: [ 'use_medallion', 'mire_medallion' ]
use_turtle_medallion:
name: "Use Turtle Rock Medallion"
requires: [ 'use_medallion', 'turtle_medallion' ]
flip_switch_past_barrier:
name: 'Flip Crystal Switch Past Barrier'
description: 'Something that can flip a crystal switch past a barrier.'
requires: any: [ 'boomerang', 'magic_boomerang', 'bombs', 'fire_rod',
'ice_rod', 'bow_and_arrows', 'red_cane', 'blue_cane'
]
flip_switch:
name: 'Flip Crystal Switch'
description: 'Something that can flip a crystal switch, assuming nothing is in the way.'
requires: any: [ 'flip_switch_past_barrier', 'sword', 'hookshot', 'hammer' ]
# Whatever's required to activate a pedestal. This is mainly here so that it
# can be replaced with a rule that says "hammer" in swordless runs.
pedestal_sword:
name: 'Pedestal Sword'
requires: any: [ 'sword_2', 'sword_3', 'sword_4' ]
glove:
name: 'Power Glove (or better)'
requires: any: [ 'power_glove', 'titans_mitt' ]
break_weak_wall:
name: "Bombs/Pegasus Shoes"
requires: any: [ 'bombs', 'pegasus_boots' ]
all_crystals:
name: "All Crystals"
requires: [ 'crystal_1', 'crystal_2', 'crystal_3', 'crystal_4', 'crystal_5',
'crystal_6', 'crystal_7' ]
# Ganon's Tower rules
gt_dms_room:
name: "Ganon's Tower - DMs Room"
description: "A room in Ganon's Tower"
requires: [ 'hammer', 'hookshot' ]
gt_randomizer_room:
name: "Ganon's Tower - Randomizer Room"
description: "A room in Ganon's Tower"
requires: [ 'hammer', 'hookshot', 'bombs' ]
gt_basement_left:
name: "Ganon's Tower - Left Side"
description: "Left side of the basement of Ganon's Tower"
requires: any: [ 'red_cane', 'bombs' ], all: [ 'hookshot', 'hammer' ]
gt_basement_right:
name: "Ganon's Tower - Right Side"
description: "Right side of the basement of Ganon's Tower"
requires: [ 'fire_rod', 'red_cane' ]
gt_big_key_room:
name: "Ganon's Tower - Big Key Room"
description: "The room with the Big Key in Ganon's Tower"
requires: [ 'gt_basement_clear', 'bombs' ]
gt_basement_clear:
name: "Ganon's Tower - Basement"
description: "Entering the common area with the Big Chest"
requires: any: [ 'gt_basement_left', 'gt_basement_right' ]
# REGIONS ######################################################################
#
# Regions define general areas of the map.
regions:
light_world:
name: "Light World"
points: "0,0 191,0 191,128 355,128"
dm_west:
name: "Death Mountain (West)"
requires: any: [ [ 'light', 'glove' ], 'flute' ]
dm_east:
name: "Death Mountain (East)"
requires: [ 'dm_west', { any: [ 'hookshot', [ 'hammer', 'mirror' ] ] } ]
turtle_rock_top:
name: 'Turtle Rock Medallion Platform'
# Access to the medallion platform.
requires: [ 'dm_east', 'titans_mitt', 'hammer' ]
dark_world:
name: 'Dark World'
# "The area of the Dark World you're supposed to start in" - this does NOT
# include the Moon Pearl. This rule gets crazy fast.
requires: any: [
# Simple enough: Defeat Agahnim, create portal
'castle_portal',
# Also simple: go through the portal near Flute Spot 5
[ 'eastern_portal', 'moon_pearl' ],
# Somewhat more complicated: go through Kakariko using the Titan's Mitt
# and have the flippers, allowing you to swim there.
[ 'moon_pearl', 'titans_mitt', 'flippers' ]
]
dw_nw:
name: 'Dark World (Northwest)'
# Basically "Theives Town and surroundings." Also includes all of Skull
# Woods. Note that the Moon Pearl is required to leave the starting area.
requires: any: [
'kakariko_portal',
# Access is also possible with any way to the general Dark World + the
# hookshot.
[ 'dark_world', 'moon_pearl', 'hookshot' ] ]
dw_nw_mirror:
name: 'Dark World (Northwest) and Magic Mirror'
requires: [ 'dw_nw', 'mirror' ]
dw_south:
name: 'Dark World (South)'
# The portion of the Dark World that's south of Thieves' Town and south
# of the Pyramid of Power. Always accessible if you can get to dw_nw
# (you can just drop down a ledge), or if you have Dark World access and the
# Magic Hammer.
requires: any: [ [ 'dark_world', 'hammer' ], 'dw_nw' ]
dw_mm:
name: 'Misery Mire (exterior)'
requires: [ 'titans_mitt', 'flute' ]
dw_mm_mirror:
name: 'Misery Mire (exterior) with Magic Mirror'
requires: [ 'dw_mm', 'mirror' ]
dw_dm_west:
name: 'Dark World Death Mountain (West)'
# The area you can get to as Bunny Link (note: does NOT include Ganon's
# Tower)
requires: 'dm_west'
dw_dm_east:
name: 'Dark World Death Mountain (East)'
# Basically the rest of Death Mountain from above. Also covers the
# Turtle Rock entrance.
requires: [ 'dm_east', 'titans_mitt' ]
locations:
# Locations are points on the map. They either indicate places that items can
# be retrieved or other points of interest.
#
# Locations have the following fields:
# name - the human-readable name of the location
# required - a rule defining the requirements for accessing the location,
# defaults to true (always available)
# visible - a rule defining what is required for a location to be "visible"
# which really only means "can see the item there" which defaults to
# false
# location - set of x,y coordinates for a location. Indicates where the
# location is on the 1024x512 map, where x between 0-511 are in the
# Light World and x between 512-1023 are in the Dark World. (Note that
# this makes them effectively tiles.)
# LIGHT WORLD LOCATIONS ######################################################
## DARK WORLD TELEPORTS ##
# These are not item locations, just areas where you can teleport to the
# Dark World from the Light World.
kakariko_portal:
name: 'Kakariko Village Portal'
type: 'portal'
# This portal is only useful with the Moon Pearl.
requires: any: [ 'titans_mitt', [ 'glove', 'hammer' ] ], all: 'moon_pearl'
location: [ 46, 172 ]
castle_portal:
name: 'Hyrule Castle Gate Portal'
# This is pretty much the only portal that doesn't instantly require the
# Moon Pearl
type: 'portal'
requires: 'castle_tower.cleared'
location: [ 256, 269 ]
eastern_portal:
name: 'Eastern Palace Portal (Flute 5)'
type: 'portal'
requires: [ 'hammer', 'glove', 'moon_pearl' ]
location: [ 498, 366 ]
swamp_portal:
name: 'Swamp Portal'
type: 'portal'
requires: [ 'hammer', 'glove', 'moon_pearl' ]
location: [ 232, 394 ]
desert_portal:
name: 'Desert Portal'
type: 'portal'
# Strictly speaking you don't instantly require the Moon Pearl for this one.
requires: [ 'flute', 'titans_mitt' ]
location: [ 20, 502 ]
ice_portal:
name: 'Ice Palace Portal'
type: 'portal'
# Strictly speaking you don't instantly dead-end without the Moon Pearl.
# Instead you dead-end after entering the palace and being unable to kill
# the Freezor.
requires: [ 'swimming', 'titans_mitt', 'moon_pearl' ]
location: [ 406, 444 ]
# It exists, technically:
spectacle_rock_portal:
name: "Spectacle Rock Portal"
type: 'portal'
requires: 'dm_west'
location: [ 288, 46 ]
dm_east_portal:
name: "Death Mountain Eastern Portal"
type: 'portal'
requires: [ 'dm_east', 'titans_mitt' ]
location: [ 394, 116 ]
turtle_rock_portal:
name: "Turtle Rock Portal"
type: 'portal'
requires: [ 'dm_east', 'titans_mitt', 'hammer' ]
location: [ 480, 28 ]
pedestal:
name: 'Master Sword Pedestal'
requires: [ 'red_pendant', 'blue_pendant', 'green_pendant' ]
visible: 'book'
location: [ 21, 26 ]
forest_mushroom:
name: 'Mushroom'
location: [ 61, 44 ]
thieves_cave:
name: "Thieves' Den"
location: [ 97, 67 ]
lumberjack_cave:
name: 'Lumberjack Cave'
requires: [ 'castle_tower.cleared', 'pegasus_boots' ]
visible: true
location: [ 154, 37 ]
ether_pedestal:
name: 'Ether Pedestal'
requires: [ 'dm_west', 'pedestal_sword', 'book' ]
visible: [ 'dm_west', 'book' ]
location: [ 215, 9 ]
old_man:
name: 'Lost Old Man'
requires: [ 'dm_west', 'light' ]
location: [ 229, 119 ]
spectacle_rock_cave:
name: 'Spectacle Rock Cave'
requires: 'dm_west'
location: [ 250, 74 ]
spectacle_rock:
name: 'Spectacle Rock'
requires: [ 'dm_west', 'mirror' ]
visible: 'dm_west'
location: [ 260, 44 ]
floating_island:
name: 'Floating Island'
requires: [ 'dw_dm_east', 'moon_pearl', 'bombs', 'mirror' ]
visible: 'dm_east'
location: [ 417, 12 ]
spiral_cave:
name: 'Spiral Cave'
requires: 'dm_east'
location: [ 408, 46 ]
mimic_cave:
name: 'Mimic Cave'
requires: [ 'moon_pearl', 'mirror', 'use_turtle_medallion', 'dw_dm_east' ]
location: [ 431, 47 ]
paradox_cave_basic:
name: 'Paradox Cave (no bombs)'
items: 5
requires: 'dm_east'
paradox_cave_bombs:
name: 'Paradox Cave (behind bombable wall)'
items: 2
requires: [ 'dm_east', 'bombs' ]
paradox_cave:
name: 'Paradox Cave'
merge: [ 'paradox_cave_basic', 'paradox_cave_bombs' ]
location: [ 441, 110 ]
zora:
name: 'Zora'
requires: any: [ 'glove', 'flippers' ]
rupees: 500
zora_ledge:
name: "Zora's Domain Ledge"
requires: 'flippers'
# If you can get into Zora's Domain, you can see what's on the ledge
visible: 'glove'
zoras_domain:
name: "Zora's Domain"
merge: [ "zora", "zora_ledge" ]
#location: [ 490, 64 ] - intentionally lowered as otherwise it looks like
# it was on Death Mountain
location: [ 490, 72 ]
behind_waterfall:
name: 'Waterfall Fairy'
requires: 'flippers'
items: 2
location: [ 460, 70 ]
well_floor:
name: 'Well Floor'
items: 4
well_cave:
name: 'Well Cave'
requires: 'bombs'
well:
name: 'Well'
merge: [ 'well_floor', 'well_cave' ]
location: [ 11, 217 ]
blinds_block_puzzle:
name: "Blind's House (block puzzle)"
items: 4
blinds_secret:
name: "Blind's House (bombable wall)"
requires: 'bombs'
blinds_house:
name: "Blind's House"
merge: [ 'blinds_block_puzzle', 'blinds_secret' ]
location: [ 65, 215 ]
bottle_vendor:
name: 'Bottle Vendor'
rupees: 100
location: [ 48, 238 ]
chicken_house:
name: 'Chicken House'
requires: 'bombs'
location: [ 50, 277 ]
sick_kid:
name: 'Sick Kid',
requires: 'bottle'
location: [ 79, 274 ]
tavern:
name: 'Tavern'
location: [ 81, 290 ]
blacksmiths:
name: 'Blacksmiths'
# Does NOT require the mirror: the guy will follow after save and quit
requires: [ 'dw_nw', 'titans_mitt' ]
location: [ 154, 273 ]
magic_bat:
name: 'Magic Bat'
requires: any: [ 'hammer', 'dw_nw_mirror' ], all: 'magic_powder'
location: [ 165, 287 ]
sanctuary_bonk_rocks:
name: "Sanctuary Bonk Rocks"
requires: 'pegasus_boots'
location: [ 199, 149 ]
sanctuary:
name: 'Sanctuary'
location: [ 235, 136 ]
castle_graveyard:
name: 'Castle Under Graveyard'
items: 3
requires: 'break_weak_wall'
location: [ 265, 149 ]
graveyard_cave:
name: 'Graveyard Cave'
requires: [ 'dw_nw', 'mirror' ]
location: [ 291, 140 ]
kings_tomb:
name: "King's Tomb"
requires: [ 'pegasus_boots', { any: [ 'titans_mitt', [ 'dw_nw', 'mirror' ] ] } ]
location: [ 307, 151 ]
secret_passage:
name: "Secret Passage (Uncle)"
items: 2
location: [ 304, 212 ]
witch:
name: "Witch's Hut"
requires: 'mushroom'
location: [ 409, 171 ]
sahasrala:
name: 'Sahasrala'
requires: 'green_pendant'
location: [ 414, 236 ]
sahasralas_hut:
name: "Sahasrala's Hut"
requires: 'break_weak_wall'
location: [ 414, 220 ]
race_game:
name: "15 Second Game"
requires: 'break_weak_wall'
location: [ 49, 368 ]
library:
name: "Library"
requires: 'pegasus_boots'
visible: true
location: [ 79, 336 ]
haunted_grove:
name: "Haunted Grove"
requires: 'shovel'
location: [ 146, 338 ]
links_house:
name: "Link's House"
location: [ 279, 352 ]
desert_cliff:
name: "Desert Cliff"
requires: any: [ 'book', 'dw_mm_mirror' ]
visible: true
location: [ 12, 464 ]
checkboard_cave:
name: "Checkerboard Cave"
requires: [ "dw_mm", "mirror" ]
location: [ 90, 397 ]
aginahs_cave:
name: "Aginah's Cave"
requires: 'bombs'
location: [ 102, 422 ]
south_of_grove_cave:
name: 'Cave South of Haunted Grove'
requires: [ 'dw_south', 'mirror' ]
location: [ 136, 422 ]
bombos_pedestal:
name: "Bombos Pedestal"
requires: [ "book", "dw_south", "mirror", "pedestal_sword" ]
visible: [ "book", "dw_south", "mirror" ]
location: [ 112, 470 ]
dam:
name: "Dam"
items: 2
location: [ 240, 478 ]
mini_moldorm_cave:
name: "Mini Moldorm Cave"
items: 5
requires: "bombs"
location: [ 334, 480 ]
hylia_island:
name: "Hylia Island"
requires: [ "dw_south", "moon_pearl", "flippers", "mirror" ]
# You can't actually see it from shore
visible: 'swimming'
location: [ 373, 426 ]
ice_rod_cave:
name: "Ice Rod Cave"
requires: "bombs"
location: [ 458, 395 ]
hobo:
name: 'Hobo Under the Bridge'
requires: 'swimming'
location: [ 358, 354 ]
# DARK WORLD LOCATIONS #######################################################
bumper_cave:
name: 'Bumper Cave'
requires: [ 'dw_nw', 'moon_pearl', 'cape' ]
visible: 'dw_nw'
location: [ 695, 77 ]
spike_cave:
name: 'Spike Cave'
requires: [ 'moon_pearl', 'hammer', 'dw_dm_west' ]
location: [ 805, 73 ]
hookshot_cave_full:
name: 'Hookshot Cave (three chests)'
requires: [ 'moon_pearl', 'dw_dm_east', 'glove', 'hookshot' ]
items: 3
hookshot_cave_bonk:
name: 'Hookshot Cave (bottom chest)'
requires: any: [ 'pegasus_boots', 'hookshot' ], all: [ 'moon_pearl', 'dw_dm_east', 'glove' ]
hookshot_cave:
name: 'Hookshot Cave'
merge: [ 'hookshot_cave_full', 'hookshot_cave_bonk' ]
location: [ 938, 33 ]
death_mountain_cave:
name: 'Death Mountain Cave'
items: 2
requires: [ 'dw_dm_east', 'moon_pearl' ]
location: [ 943, 74 ]
catfish:
name: 'Catfish'
requires: [ 'dark_world', 'moon_pearl', 'glove' ]
location: [ 969, 88 ]
chest_game:
name: 'Chest Game'
requires: [ 'moon_pearl', 'dw_nw' ]
location: [ 537, 239 ]
doorless_hut:
name: 'Doorless Hut'
requires: [ 'moon_pearl', 'dw_nw', 'bombs' ]
location: [ 568, 298 ]
c_house:
name: 'C-Shaped House'
requires: [ 'moon_pearl', 'dw_nw' ]
location: [ 618, 246 ]
peg_cave:
name: 'Peg Cave'
# Don't bother listing dw_nw as a prereq because the titans_mitt gets you
# there anyway
requires: [ 'moon_pearl', 'titans_mitt', 'hammer' ]
location: [ 674, 309 ]
purple_chest:
name: 'Locked Chest'
# This is the only item that depends on a different location. Weird!
requires: [ 'blacksmiths' ]
location: [ 666, 272 ]
fat_fairy:
name: 'Fat Fairy'
requires: [ 'moon_pearl', 'superbomb', 'dark_world' ]
location: [ 752, 248 ]
pyramid_ledge:
name: 'Pyramid Ledge'
requires: 'dark_world'
location: [ 808, 230 ]
digging_game:
name: 'Digging Game'
requires: [ 'dw_south', 'moon_pearl' ]
rupees: 80
location: [ 540, 354 ]
stumpy:
name: 'Flute Kid'
requires: [ 'dw_south', 'moon_pearl' ]
location: [ 669, 350 ]
mini_myre_cave:
name: 'Mini Myre Cave'
requires: 'dw_mm'
items: 2
location: [ 531, 409 ]
hype_cave:
name: 'Hype Cave'
items: 5
requires: [ 'dw_south', 'moon_pearl', 'bombs' ]
location: [ 818, 399 ]
superbomb:
name: 'Superbomb'
type: 'superbomb'
rupees: 100
location: [ 791, 352 ]
requires: [ 'dw_south', 'moon_pearl', 'crystal_5', 'crystal_6' ]
dungeons:
# These are almost locations, but with a few extra fields.
#
# enter - rule for requirements to (meaningfully) enter the dungeon
# (meaningfully in this case basically means "and get past the first
# room" - specific examples are flippers for Swamp Palace and a way to
# defeat the Freezor in the Ice Palace)
# boss - information about the boss:
# name - the boss's name
# defeat - requirements for defeating the boss
# access - requirements for even accessing the boss (if any)
# items - array describing all items within the dungeon. Names should be
# the names as defined in the spoiler file.
# keys - number of keys that are in the random shuffle
# notInPool - if given, a list (or single string) of generic dungeon items
# that happen to be missing from the item pool and should not count against
# chests
hyrule_castle_escape:
name: "Hyrule Castle Escape"
# This currently has no location and is therefore not shown on the map.
items: [
# Listed elsewhere:
# "Sanctuary"
# { name: "Secret Room - Left", access: 'break_weak_wall' }
# { name: "Secret Room - Middle", access: 'break_weak_wall' }
# { name: "Secret Room - Right", access: 'break_weak_wall' }
{ name: "Dark Cross", access: 'light' }
{ name: "Boomerang Chest", access: 'weapon' }
{ name: "Map Chest", access: 'weapon' }
{ name: "Zelda's Cell", access: 'weapon' }
# Also listed elsewhere:
# { name: "Link's Uncle", type: "uncle" }
# "Secret Passage"
]
keys: 1
# Compass not only isn't in the pool, it doesn't exist.
notInPool: [ 'compass', 'bigKey' ]
eastern_palace:
name: "Eastern Palace"
location: [ 490, 198 ]
boss:
name: "Armos Knights"
# Armos don't require the bow. However, there is a Red Eyegore blocking
# access to them, and it DOES require the bow.
defeat: any: [ 'weapon', 'ice_rod' ]
access: [ 'bow_and_arrows', 'light' ]
items: [
"Compass Chest"
{ name: "Big Chest", type: "big" }
"Cannonball Chest"
{ name: "Big Key Chest", access: "light" }
"Map Chest"
]
keys: 0
desert_palace:
name: "Desert Palace"
location: [ 37, 408 ]
enter: any: [ 'book', 'dw_mm_mirror' ]
boss:
name: "Lanmolas"
defeat: any: [ 'weapon', 'ice_rod' ]
access: [ 'light_torch', 'glove' ]
items: [
{ name: "Big Chest", type: "big" }
"Map Chest"
{ name: "Torch", type: "dash" }
{ name: "Big Key Chest", access: "defeat_popo" }
"Compass Chest"
]
keys: 1
heras_tower:
name: "Hera's Tower"
location: [ 287, 16 ]
enter: any: [ [ 'hookshot', 'hammer'], 'mirror' ], all: [ 'dm_west', 'flip_switch' ]
boss:
name: "Moldorm"
defeat: any: [ "sword", "hammer" ]
items: [
{ name: "Big Key Chest", access: "light_torch" }
{ name: "Basement Cage", type: "standing" }
"Map Chest"
{ name: "Compass Chest", afterBigKey: true }
{ name: "Big Chest", type: "big" }
]
keys: 1
castle_tower:
name: "Castle Tower"
location: [ 255, 203 ]
enter: any: [ 'master_sword', 'cape' ]
boss:
name: "Agahnim"
prize: false
defeat: any: [ 'sword', 'net' ]
access: [ 'cut_drapes', 'light' ]
afterBigKey: false
items: [
{ name: "Castle Tower - Room 03", access: "weapon" }
{ name: "Castle Tower - Dark Maze", access: [ "light", "weapon" ] }
]
keys: 2
notInPool: [ 'map', 'compass', 'bigKey' ]
dark_palace:
name: "Palace of Darkness"
location: [ 1002, 200 ]
# Also requires 110 rupees
enter: [ 'dark_world', 'moon_pearl' ]
boss:
name: "Helmasaur King"
# I'm a bit unclear on what weapons are useful against him, but the
# hammer/bow and arrow requirement for getting there means you can
# definitely defeat him.
defeat: [ 'sword', 'bow_and_arrows' ]
access: [ 'light', 'hammer', 'bow_and_arrows' ]
items: [
"Shooter Room"
{ name: "Big Key Chest", access: "bombs" }
{ name: "The Arena - Ledge", access: [ "bow_and_arrows", "bombs" ] }
"The Arena - Bridge" # Technically behind a key
"Stalfos Basement" # Also behind a key
"Map Chest"
{ "Big Chest", type: "big", access: [ "light", "bombs" ] }
"Compass Chest" # Not sure how many keys are needed
"Harmless Hellway"
{ name: "Dark Basement - Left", access: "light" }
{ name: "Dark Basement - Right", access: "light" }
{ name: "Dark Maze - Top", access: "light" }
{ name: "Dark Maze - Bottom", access: "light" }
]
keys: 6
swamp_palace:
name: "Swamp Palace"
location: [ 752, 478 ]
enter: [ 'dw_south', 'mirror', 'flippers' ]
boss:
name: "Arrghus"
defeat: any: [ 'sword', 'hammer' ], all: 'hookshot'
access: 'hookshot'
afterBigKey: false
items: [
"Entrance"
# All the "hammer" access requirements are because the first flood control
# is behind mole-peg-things
{ name: "Big Chest", type: "big", access: 'hammer' }
{ name: "Big Key Chest", access: 'hammer' }
{ name: "Map Chest", access: 'hammer' }
{ name: "West Chest", access: 'hammer' }
{ name: "Compass Chest", access: 'hammer' }
{ name: "Flooded Room - Left", access: 'hammer' }
{ name: "Flooded Room - Right", access: 'hammer' }
{ name: "Waterfall Room", access: 'hammer' }
]
keys: 1
skull_woods:
name: "Skull Woods"
location: [ 531, 26 ]
enter: 'dw_nw'
boss:
name: "Mothula"
defeat: 'fire_rod'
access: 'cut_drapes'
afterBigKey: false
items: [
{ name: "Big Chest", type: "big", access: 'bombs' }
"Big Key Chest"
"Compass Chest"
"Map Chest"
{ name: "Bridge Room", access: "fire_rod" }
"Pot Prison"
"Pinball Room"
]
keys: 3
thieves_town:
name: "Thieves' Town"
location: [ 576, 249 ]
enter: [ 'dw_nw', 'glove' ]
boss:
name: "Blind"
defeat: any: [ 'sword', 'hammer', 'red_cane', 'blue_cane' ]
# This is pointless because there are bomb pots in the room, but whatever:
access: 'bombs'
items: [
{ name: "Attic", access: "flip_switch" }
"Big Key Chest"
"Map Chest"
"Compass Chest"
"Ambush Chest"
{ name: "Big Chest", type: "big", access: [ "flip_switch", "hammer" ] }
{ name: "Blind's Cell", access: "flip_switch" }
]
keys: 1
ice_palace:
name: "Ice Palace"
location: [ 920, 442 ]
enter: [ 'moon_pearl', 'flippers', 'titans_mitt', 'melt_ice' ]
boss:
name: "Kholdstare"
access: 'hammer'
defeat: [ 'melt_ice', 'weapon' ]
items: [
# Some of these access rules are basically useless since they're met by
# being able to access the palace at all. However they're kept on the
# off-chance this ever hooks in to the level where it can see magic
# levels and inventory capacity to deal with toggling magic items on and
# off.
{ name: "Big Key Chest", access: 'hammer' }
{ name: "Compass Chest", access: 'defeat_penguins' }
{ name: "Map Chest", access: 'hammer' }
{ name: "Spike Room", access: 'defeat_penguins' }
{ name: "Freezor Chest", access: 'melt_ice' }
"Iced T Room"
{ name: "Big Chest", access: any: [ 'bombs', 'hookshot' ] }
]
keys: 2
misery_mire:
name: "Misery Mire"
location: [ 550, 441 ]
medallion: 'mire_medallion'
enter: [ 'dw_mm', 'use_mire_medallion', { any: [ 'hookshot', 'pegasus_boots' ] } ]
boss:
name: "Vitreous"
access: 'red_cane'
defeat: any: [ 'sword', 'bow_and_arrows' ]
items: [
{ name: "Big Chest", type: "big" }
"Main Lobby"
{ name: "Big Key Chest", access: "light_torch" }
{ name: "Compass Chest", access: "light_torch" }
"Bridge Chest"
"Map Chest"
"Spike Chest"
]
keys: 3
turtle_rock:
name: "Turtle Rock"
location: [ 994, 43 ]
medallion: 'turtle_medallion'
enter: [ 'dw_dm_east', 'titans_mitt', 'hammer', 'use_turtle_medallion', 'red_cane' ]
boss:
name: 'Trinexx'
defeat: [ 'ice_rod', 'fire_rod', 'weapon' ]
access: [ 'red_cane', 'light' ]
items: [
{ name: "Chain Chomps", access: 'flip_switch_past_barrier' }
"Compass Chest"
{ name: "Roller Room - Left", access: 'fire_rod' }
{ name: "Roller Room - Right", access: 'fire_rod' }
{ name: "Big Chest", type: "big" }
"Big Key Chest"
{ name: "Crystaroller Room", access: 'flip_switch_past_barrier' }
{ name: "Eye Bridge - Bottom Left", access: 'avoid_laser' }
{ name: "Eye Bridge - Bottom Right", access: 'avoid_laser' }
{ name: "Eye Bridge - Top Left", access: 'avoid_laser' }
{ name: "Eye Bridge - Top Right", access: 'avoid_laser' }
]
keys: 4
ganons_tower:
name: "Ganon's Tower"
# Note that this is "east" because "west" doesn't include the tower, despite
# the fact it's on the "west" side.
enter: [ 'dw_dm_east', 'all_crystals' ]
location: [ 800, 0 ]
boss:
name: "Agahnim 2"
prize: false
items: [
{ name: "Bob's Torch", type: "dash" },
{ name: "DMs Room - Top Left", access: 'gt_dms_room' },
{ name: "DMs Room - Top Right", access: 'gt_dms_room' },
{ name: "DMs Room - Bottom Left", access: 'gt_dms_room' },
{ name: "DMs Room - Bottom Right", access: 'gt_dms_room' },
{ name: "Randomizer Room - Top Left", access: 'gt_randomizer_room' },
{ name: "Randomizer Room - Top Right", access: 'gt_randomizer_room' },
{ name: "Randomizer Room - Bottom Left", access: 'gt_randomizer_room' },
{ name: "Randomizer Room - Bottom Right", access: 'gt_randomizer_room' },
{ name: "Firesnake Room", access: 'gt_basement_left' },
{ name: "Map Chest", access: any: [ 'hookshot', 'pegasus_boots' ], all: 'hammer' },
{ name: "Big Chest", type: "big", access: 'gt_basement_clear' },
"Hope Room - Left"
"Hope Room - Right"
{ name: "Bob's Chest", access: 'gt_basement_clear' },
{ name: "Tile Room", access: 'red_cane' },
{ name: "Compass Room - Top Left", access: 'gt_basement_right' },
{ name: "Compass Room - Top Right", access: 'gt_basement_right' },
{ name: "Compass Room - Bottom Left", access: 'gt_basement_right' },
{ name: "Compass Room - Bottom Right", access: 'gt_basement_right' },
{ name: "Big Key Chest", access: 'gt_big_key_room' },
{ name: "Big Key Room - Left", access: 'gt_big_key_room' },
{ name: "Big Key Room - Right", access: 'gt_big_key_room' },
{ name: "Mini Helmasaur Room - Left", access: 'bow_and_arrows', afterBigKey: true },
{ name: "Mini Helmasaur Room - Right", access: 'bow_and_arrows', afterBigKey: true },
{ name: "Pre-Moldorm Chest", access: [ 'bow_and_arrows', 'bombs' ], afterBigKey: true },
{ name: "Moldorm Chest", access: [ 'bow_and_arrows', 'hookshot', 'sword', 'bombs' ], afterBigKey: true }
]
keys: 4
slots:
# Slots just defines names for "default" slots that can then be referred to in
# the layout. It's also intended to allow certain modes to override slots
# (namely to allow swordless to make the sword blank).
#
# null indicates an empty space (nothing equipped).
#
# Note that these are not rules: "sword" is already a rule that means "has any
# sword" and "glove" is a rule for "has any glove."
sword:
name: 'Sword'
items: [ null, "sword_1", "sword_2", "sword_3", "sword_4" ]
armor:
name: 'Armor'
items: [ "green_jerkin", "blue_mail", "red_mail" ]
shield:
name: 'Shield'
items: [ null, "shield_1", "shield_2", "shield_3" ]
glove:
name: 'Gloves'
items: [ null, "power_glove", "titans_mitt" ]
bow_and_arrows:
name: 'Bow'
items: [ null, 'bow_and_wooden_arrows', 'bow_and_silver_arrows' ]
# This is used by dungeons to sort through possible entry medallions.
medallions:
name: 'Medallion'
items: [ 'bombos', 'ether', 'quake' ]
prizes:
# Prizes define what is awarded when a dungeon completes that is NOT the boss
# item drop. The order here is the order they're cycled through in the UI.
crystal: [ 'crystal_1', 'crystal_2', 'crystal_3', 'crystal_4', 'crystal_7' ]
superbomb_crystal: [ 'crystal_5', 'crystal_6' ]
green_pendant: [ 'green_pendant' ]
pendant: [ 'red_pendant', 'blue_pendant' ]
layout:
# Layout defines the default layout if nothing overrides it.
# Items defines the space that items are in. Items below are in "status
# screen" order, with items that would share a slot being in arrays.
# Note: "bow_and_arrows" is a "special" UI that pulls from "slots" for what it
# should contain by default.
items: [
[ "bow_and_arrows", [ "boomerang", "magic_boomerang" ], "hookshot", "bombs", [ "mushroom", "magic_powder" ] ]
[ "fire_rod", "ice_rod", "bombos", "ether", "quake" ]
[ "lamp", "hammer", [ "shovel", "flute" ], "net", "book" ]
[ "bottle", "red_cane", "blue_cane", "cape", "mirror" ]
]
# Equipment is simply a list of either slot IDs (which mask item IDs if they
# happen to share them) or item IDs (if an item is present or not).
equipment: [
"sword", "armor", "shield", "pegasus_boots", "glove", "flippers",
"moon_pearl"
]
# List of dungeons. Each key is actually treated as a new row, and multiple
# keys can be provided.
dungeons:
light: [
"hyrule_castle_escape"
"castle_tower"
"eastern_palace"
"desert_palace"
"heras_tower"
]
dark: [
"dark_palace"
"swamp_palace"
"skull_woods"
"thieves_town"
"ice_palace"
"misery_mire"
"turtle_rock"
"ganons_tower"
]
maps: [ "light-world", "dark-world" ]
footer: { }
defaults: [
"bombs"
]
# The various known logics.
logics:
standard:
# This is the generic logic. It changes nothing, but needs to exist so that
# users can select it.
name: "Standard"
# Changes for Swordless logic
swordless:
name: "Swordless"
rules:
use_medallion:
# In Swordless, the medallions can always be used within the logic
# defined in here (there are special Medallion Tiles)
requires: true
cut_drapes:
# Drapes don't need cutting in swordless.
requires: true
melt_ice:
# Swordless allows the Ice Palace to completed with just Bombos even
# without the hammer.
requires: any: [ 'fire_rod', 'bombos' ]
# Hammer isn't required to open dungeons
use_mire_medallion:
name: "Use Misery Mire Medallion"
requires: 'mire_medallion'
use_turtle_medallion:
name: "Use Turtle Rock Medallion"
requires: 'turtle_medallion'
slots:
# Override sword to not be selectable
sword: [ null ]
# Changes for retro logic
retro:
name: "Retro"
rules:
# Bow now needs a quiver to be useful
bow_and_arrows:
requires: any: [ 'bow_and_wooden_arrows', 'bow_and_silver_arrows' ], all: 'rupee_quiver'
none:
name: 'No Logic'
defaults: [ 'bombs', 'pegasus_boots' ]
# Probably going to just special-case this rather than have it rewrite all the rules to true
version:
date: '2020-07-25'
  alttpr: 'v31'
# LTTP database. Note that this is "compiled" into a JavaScript file that is
# loaded.
# The items list is basically a list of names for collectibles. Other data is
# listed for them, but it's never used. These define the "base states" that
# rules are based on.
items:
sword_1:
name: "Fighter's Sword"
slot: "sword"
type: "equipment"
sword_2:
name: "Master Sword"
slot: "sword"
type: "equipment"
upgrades: "sword_1"
sword_3:
name: "Tempered Sword"
slot: "sword"
type: "equipment"
upgrades: "sword_2"
sword_4:
name: "Golden Sword"
slot: "sword"
type: "equipment"
upgrades: "sword_3"
shield_1:
name: "Fighter's Shield"
slot: "shield"
type: "equipment"
shield_2:
name: "Red Shield"
slot: "shield"
type: "equipment"
upgrades: "shield_1"
shield_3:
name: "Mirror Shield"
slot: "shield"
type: "equipment"
upgrades: "shield_2"
green_jerkin:
name: "Green Jerkin"
slot: "armor"
type: "equipment"
"default": true
blue_mail:
name: "Blue Mail"
slot: "armor"
type: "equipment"
upgrades: "green_jerkin"
red_mail:
name: "Red Mail"
slot: "armor"
type: "equipment"
upgrades: "blue_mail"
pegasus_boots:
name: "Pegasus Shoes"
type: "equipment"
power_glove:
name: "Power Glove"
slot: "glove"
type: "equipment"
titans_mitt:
name: "Titan's Mitt"
slot: "glove"
type: "equipment"
upgrades: "power_glove"
flippers:
name: "Zora's Flippers"
type: "equipment"
moon_pearl:
name: "Moon Pearl"
type: "equipment"
# Just the bow - necessary for retro mode
bow:
name: "Bow"
# For retro mode:
rupee_quiver:
name: "Rupee Quiver"
# The bow with wooden arrows
bow_and_wooden_arrows:
name: "Bow & Wooden Arrows"
# The bow with silver arrows
bow_and_silver_arrows:
name: "Bow & Silver Arrows"
boomerang:
name: "Boomerang"
magic_boomerang:
name: "Magical Boomerang"
upgrades: "boomerang"
hookshot:
name: "Hookshot"
bombs:
name: "Bomb"
mushroom:
name: "Mushroom"
magic_powder:
name: "Magic Powder"
fire_rod:
name: "Fire Rod"
ice_rod:
name: "Ice Rod"
bombos:
name: "Bombos Medallion"
ether:
name: "Ether Medallion"
quake:
name: "Quake Medallion"
lamp:
name: "Lantern"
hammer:
name: "Magic Hammer"
shovel:
name: "Shovel"
flute:
name: "Flute"
upgrades: "shovel"
net:
name: "Bug-Catching Net"
book:
name: "Book of Mudora"
bottle:
name: "Bottle"
stacks: 4
# This isn't used but is listed anyway on the off chance it ever gets used:
contains: [
"Medicine of Life", "Medicine of Magic", "Medicine of Life and Magic",
"Fairy", "Bee", "Golden Bee"
]
red_cane:
name: "Cane of Somaria"
blue_cane:
name: "Cane of Byrna"
cape:
name: "Magic Cape"
mirror:
name: "Magic Mirror"
green_pendant:
name: "Pendant of Courage"
type: "pendant"
red_pendant:
name: "Pendant of Wisdom"
type: "pendant"
blue_pendant:
name: "Pendant of Power"
type: "pendant"
crystal_1:
name: "Crystal 1"
type: "crystal"
crystal_2:
name: "Crystal 2"
type: "crystal"
crystal_3:
name: "Crystal 3"
type: "crystal"
crystal_4:
name: "Crystal 4"
type: "crystal"
crystal_5:
name: "Crystal 5"
type: "crystal"
superbomb: true
crystal_6:
name: "Crystal 6"
type: "crystal"
superbomb: true
crystal_7:
name: "Crystal 7"
type: "crystal"
rules:
# RULES ######################################################################
# These define basic rules.
#
# Rule definitions are used just about everywhere, so here's the basics:
#
# * All rules exist in an environment. Each key for each rule and each key for
# each item and location exists in this environment and can be used in a
# rule.
# * A rule definition can contain any of the following:
# - A boolean (true, false) that sets the rule to always be that value
# - A string, that evaluates to whatever that string is in the environment
# - An object, that contains the following fields:
# * any: a list of things to logical OR together
# * all: a list of things to logical AND together
# The evaluated value of any and all will be ANDed together to create
# the final evaluated value of the rule.
# Each "thing" in that list can either be a string (to look up a
# variable) or itself a rule using the same definition.
# - An array, which is treated as if it were under the "all" key above.
#
# Each rule has a name and description. Currently the description is ignored.
# Names, if provided, are used when generating tooltips that reference the
# rule. Instead of reporting the subrule, the rule's name is used instead.
light:
# Really this is a meta-item. It exists to allow "light source" to be
# flagged separately from "fire source" for anyone who wants to attempt
# dark room navigation.
name: 'Light Source'
description: 'A source of light.'
requires: 'lamp'
sword:
name: "Any Sword"
requires: any: [ 'sword_1', 'sword_2', 'sword_3', 'sword_4' ]
master_sword:
name: "Master Sword (or better)"
# Also technically full health
requires: any: [ 'sword_2', 'sword_3', 'sword_4' ]
bow_and_arrows:
name: "Bow & Arrows"
requires: any: [ 'bow_and_wooden_arrows', 'bow_and_silver_arrows' ]
# Something that can light a torch.
light_torch:
name: 'Light Torch'
description: 'The ability to light a torch.'
requires: any: [ 'lamp', 'fire_rod' ]
weapon:
name: 'Any Weapon'
description: 'Access to a weapon'
requires: any: [ 'sword', 'bow_and_arrows', 'bombs', 'fire_rod', 'hammer',
'red_cane', 'blue_cane' ]
swimming:
name: 'Swimming'
description: 'Either Flippers or accessible via Fake Flippers'
requires: 'flippers'
# Something that can melt ice, required for the Ice Palace.
melt_ice:
name: 'Melt Ice (Fire Rod/Usable Bombos)'
description: 'The ability to melt ice: specifically Freezors and Kholdstare.'
requires: any: [ 'fire_rod', 'use_bombos' ]
defeat_popo:
name: "Defeat Popo"
description: "Ability to defeat Popos"
requires: any: [ 'weapon', 'hookshot' ]
defeat_penguins:
name: "Defeat Penguins"
description: "Ability to defeat Penguins"
# Literally the same as above, but :shrug:
requires: any: [ 'weapon', 'hookshot' ]
avoid_laser:
name: "Avoid Eye Lasers"
description: "Please do not look directly at the eye laser."
requires: any: [ 'cape', 'blue_cane', 'shield_3' ]
# -- Use Medallions --
# These rules are to indicate a medallion may be used.
use_medallion:
name: "Use Medallion"
description: 'The ability to use a medallion.'
# In Swordless, this becomes just the hammer.
requires: any: [ 'sword_1', 'sword_2', 'sword_3', 'sword_4' ]
# This rule is effectively for replacing in Swordless.
cut_drapes:
name: "Cut Drapes"
requires: 'sword'
use_bombos:
name: "Use Bombos Medallion"
requires: [ 'use_medallion', 'bombos' ]
use_ether:
name: "Use Ether Medallion"
requires: [ 'use_medallion', 'ether' ]
use_quake:
name: "Use Quake Medallion"
requires: [ 'use_medallion', 'quake' ]
mire_medallion:
name: "Misery Mire Medallion"
# The Misery Mire medallion is randomized. This rule is the "default": if
# you have all, you can use it.
requires: [ 'bombos', 'ether', 'quake' ]
turtle_medallion:
name: "Turtle Rock Medallion"
# The Turtle Rock medallion is randomized. This rule is the "default": if
# you have all, you can use it.
requires: [ 'bombos', 'ether', 'quake' ]
# NOTE: You do NOT need anything to use the medallions for dungeon unlock
# purposes in swordless mode.
use_mire_medallion:
name: "Use Misery Mire Medallion"
requires: [ 'use_medallion', 'mire_medallion' ]
use_turtle_medallion:
name: "Use Turtle Rock Medallion"
requires: [ 'use_medallion', 'turtle_medallion' ]
flip_switch_past_barrier:
name: 'Flip Crystal Switch Past Barrier'
description: 'Something that can flip a crystal switch past a barrier.'
requires: any: [ 'boomerang', 'magic_boomerang', 'bombs', 'fire_rod',
'ice_rod', 'bow_and_arrows', 'red_cane', 'blue_cane'
]
flip_switch:
name: 'Flip Crystal Switch'
description: 'Something that can flip a crystal switch, assuming nothing is in the way.'
requires: any: [ 'flip_switch_past_barrier', 'sword', 'hookshot', 'hammer' ]
# Whatever's required to activate a pedestal. This is mainly here so that it
# can be replaced with a rule that says "hammer" in swordless runs.
pedestal_sword:
name: 'Pedestal Sword'
requires: any: [ 'sword_2', 'sword_3', 'sword_4' ]
glove:
name: 'Power Glove (or better)'
requires: any: [ 'power_glove', 'titans_mitt' ]
break_weak_wall:
name: "Bombs/Pegasus Shoes"
requires: any: [ 'bombs', 'pegasus_boots' ]
all_crystals:
name: "All Crystals"
requires: [ 'crystal_1', 'crystal_2', 'crystal_3', 'crystal_4', 'crystal_5',
'crystal_6', 'crystal_7' ]
# Ganon's Tower rules
gt_dms_room:
name: "Ganon's Tower - DMs Room"
description: "A room in Ganon's Tower"
requires: [ 'hammer', 'hookshot' ]
gt_randomizer_room:
name: "Ganon's Tower - Randomizer Room"
description: "A room in Ganon's Tower"
requires: [ 'hammer', 'hookshot', 'bombs' ]
gt_basement_left:
name: "Ganon's Tower - Left Side"
description: "Left side of the basement of Ganon's Tower"
requires: any: [ 'red_cane', 'bombs' ], all: [ 'hookshot', 'hammer' ]
gt_basement_right:
name: "Ganon's Tower - Right Side"
description: "Right side of the basement of Ganon's Tower"
requires: [ 'fire_rod', 'red_cane' ]
gt_big_key_room:
name: "Ganon's Tower - Big Key Room"
description: "The room with the Big Key in Ganon's Tower"
requires: [ 'gt_basement_clear', 'bombs' ]
gt_basement_clear:
name: "Ganon's Tower - Basement"
description: "Entering the common area with the Big Chest"
requires: any: [ 'gt_basement_left', 'gt_basement_right' ]
# REGIONS ######################################################################
#
# Regions define general areas of the map.
regions:
light_world:
name: "Light World"
points: "0,0 191,0 191,128 355,128"
dm_west:
name: "Death Mountain (West)"
requires: any: [ [ 'light', 'glove' ], 'flute' ]
dm_east:
name: "Death Mountain (East)"
requires: [ 'dm_west', { any: [ 'hookshot', [ 'hammer', 'mirror' ] ] } ]
turtle_rock_top:
name: 'Turtle Rock Medallion Platform'
# Access to the medallion platform.
requires: [ 'dm_east', 'titans_mitt', 'hammer' ]
dark_world:
name: 'Dark World'
# "The area of the Dark World you're supposed to start in" - this does NOT
# include the Moon Pearl. This rule gets crazy fast.
requires: any: [
# Simple enough: Defeat Agahnim, create portal
'castle_portal',
# Also simple: go through the portal near Flute Spot 5
[ 'eastern_portal', 'moon_pearl' ],
# Somewhat more complicated: go through Kakariko using the Titan's Mitt
# and have the flippers, allowing you to swim there.
[ 'moon_pearl', 'titans_mitt', 'flippers' ]
]
dw_nw:
name: 'Dark World (Northwest)'
# Basically "Theives Town and surroundings." Also includes all of Skull
# Woods. Note that the Moon Pearl is required to leave the starting area.
requires: any: [
'kakariko_portal',
# Access is also possible with any way to the general Dark World + the
# hookshot.
[ 'dark_world', 'moon_pearl', 'hookshot' ] ]
dw_nw_mirror:
name: 'Dark World (Northwest) and Magic Mirror'
requires: [ 'dw_nw', 'mirror' ]
dw_south:
name: 'Dark World (South)'
# The portion of the Dark World that's south of Thieve's Town and south
# of the Pyramid of Power. Always accessible if you can get to dw_nw
# (you can just drop down a ledge), or if you have Dark World access and the
# Magic Hammer.
requires: any: [ [ 'dark_world', 'hammer' ], 'dw_nw' ]
dw_mm:
name: 'Misery Mire (exterior)'
requires: [ 'titans_mitt', 'flute' ]
dw_mm_mirror:
name: 'Misery Mire (exterior) with Magic Mirror'
requires: [ 'dw_mm', 'mirror' ]
dw_dm_west:
name: 'Dark World Death Mountain (West)'
# The area you can get to as Bunny Link (note: does NOT include Ganon's
# Tower)
requires: 'dm_west'
dw_dm_east:
name: 'Dark World Death Mountain (East)'
# Basically the rest of Death Mountain from above. Also covers the
# Turtle Rock entrance.
requires: [ 'dm_east', 'titans_mitt' ]
locations:
# Locations are points on the map. They either indicate places that items can
# be retrieved or other points of interest.
#
# Locations have the following fields:
# name - the human-readable name of the location
# required - a rule defining the requirements for accessing the location,
# defaults to true (always available)
# visible - a rule defining what is required for a location to be "visible"
# which really only means "can see the item there" which defaults to
# false
# location - set of x,y coordinates for a location. Indicates where the
# location is on the 1024x512 map, where x between 0-511 are in the
# Light World and x between 512-1023 are in the Dark World. (Note that
# this makes them effectively tiles.)
# LIGHT WORLD LOCATIONS ######################################################
## DARK WORLD TELEPORTS ##
# These are not item locations, just areas where you can teleport to the
# Dark World from the Light World.
kakariko_portal:
name: 'Kakariko Village Portal'
type: 'portal'
# This portal is only useful with the Moon Pearl.
requires: any: [ 'titans_mitt', [ 'glove', 'hammer' ] ], all: 'moon_pearl'
location: [ 46, 172 ]
castle_portal:
name: 'Hyrule Castle Gate Portal'
# This is pretty much the only portal that doesn't instantly require the
# Moon Pearl
type: 'portal'
requires: 'castle_tower.cleared'
location: [ 256, 269 ]
eastern_portal:
name: 'Eastern Palace Portal (Flute 5)'
type: 'portal'
requires: [ 'hammer', 'glove', 'moon_pearl' ]
location: [ 498, 366 ]
swamp_portal:
name: 'Swamp Portal'
type: 'portal'
requires: [ 'hammer', 'glove', 'moon_pearl' ]
location: [ 232, 394 ]
desert_portal:
name: 'Desert Portal'
type: 'portal'
# Strictly speaking you don't instantly require the Moon Pearl for this one.
requires: [ 'flute', 'titans_mitt' ]
location: [ 20, 502 ]
ice_portal:
name: 'Ice Palace Portal'
type: 'portal'
# Strictly speaking you don't instantly dead-end without the Moon Pearl.
# Instead you dead-end after entering the palace and being unable to kill
# the Freezor.
requires: [ 'swimming', 'titans_mitt', 'moon_pearl' ]
location: [ 406, 444 ]
# It exists, technically:
spectacle_rock_portal:
name: "Spectacle Rock Portal"
type: 'portal'
requires: 'dm_west'
location: [ 288, 46 ]
dm_east_portal:
name: "Death Mountain Eastern Portal"
type: 'portal'
requires: [ 'dm_east', 'titans_mitt' ]
location: [ 394, 116 ]
turtle_rock_portal:
name: "Turtle Rock Portal"
type: 'portal'
requires: [ 'dm_east', 'titans_mitt', 'hammer' ]
location: [ 480, 28 ]
pedestal:
    name: 'Master Sword Pedestal'
requires: [ 'red_pendant', 'blue_pendant', 'green_pendant' ]
visible: 'book'
location: [ 21, 26 ]
forest_mushroom:
name: 'Mushroom'
location: [ 61, 44 ]
thieves_cave:
    name: "Thieves' Cave"
location: [ 97, 67 ]
lumberjack_cave:
    name: 'Lumberjack Cave'
requires: [ 'castle_tower.cleared', 'pegasus_boots' ]
visible: true
location: [ 154, 37 ]
ether_pedestal:
name: 'Ether Pedestal'
requires: [ 'dm_west', 'pedestal_sword', 'book' ]
visible: [ 'dm_west', 'book' ]
location: [ 215, 9 ]
old_man:
name: 'Lost Old Man'
requires: [ 'dm_west', 'light' ]
location: [ 229, 119 ]
spectacle_rock_cave:
name: 'Spectacle Rock Cave'
requires: 'dm_west'
location: [ 250, 74 ]
spectacle_rock:
name: 'Spectacle Rock'
requires: [ 'dm_west', 'mirror' ]
visible: 'dm_west'
location: [ 260, 44 ]
floating_island:
name: 'Floating Island'
requires: [ 'dw_dm_east', 'moon_pearl', 'bombs', 'mirror' ]
visible: 'dm_east'
location: [ 417, 12 ]
spiral_cave:
name: 'Spiral Cave'
requires: 'dm_east'
location: [ 408, 46 ]
mimic_cave:
name: 'Mimic Cave'
requires: [ 'moon_pearl', 'mirror', 'use_turtle_medallion', 'dw_dm_east' ]
location: [ 431, 47 ]
paradox_cave_basic:
name: 'Paradox Cave (no bombs)'
items: 5
requires: 'dm_east'
paradox_cave_bombs:
name: 'Paradox Cave (behind bombable wall)'
items: 2
requires: [ 'dm_east', 'bombs' ]
paradox_cave:
name: 'Paradox Cave'
merge: [ 'paradox_cave_basic', 'paradox_cave_bombs' ]
location: [ 441, 110 ]
zora:
name: 'Zora'
requires: any: [ 'glove', 'flippers' ]
rupees: 500
zora_ledge:
name: "Zora's Domain Ledge"
requires: 'flippers'
# If you can get into Zora's Domain, you can see what's on the ledge
visible: 'glove'
zoras_domain:
name: "Zora's Domain"
merge: [ "zora", "zora_ledge" ]
#location: [ 490, 64 ] - intentionally lowered as otherwise it looks like
# it was on Death Mountain
location: [ 490, 72 ]
behind_waterfall:
name: 'Waterfall Fairy'
requires: 'flippers'
items: 2
location: [ 460, 70 ]
well_floor:
name: 'Well Floor'
items: 4
well_cave:
name: 'Well Cave'
requires: 'bombs'
well:
name: 'Well'
merge: [ 'well_floor', 'well_cave' ]
location: [ 11, 217 ]
blinds_block_puzzle:
name: "Blind's House (block puzzle)"
items: 4
blinds_secret:
name: "Blind's House (bombable wall)"
requires: 'bombs'
blinds_house:
name: "Blind's House"
merge: [ 'blinds_block_puzzle', 'blinds_secret' ]
location: [ 65, 215 ]
bottle_vendor:
name: 'Bottle Vendor'
rupees: 100
location: [ 48, 238 ]
chicken_house:
name: 'Chicken House'
requires: 'bombs'
location: [ 50, 277 ]
sick_kid:
    name: 'Sick Kid',
requires: 'bottle'
location: [ 79, 274 ]
tavern:
    name: 'Tavern'
location: [ 81, 290 ]
blacksmiths:
    name: 'Blacksmiths'
# Does NOT require the mirror: the guy will follow after save and quit
requires: [ 'dw_nw', 'titans_mitt' ]
location: [ 154, 273 ]
magic_bat:
name: 'Magic Bat'
requires: any: [ 'hammer', 'dw_nw_mirror' ], all: 'magic_powder'
location: [ 165, 287 ]
sanctuary_bonk_rocks:
name: "Sanctuary Bonk Rocks"
requires: 'pegasus_boots'
location: [ 199, 149 ]
sanctuary:
name: 'Sanctuary'
location: [ 235, 136 ]
castle_graveyard:
name: 'Castle Under Graveyard'
items: 3
requires: 'break_weak_wall'
location: [ 265, 149 ]
graveyard_cave:
name: 'Graveyard Cave'
requires: [ 'dw_nw', 'mirror' ]
location: [ 291, 140 ]
kings_tomb:
name: "King's Tomb"
requires: [ 'pegasus_boots', { any: [ 'titans_mitt', [ 'dw_nw', 'mirror' ] ] } ]
location: [ 307, 151 ]
secret_passage:
name: "Secret Passage (Uncle)"
items: 2
location: [ 304, 212 ]
witch:
name: "Witch's Hut"
requires: 'mushroom'
location: [ 409, 171 ]
sahasrala:
name: 'Sahasrala'
requires: 'green_pendant'
location: [ 414, 236 ]
sahasralas_hut:
name: "Sahasrala's Hut"
requires: 'break_weak_wall'
location: [ 414, 220 ]
race_game:
name: "15 Second Game"
requires: 'break_weak_wall'
location: [ 49, 368 ]
library:
name: "Library"
requires: 'pegasus_boots'
visible: true
location: [ 79, 336 ]
haunted_grove:
name: "Haunted Grove"
requires: 'shovel'
location: [ 146, 338 ]
links_house:
name: "Link's House"
location: [ 279, 352 ]
desert_cliff:
    name: "Desert Cliff"
requires: any: [ 'book', 'dw_mm_mirror' ]
visible: true
location: [ 12, 464 ]
checkboard_cave:
name: "Checkerboard Cave"
requires: [ "dw_mm", "mirror" ]
location: [ 90, 397 ]
aginahs_cave:
name: "Aginah's Cave"
requires: 'bombs'
location: [ 102, 422 ]
south_of_grove_cave:
name: 'Cave South of Haunted Grove'
requires: [ 'dw_south', 'mirror' ]
location: [ 136, 422 ]
bombos_pedestal:
name: "Bombos Pedestal"
requires: [ "book", "dw_south", "mirror", "pedestal_sword" ]
visible: [ "book", "dw_south", "mirror" ]
location: [ 112, 470 ]
dam:
name: "Dam"
items: 2
location: [ 240, 478 ]
mini_moldorm_cave:
name: "Mini Moldorm Cave"
items: 5
requires: "bombs"
location: [ 334, 480 ]
hylia_island:
name: "Hylia Island"
requires: [ "dw_south", "moon_pearl", "flippers", "mirror" ]
# You can't actually see it from shore
visible: 'swimming'
location: [ 373, 426 ]
ice_rod_cave:
name: "Ice Rod Cave"
requires: "bombs"
location: [ 458, 395 ]
hobo:
name: 'Hobo Under the Bridge'
requires: 'swimming'
location: [ 358, 354 ]
# DARK WORLD LOCATIONS #######################################################
bumper_cave:
name: 'Bumper Cave'
requires: [ 'dw_nw', 'moon_pearl', 'cape' ]
visible: 'dw_nw'
location: [ 695, 77 ]
spike_cave:
name: 'Spike Cave'
requires: [ 'moon_pearl', 'hammer', 'dw_dm_west' ]
location: [ 805, 73 ]
hookshot_cave_full:
name: 'Hookshot Cave (three chests)'
requires: [ 'moon_pearl', 'dw_dm_east', 'glove', 'hookshot' ]
items: 3
hookshot_cave_bonk:
name: 'Hookshot Cave (bottom chest)'
requires: any: [ 'pegasus_boots', 'hookshot' ], all: [ 'moon_pearl', 'dw_dm_east', 'glove' ]
hookshot_cave:
name: 'Hookshot Cave'
merge: [ 'hookshot_cave_full', 'hookshot_cave_bonk' ]
location: [ 938, 33 ]
death_mountain_cave:
name: 'Death Mountain Cave'
items: 2
requires: [ 'dw_dm_east', 'moon_pearl' ]
location: [ 943, 74 ]
catfish:
name: 'Catfish'
requires: [ 'dark_world', 'moon_pearl', 'glove' ]
location: [ 969, 88 ]
chest_game:
name: 'Chest Game'
requires: [ 'moon_pearl', 'dw_nw' ]
location: [ 537, 239 ]
doorless_hut:
name: 'Doorless Hut'
requires: [ 'moon_pearl', 'dw_nw', 'bombs' ]
location: [ 568, 298 ]
c_house:
name: 'C-Shaped House'
requires: [ 'moon_pearl', 'dw_nw' ]
location: [ 618, 246 ]
peg_cave:
name: 'Peg Cave'
# Don't bother listing dw_nw as a prereq because the titans_mitt gets you
# there anyway
requires: [ 'moon_pearl', 'titans_mitt', 'hammer' ]
location: [ 674, 309 ]
purple_chest:
name: 'Locked Chest'
# This is the only item that depends on a different location. Weird!
requires: [ 'blacksmiths' ]
location: [ 666, 272 ]
fat_fairy:
name: 'Fat Fairy'
requires: [ 'moon_pearl', 'superbomb', 'dark_world' ]
location: [ 752, 248 ]
pyramid_ledge:
name: 'Pyramid Ledge'
requires: 'dark_world'
location: [ 808, 230 ]
digging_game:
name: 'Digging Game'
requires: [ 'dw_south', 'moon_pearl' ]
rupees: 80
location: [ 540, 354 ]
stumpy:
name: 'Flute Kid'
requires: [ 'dw_south', 'moon_pearl' ]
location: [ 669, 350 ]
mini_myre_cave:
name: 'Mini Myre Cave'
requires: 'dw_mm'
items: 2
location: [ 531, 409 ]
hype_cave:
name: 'Hype Cave'
items: 5
requires: [ 'dw_south', 'moon_pearl', 'bombs' ]
location: [ 818, 399 ]
superbomb:
    name: 'Super Bomb'
type: 'superbomb'
rupees: 100
location: [ 791, 352 ]
requires: [ 'dw_south', 'moon_pearl', 'crystal_5', 'crystal_6' ]
dungeons:
# These are almost locations, but with a few extra fields.
#
# enter - rule for requirements to (meaningfully) enter the dungeon
# (meaningfully in this case basically means "and get past the first
# room" - specific examples are flippers for Swamp Palace and a way to
# defeat the Freezor in the Ice Palace)
# boss - information about the boss:
# name - the boss's name
# defeat - requirements for defeating the boss
# access - requirements for even accessing the boss (if any)
# items - array describing all items within the dungeon. Names should be
# the names as defined in the spoiler file.
# keys - number of keys that are in the random shuffle
# notInPool - if given, a list (or single string) of generic dungeon items
# that happen to be missing from the item pool and should not count against
# chests
hyrule_castle_escape:
name: "Hyrule Castle Escape"
# This currently has no location and is therefore not shown on the map.
items: [
# Listed elsewhere:
# "Sanctuary"
# { name: "Secret Room - Left", access: 'break_weak_wall' }
# { name: "Secret Room - Middle", access: 'break_weak_wall' }
# { name: "Secret Room - Right", access: 'break_weak_wall' }
{ name: "Dark Cross", access: 'light' }
{ name: "Boomerang Chest", access: 'weapon' }
{ name: "Map Chest", access: 'weapon' }
{ name: "Zelda's Cell", access: 'weapon' }
# Also listed elsewhere:
# { name: "Link's Uncle", type: "uncle" }
# "Secret Passage"
]
keys: 1
# Compass not only isn't in the pool, it doesn't exist.
notInPool: [ 'compass', 'bigKey' ]
eastern_palace:
name: "Eastern Palace"
location: [ 490, 198 ]
boss:
      name: "Armos Knights"
# Armos don't require the bow. However, there is a Red Eyegore blocking
# access to them, and it DOES require the bow.
defeat: any: [ 'weapon', 'ice_rod' ]
access: [ 'bow_and_arrows', 'light' ]
items: [
"Compass Chest"
{ name: "Big Chest", type: "big" }
"Cannonball Chest"
{ name: "Big Key Chest", access: "light" }
"Map Chest"
]
keys: 0
desert_palace:
    name: "Desert Palace"
location: [ 37, 408 ]
enter: any: [ 'book', 'dw_mm_mirror' ]
boss:
      name: "Lanmolas"
defeat: any: [ 'weapon', 'ice_rod' ]
access: [ 'light_torch', 'glove' ]
items: [
{ name: "Big Chest", type: "big" }
"Map Chest"
{ name: "Torch", type: "dash" }
{ name: "Big Key Chest", access: "defeat_popo" }
"Compass Chest"
]
keys: 1
heras_tower:
    name: "Tower of Hera"
location: [ 287, 16 ]
enter: any: [ [ 'hookshot', 'hammer'], 'mirror' ], all: [ 'dm_west', 'flip_switch' ]
boss:
      name: "Moldorm"
defeat: any: [ "sword", "hammer" ]
items: [
{ name: "Big Key Chest", access: "light_torch" }
{ name: "Basement Cage", type: "standing" }
"Map Chest"
{ name: "Compass Chest", afterBigKey: true }
{ name: "Big Chest", type: "big" }
]
keys: 1
castle_tower:
name: "Castle Tower"
location: [ 255, 203 ]
enter: any: [ 'master_sword', 'cape' ]
boss:
      name: "Agahnim"
prize: false
defeat: any: [ 'sword', 'net' ]
access: [ 'cut_drapes', 'light' ]
afterBigKey: false
items: [
{ name: "Castle Tower - Room 03", access: "weapon" }
{ name: "Castle Tower - Dark Maze", access: [ "light", "weapon" ] }
]
keys: 2
notInPool: [ 'map', 'compass', 'bigKey' ]
dark_palace:
name: "Palace of Darkness"
location: [ 1002, 200 ]
# Also requires 110 rupees
enter: [ 'dark_world', 'moon_pearl' ]
boss:
name: "<NAME>"
# I'm a bit unclear on what weapons are useful against him, but the
# hammer/bow and arrow requirement for getting there means you can
# definitely defeat him.
defeat: [ 'sword', 'bow_and_arrows' ]
access: [ 'light', 'hammer', 'bow_and_arrows' ]
items: [
"Shooter Room"
{ name: "Big Key Chest", access: "bombs" }
{ name: "The Arena - Ledge", access: [ "bow_and_arrows", "bombs" ] }
"The Arena - Bridge" # Technically behind a key
"Stalfos Basement" # Also behind a key
"Map Chest"
{ "Big Chest", type: "big", access: [ "light", "bombs" ] }
"Compass Chest" # Not sure how many keys are needed
"Harmless Hellway"
{ name: "Dark Basement - Left", access: "light" }
{ name: "Dark Basement - Right", access: "light" }
{ name: "Dark Maze - Top", access: "light" }
{ name: "Dark Maze - Bottom", access: "light" }
]
keys: 6
swamp_palace:
name: "Swamp Palace"
location: [ 752, 478 ]
enter: [ 'dw_south', 'mirror', 'flippers' ]
boss:
      name: "Arrghus"
defeat: any: [ 'sword', 'hammer' ], all: 'hookshot'
access: 'hookshot'
afterBigKey: false
items: [
"Entrance"
# All the "hammer" access requirements are because the first flood control
# is behind mole-peg-things
{ name: "Big Chest", type: "big", access: 'hammer' }
{ name: "Big Key Chest", access: 'hammer' }
{ name: "Map Chest", access: 'hammer' }
{ name: "West Chest", access: 'hammer' }
{ name: "Compass Chest", access: 'hammer' }
{ name: "Flooded Room - Left", access: 'hammer' }
{ name: "Flooded Room - Right", access: 'hammer' }
{ name: "Waterfall Room", access: 'hammer' }
]
keys: 1
skull_woods:
    name: "Skull Woods"
location: [ 531, 26 ]
enter: 'dw_nw'
boss:
      name: "Mothula"
defeat: 'fire_rod'
access: 'cut_drapes'
afterBigKey: false
items: [
{ name: "Big Chest", type: "big", access: 'bombs' }
"Big Key Chest"
"Compass Chest"
"Map Chest"
{ name: "Bridge Room", access: "fire_rod" }
"Pot Prison"
"Pinball Room"
]
keys: 3
thieves_town:
name: "Thieves' Town"
location: [ 576, 249 ]
enter: [ 'dw_nw', 'glove' ]
boss:
      name: "Blind"
defeat: any: [ 'sword', 'hammer', 'red_cane', 'blue_cane' ]
# This is pointless because there are bomb pots in the room, but whatever:
access: 'bombs'
items: [
{ name: "Attic", access: "flip_switch" }
"Big Key Chest"
"Map Chest"
"Compass Chest"
"Ambush Chest"
{ name: "Big Chest", type: "big", access: [ "flip_switch", "hammer" ] }
{ name: "Blind's Cell", access: "flip_switch" }
]
keys: 1
ice_palace:
    name: "Ice Palace"
location: [ 920, 442 ]
enter: [ 'moon_pearl', 'flippers', 'titans_mitt', 'melt_ice' ]
boss:
      name: "Kholdstare"
access: 'hammer'
defeat: [ 'melt_ice', 'weapon' ]
items: [
# Some of these access rules are basically useless since they're met by
# being able to access the palace at all. However they're kept on the
# off-chance this ever hooks in to the level where it can see magic
# levels and inventory capacity to deal with toggling magic items on and
# off.
{ name: "Big Key Chest", access: 'hammer' }
{ name: "Compass Chest", access: 'defeat_penguins' }
{ name: "Map Chest", access: 'hammer' }
{ name: "Spike Room", access: 'defeat_penguins' }
{ name: "Freezor Chest", access: 'melt_ice' }
"Iced T Room"
{ name: "Big Chest", access: any: [ 'bombs', 'hookshot' ] }
]
keys: 2
misery_mire:
    name: "Misery Mire"
location: [ 550, 441 ]
medallion: 'mire_medallion'
enter: [ 'dw_mm', 'use_mire_medallion', { any: [ 'hookshot', 'pegasus_boots' ] } ]
boss:
      name: "Vitreous"
access: 'red_cane'
defeat: any: [ 'sword', 'bow_and_arrows' ]
items: [
{ name: "Big Chest", type: "big" }
"Main Lobby"
{ name: "Big Key Chest", access: "light_torch" }
{ name: "Compass Chest", access: "light_torch" }
"Bridge Chest"
"Map Chest"
"Spike Chest"
]
keys: 3
turtle_rock:
name: "Turtle Rock"
location: [ 994, 43 ]
medallion: 'turtle_medallion'
enter: [ 'dw_dm_east', 'titans_mitt', 'hammer', 'use_turtle_medallion', 'red_cane' ]
boss:
      name: 'Trinexx'
defeat: [ 'ice_rod', 'fire_rod', 'weapon' ]
access: [ 'red_cane', 'light' ]
items: [
{ name: "Chain Chomps", access: 'flip_switch_past_barrier' }
"Compass Chest"
{ name: "Roller Room - Left", access: 'fire_rod' }
{ name: "Roller Room - Right", access: 'fire_rod' }
{ name: "Big Chest", type: "big" }
"Big Key Chest"
{ name: "Crystaroller Room", access: 'flip_switch_past_barrier' }
{ name: "Eye Bridge - Bottom Left", access: 'avoid_laser' }
{ name: "Eye Bridge - Bottom Right", access: 'avoid_laser' }
{ name: "Eye Bridge - Top Left", access: 'avoid_laser' }
{ name: "Eye Bridge - Top Right", access: 'avoid_laser' }
]
keys: 4
ganons_tower:
    name: "Ganon's Tower"
# Note that this is "east" because "west" doesn't include the tower, despite
# the fact it's on the "west" side.
enter: [ 'dw_dm_east', 'all_crystals' ]
location: [ 800, 0 ]
boss:
      name: "Agahnim 2"
prize: false
items: [
{ name: "<NAME>", type: "dash" },
{ name: "DMs Room - Top Left", access: 'gt_dms_room' },
{ name: "DMs Room - Top Right", access: 'gt_dms_room' },
{ name: "DMs Room - Bottom Left", access: 'gt_dms_room' },
{ name: "DMs Room - Bottom Right", access: 'gt_dms_room' },
{ name: "Randomizer Room - Top Left", access: 'gt_randomizer_room' },
{ name: "Randomizer Room - Top Right", access: 'gt_randomizer_room' },
{ name: "Randomizer Room - Bottom Left", access: 'gt_randomizer_room' },
{ name: "Randomizer Room - Bottom Right", access: 'gt_randomizer_room' },
{ name: "Firesnake Room", access: 'gt_basement_left' },
{ name: "Map Chest", access: any: [ 'hookshot', 'pegasus_boots' ], all: 'hammer' },
{ name: "Big Chest", type: "big", access: 'gt_basement_clear' },
"Hope Room - Left"
"Hope Room - Right"
      { name: "Bob's Chest", access: 'gt_basement_clear' },
{ name: "Tile Room", access: 'red_cane' },
{ name: "Compass Room - Top Left", access: 'gt_basement_right' },
{ name: "Compass Room - Top Right", access: 'gt_basement_right' },
{ name: "Compass Room - Bottom Left", access: 'gt_basement_right' },
{ name: "Compass Room - Bottom Right", access: 'gt_basement_right' },
{ name: "Big Key Chest", access: 'gt_big_key_room' },
{ name: "Big Key Room - Left", access: 'gt_big_key_room' },
{ name: "Big Key Room - Right", access: 'gt_big_key_room' },
{ name: "Mini Helmasaur Room - Left", access: 'bow_and_arrows', afterBigKey: true },
{ name: "Mini Helmasaur Room - Right", access: 'bow_and_arrows', afterBigKey: true },
{ name: "Pre-Moldorm Chest", access: [ 'bow_and_arrows', 'bombs' ], afterBigKey: true },
{ name: "Moldorm Chest", access: [ 'bow_and_arrows', 'hookshot', 'sword', 'bombs' ], afterBigKey: true }
]
keys: 4
slots:
# Slots just defines names for "default" slots that can then be referred to in
# the layout. It's also intended to allow certain modes to override slots
# (namely to allow swordless to make the sword blank).
#
# null indicates an empty space (nothing equipped).
#
# Note that these are not rules: "sword" is already a rule that means "has any
# sword" and "glove" is a rule for "has any glove."
sword:
name: 'Sword'
items: [ null, "sword_1", "sword_2", "sword_3", "sword_4" ]
armor:
name: 'Armor'
items: [ "green_jerkin", "blue_mail", "red_mail" ]
shield:
name: 'Shield'
items: [ null, "shield_1", "shield_2", "shield_3" ]
glove:
name: 'Gloves'
items: [ null, "power_glove", "titans_mitt" ]
bow_and_arrows:
name: 'Bow'
items: [ null, 'bow_and_wooden_arrows', 'bow_and_silver_arrows' ]
# This is used by dungeons to sort through possible entry medallions.
medallions:
name: 'Medallion'
items: [ 'bombos', 'ether', 'quake' ]
prizes:
# Prizes define what is awarded when a dungeon completes that is NOT the boss
# item drop. The order here is the order they're cycled through in the UI.
crystal: [ 'crystal_1', 'crystal_2', 'crystal_3', 'crystal_4', 'crystal_7' ]
superbomb_crystal: [ 'crystal_5', 'crystal_6' ]
green_pendant: [ 'green_pendant' ]
pendant: [ 'red_pendant', 'blue_pendant' ]
layout:
# Layout defines the default layout if nothing overrides it.
# Items defines the space that items are in. Items below are in "status
# screen" order, with items that would share a slot being in arrays.
# Note: "bow_and_arrows" is a "special" UI that pulls from "slots" for what it
# should contain by default.
items: [
[ "bow_and_arrows", [ "boomerang", "magic_boomerang" ], "hookshot", "bombs", [ "mushroom", "magic_powder" ] ]
[ "fire_rod", "ice_rod", "bombos", "ether", "quake" ]
[ "lamp", "hammer", [ "shovel", "flute" ], "net", "book" ]
[ "bottle", "red_cane", "blue_cane", "cape", "mirror" ]
]
# Equipment is simply a list of either slot IDs (which mask item IDs if they
# happen to share them) or item IDs (if an item is present or not).
equipment: [
"sword", "armor", "shield", "pegasus_boots", "glove", "flippers",
"moon_pearl"
]
# List of dungeons. Each key is actually treated as a new row, and multiple
# keys can be provided.
dungeons:
light: [
"hyrule_castle_escape"
"castle_tower"
"eastern_palace"
"desert_palace"
"heras_tower"
]
dark: [
"dark_palace"
"swamp_palace"
"skull_woods"
"thieves_town"
"ice_palace"
"misery_mire"
"turtle_rock"
"ganons_tower"
]
maps: [ "light-world", "dark-world" ]
footer: { }
defaults: [
"bombs"
]
# The various known logics.
logics:
standard:
# This is the generic logic. It changes nothing, but needs to exist so that
# users can select it.
name: "Standard"
# Changes for Swordless logic
swordless:
name: "Swordless"
rules:
use_medallion:
# In Swordless, the medallions can always be used within the logic
# defined in here (there are special Medallion Tiles)
requires: true
cut_drapes:
# Drapes don't need cutting in swordless.
requires: true
melt_ice:
# Swordless allows the Ice Palace to completed with just Bombos even
# without the hammer.
requires: any: [ 'fire_rod', 'bombos' ]
# Hammer isn't required to open dungeons
use_mire_medallion:
name: "Use Misery Mire Medallion"
requires: 'mire_medallion'
use_turtle_medallion:
name: "Use Turtle Rock Medallion"
requires: 'turtle_medallion'
slots:
# Override sword to not be selectable
sword: [ null ]
# Changes for retro logic
retro:
name: "Retro"
rules:
# Bow now needs a quiver to be useful
bow_and_arrows:
requires: any: [ 'bow_and_wooden_arrows', 'bow_and_silver_arrows' ], all: 'rupee_quiver'
none:
name: 'No Logic'
defaults: [ 'bombs', 'pegasus_boots' ]
# Probably going to just special-case this rather than have it rewrite all the rules to true
version:
date: '2020-07-25'
  alttpr: 'v31'
# LTTP database. Note that this is "compiled" into a JavaScript file that is
# loaded.
# The items list is basically a list of names for collectibles. Other data is
# listed for them, but it's never used. These define the "base states" that
# rules are based on.
items:
sword_1:
name: "Fighter's Sword"
slot: "sword"
type: "equipment"
sword_2:
name: "Master Sword"
slot: "sword"
type: "equipment"
upgrades: "sword_1"
sword_3:
name: "Tempered Sword"
slot: "sword"
type: "equipment"
upgrades: "sword_2"
sword_4:
name: "Golden Sword"
slot: "sword"
type: "equipment"
upgrades: "sword_3"
shield_1:
name: "Fighter's Shield"
slot: "shield"
type: "equipment"
shield_2:
name: "Red Shield"
slot: "shield"
type: "equipment"
upgrades: "shield_1"
shield_3:
name: "Mirror Shield"
slot: "shield"
type: "equipment"
upgrades: "shield_2"
green_jerkin:
name: "Green Jerkin"
slot: "armor"
type: "equipment"
"default": true
blue_mail:
name: "Blue Mail"
slot: "armor"
type: "equipment"
upgrades: "green_jerkin"
red_mail:
name: "Red Mail"
slot: "armor"
type: "equipment"
upgrades: "blue_mail"
pegasus_boots:
name: "Pegasus Shoes"
type: "equipment"
power_glove:
name: "Power Glove"
slot: "glove"
type: "equipment"
titans_mitt:
name: "Titan's Mitt"
slot: "glove"
type: "equipment"
upgrades: "power_glove"
flippers:
name: "Zora's Flippers"
type: "equipment"
moon_pearl:
name: "Moon Pearl"
type: "equipment"
# Just the bow - necessary for retro mode
bow:
name: "Bow"
# For retro mode:
rupee_quiver:
name: "Rupee Quiver"
# The bow with wooden arrows
bow_and_wooden_arrows:
name: "Bow & Wooden Arrows"
# The bow with silver arrows
bow_and_silver_arrows:
name: "Bow & Silver Arrows"
boomerang:
name: "Boomerang"
magic_boomerang:
name: "Magical Boomerang"
upgrades: "boomerang"
hookshot:
name: "Hookshot"
bombs:
name: "Bomb"
mushroom:
name: "Mushroom"
magic_powder:
name: "Magic Powder"
fire_rod:
name: "Fire Rod"
ice_rod:
name: "Ice Rod"
bombos:
name: "Bombos Medallion"
ether:
name: "Ether Medallion"
quake:
name: "Quake Medallion"
lamp:
name: "Lantern"
hammer:
name: "Magic Hammer"
shovel:
name: "Shovel"
flute:
name: "Flute"
upgrades: "shovel"
net:
name: "Bug-Catching Net"
book:
name: "Book of Mudora"
bottle:
name: "Bottle"
stacks: 4
# This isn't used but is listed anyway on the off chance it ever gets used:
contains: [
"Medicine of Life", "Medicine of Magic", "Medicine of Life and Magic",
"Fairy", "Bee", "Golden Bee"
]
red_cane:
name: "Cane of Somaria"
blue_cane:
name: "Cane of Byrna"
cape:
name: "Magic Cape"
mirror:
name: "Magic Mirror"
green_pendant:
name: "Pendant of Courage"
type: "pendant"
red_pendant:
name: "Pendant of Wisdom"
type: "pendant"
blue_pendant:
name: "Pendant of Power"
type: "pendant"
crystal_1:
name: "Crystal 1"
type: "crystal"
crystal_2:
name: "Crystal 2"
type: "crystal"
crystal_3:
name: "Crystal 3"
type: "crystal"
crystal_4:
name: "Crystal 4"
type: "crystal"
crystal_5:
name: "Crystal 5"
type: "crystal"
superbomb: true
crystal_6:
name: "Crystal 6"
type: "crystal"
superbomb: true
crystal_7:
name: "Crystal 7"
type: "crystal"
rules:
# RULES ######################################################################
# These define basic rules.
#
# Rule definitions are used just about everywhere, so here's the basics:
#
# * All rules exist in an environment. Each key for each rule and each key for
# each item and location exists in this environment and can be used in a
# rule.
# * A rule definition can contain any of the following:
# - A boolean (true, false) that sets the rule to always be that value
# - A string, that evaluates to whatever that string is in the environment
# - An object, that contains the following fields:
# * any: a list of things to logical OR together
# * all: a list of things to logical AND together
# The evaluated value of any and all will be ANDed together to create
# the final evaluated value of the rule.
# Each "thing" in that list can either be a string (to look up a
# variable) or itself a rule using the same definition.
# - An array, which is treated as if it were under the "all" key above.
#
# Each rule has a name and description. Currently the description is ignored.
# Names, if provided, are used when generating tooltips that reference the
# rule. Instead of reporting the subrule, the rule's name is used instead.
light:
# Really this is a meta-item. It exists to allow "light source" to be
# flagged separately from "fire source" for anyone who wants to attempt
# dark room navigation.
name: 'Light Source'
description: 'A source of light.'
requires: 'lamp'
sword:
name: "Any Sword"
requires: any: [ 'sword_1', 'sword_2', 'sword_3', 'sword_4' ]
master_sword:
name: "Master Sword (or better)"
# Also technically full health
requires: any: [ 'sword_2', 'sword_3', 'sword_4' ]
bow_and_arrows:
name: "Bow & Arrows"
requires: any: [ 'bow_and_wooden_arrows', 'bow_and_silver_arrows' ]
# Something that can light a torch.
light_torch:
name: 'Light Torch'
description: 'The ability to light a torch.'
requires: any: [ 'lamp', 'fire_rod' ]
weapon:
name: 'Any Weapon'
description: 'Access to a weapon'
requires: any: [ 'sword', 'bow_and_arrows', 'bombs', 'fire_rod', 'hammer',
'red_cane', 'blue_cane' ]
swimming:
name: 'Swimming'
description: 'Either Flippers or accessible via Fake Flippers'
requires: 'flippers'
# Something that can melt ice, required for the Ice Palace.
melt_ice:
name: 'Melt Ice (Fire Rod/Usable Bombos)'
description: 'The ability to melt ice: specifically Freezors and Kholdstare.'
requires: any: [ 'fire_rod', 'use_bombos' ]
defeat_popo:
name: "Defeat Popo"
description: "Ability to defeat Popos"
requires: any: [ 'weapon', 'hookshot' ]
defeat_penguins:
name: "Defeat Penguins"
description: "Ability to defeat Penguins"
# Literally the same as above, but :shrug:
requires: any: [ 'weapon', 'hookshot' ]
avoid_laser:
name: "Avoid Eye Lasers"
description: "Please do not look directly at the eye laser."
requires: any: [ 'cape', 'blue_cane', 'shield_3' ]
# -- Use Medallions --
# These rules are to indicate a medallion may be used.
use_medallion:
name: "Use Medallion"
description: 'The ability to use a medallion.'
# In Swordless, this becomes just the hammer.
requires: any: [ 'sword_1', 'sword_2', 'sword_3', 'sword_4' ]
# This rule is effectively for replacing in Swordless.
cut_drapes:
name: "Cut Drapes"
requires: 'sword'
use_bombos:
name: "Use Bombos Medallion"
requires: [ 'use_medallion', 'bombos' ]
use_ether:
name: "Use Ether Medallion"
requires: [ 'use_medallion', 'ether' ]
use_quake:
name: "Use Quake Medallion"
requires: [ 'use_medallion', 'quake' ]
mire_medallion:
name: "Misery Mire Medallion"
# The Misery Mire medallion is randomized. This rule is the "default": if
# you have all, you can use it.
requires: [ 'bombos', 'ether', 'quake' ]
turtle_medallion:
name: "Turtle Rock Medallion"
# The Turtle Rock medallion is randomized. This rule is the "default": if
# you have all, you can use it.
requires: [ 'bombos', 'ether', 'quake' ]
# NOTE: You do NOT need anything to use the medallions for dungeon unlock
# purposes in swordless mode.
use_mire_medallion:
name: "Use Misery Mire Medallion"
requires: [ 'use_medallion', 'mire_medallion' ]
use_turtle_medallion:
name: "Use Turtle Rock Medallion"
requires: [ 'use_medallion', 'turtle_medallion' ]
flip_switch_past_barrier:
name: 'Flip Crystal Switch Past Barrier'
description: 'Something that can flip a crystal switch past a barrier.'
requires: any: [ 'boomerang', 'magic_boomerang', 'bombs', 'fire_rod',
'ice_rod', 'bow_and_arrows', 'red_cane', 'blue_cane'
]
flip_switch:
name: 'Flip Crystal Switch'
description: 'Something that can flip a crystal switch, assuming nothing is in the way.'
requires: any: [ 'flip_switch_past_barrier', 'sword', 'hookshot', 'hammer' ]
# Whatever's required to activate a pedestal. This is mainly here so that it
# can be replaced with a rule that says "hammer" in swordless runs.
pedestal_sword:
name: 'Pedestal Sword'
requires: any: [ 'sword_2', 'sword_3', 'sword_4' ]
glove:
name: 'Power Glove (or better)'
requires: any: [ 'power_glove', 'titans_mitt' ]
break_weak_wall:
name: "Bombs/Pegasus Shoes"
requires: any: [ 'bombs', 'pegasus_boots' ]
all_crystals:
name: "All Crystals"
requires: [ 'crystal_1', 'crystal_2', 'crystal_3', 'crystal_4', 'crystal_5',
'crystal_6', 'crystal_7' ]
# Ganon's Tower rules
gt_dms_room:
name: "Ganon's Tower - DMs Room"
description: "A room in Ganon's Tower"
requires: [ 'hammer', 'hookshot' ]
gt_randomizer_room:
name: "Ganon's Tower - Randomizer Room"
description: "A room in Ganon's Tower"
requires: [ 'hammer', 'hookshot', 'bombs' ]
gt_basement_left:
name: "Ganon's Tower - Left Side"
description: "Left side of the basement of Ganon's Tower"
requires: any: [ 'red_cane', 'bombs' ], all: [ 'hookshot', 'hammer' ]
gt_basement_right:
name: "Ganon's Tower - Right Side"
description: "Right side of the basement of Ganon's Tower"
requires: [ 'fire_rod', 'red_cane' ]
gt_big_key_room:
name: "Ganon's Tower - Big Key Room"
description: "The room with the Big Key in Ganon's Tower"
requires: [ 'gt_basement_clear', 'bombs' ]
gt_basement_clear:
name: "Ganon's Tower - Basement"
description: "Entering the common area with the Big Chest"
requires: any: [ 'gt_basement_left', 'gt_basement_right' ]
# REGIONS ######################################################################
#
# Regions define general areas of the map.
regions:
light_world:
name: "Light World"
points: "0,0 191,0 191,128 355,128"
dm_west:
name: "Death Mountain (West)"
requires: any: [ [ 'light', 'glove' ], 'flute' ]
dm_east:
name: "Death Mountain (East)"
requires: [ 'dm_west', { any: [ 'hookshot', [ 'hammer', 'mirror' ] ] } ]
turtle_rock_top:
name: 'Turtle Rock Medallion Platform'
# Access to the medallion platform.
requires: [ 'dm_east', 'titans_mitt', 'hammer' ]
dark_world:
name: 'Dark World'
# "The area of the Dark World you're supposed to start in" - this does NOT
# include the Moon Pearl. This rule gets crazy fast.
requires: any: [
# Simple enough: Defeat Agahnim, create portal
'castle_portal',
# Also simple: go through the portal near Flute Spot 5
[ 'eastern_portal', 'moon_pearl' ],
# Somewhat more complicated: go through Kakariko using the Titan's Mitt
# and have the flippers, allowing you to swim there.
[ 'moon_pearl', 'titans_mitt', 'flippers' ]
]
dw_nw:
name: 'Dark World (Northwest)'
    # Basically "Thieves' Town and surroundings." Also includes all of Skull
# Woods. Note that the Moon Pearl is required to leave the starting area.
requires: any: [
'kakariko_portal',
# Access is also possible with any way to the general Dark World + the
# hookshot.
[ 'dark_world', 'moon_pearl', 'hookshot' ] ]
dw_nw_mirror:
name: 'Dark World (Northwest) and Magic Mirror'
requires: [ 'dw_nw', 'mirror' ]
dw_south:
name: 'Dark World (South)'
    # The portion of the Dark World that's south of Thieves' Town and south
# of the Pyramid of Power. Always accessible if you can get to dw_nw
# (you can just drop down a ledge), or if you have Dark World access and the
# Magic Hammer.
requires: any: [ [ 'dark_world', 'hammer' ], 'dw_nw' ]
dw_mm:
name: 'Misery Mire (exterior)'
requires: [ 'titans_mitt', 'flute' ]
dw_mm_mirror:
name: 'Misery Mire (exterior) with Magic Mirror'
requires: [ 'dw_mm', 'mirror' ]
dw_dm_west:
name: 'Dark World Death Mountain (West)'
# The area you can get to as Bunny Link (note: does NOT include Ganon's
# Tower)
requires: 'dm_west'
dw_dm_east:
name: 'Dark World Death Mountain (East)'
# Basically the rest of Death Mountain from above. Also covers the
# Turtle Rock entrance.
requires: [ 'dm_east', 'titans_mitt' ]
locations:
# Locations are points on the map. They either indicate places that items can
# be retrieved or other points of interest.
#
# Locations have the following fields:
# name - the human-readable name of the location
# required - a rule defining the requirements for accessing the location,
# defaults to true (always available)
# visible - a rule defining what is required for a location to be "visible"
# which really only means "can see the item there" which defaults to
# false
# location - set of x,y coordinates for a location. Indicates where the
# location is on the 1024x512 map, where x between 0-511 are in the
# Light World and x between 512-1023 are in the Dark World. (Note that
# this makes them effectively tiles.)
# LIGHT WORLD LOCATIONS ######################################################
## DARK WORLD TELEPORTS ##
# These are not item locations, just areas where you can teleport to the
# Dark World from the Light World.
kakariko_portal:
name: 'Kakariko Village Portal'
type: 'portal'
# This portal is only useful with the Moon Pearl.
requires: any: [ 'titans_mitt', [ 'glove', 'hammer' ] ], all: 'moon_pearl'
location: [ 46, 172 ]
castle_portal:
name: 'Hyrule Castle Gate Portal'
# This is pretty much the only portal that doesn't instantly require the
# Moon Pearl
type: 'portal'
requires: 'castle_tower.cleared'
location: [ 256, 269 ]
eastern_portal:
name: 'Eastern Palace Portal (Flute 5)'
type: 'portal'
requires: [ 'hammer', 'glove', 'moon_pearl' ]
location: [ 498, 366 ]
swamp_portal:
name: 'Swamp Portal'
type: 'portal'
requires: [ 'hammer', 'glove', 'moon_pearl' ]
location: [ 232, 394 ]
desert_portal:
name: 'Desert Portal'
type: 'portal'
# Strictly speaking you don't instantly require the Moon Pearl for this one.
requires: [ 'flute', 'titans_mitt' ]
location: [ 20, 502 ]
ice_portal:
name: 'Ice Palace Portal'
type: 'portal'
# Strictly speaking you don't instantly dead-end without the Moon Pearl.
# Instead you dead-end after entering the palace and being unable to kill
# the Freezor.
requires: [ 'swimming', 'titans_mitt', 'moon_pearl' ]
location: [ 406, 444 ]
# It exists, technically:
spectacle_rock_portal:
name: "Spectacle Rock Portal"
type: 'portal'
requires: 'dm_west'
location: [ 288, 46 ]
dm_east_portal:
name: "Death Mountain Eastern Portal"
type: 'portal'
requires: [ 'dm_east', 'titans_mitt' ]
location: [ 394, 116 ]
turtle_rock_portal:
name: "Turtle Rock Portal"
type: 'portal'
requires: [ 'dm_east', 'titans_mitt', 'hammer' ]
location: [ 480, 28 ]
pedestal:
    name: 'Master Sword Pedestal'
requires: [ 'red_pendant', 'blue_pendant', 'green_pendant' ]
visible: 'book'
location: [ 21, 26 ]
forest_mushroom:
name: 'Mushroom'
location: [ 61, 44 ]
thieves_cave:
name: "PI:NAME:<NAME>END_PI"
location: [ 97, 67 ]
lumberjack_cave:
    name: 'Lumberjack Tree'
requires: [ 'castle_tower.cleared', 'pegasus_boots' ]
visible: true
location: [ 154, 37 ]
ether_pedestal:
name: 'Ether Pedestal'
requires: [ 'dm_west', 'pedestal_sword', 'book' ]
visible: [ 'dm_west', 'book' ]
location: [ 215, 9 ]
old_man:
name: 'Lost Old Man'
requires: [ 'dm_west', 'light' ]
location: [ 229, 119 ]
spectacle_rock_cave:
name: 'Spectacle Rock Cave'
requires: 'dm_west'
location: [ 250, 74 ]
spectacle_rock:
name: 'Spectacle Rock'
requires: [ 'dm_west', 'mirror' ]
visible: 'dm_west'
location: [ 260, 44 ]
floating_island:
name: 'Floating Island'
requires: [ 'dw_dm_east', 'moon_pearl', 'bombs', 'mirror' ]
visible: 'dm_east'
location: [ 417, 12 ]
spiral_cave:
name: 'Spiral Cave'
requires: 'dm_east'
location: [ 408, 46 ]
mimic_cave:
name: 'Mimic Cave'
requires: [ 'moon_pearl', 'mirror', 'use_turtle_medallion', 'dw_dm_east' ]
location: [ 431, 47 ]
paradox_cave_basic:
name: 'Paradox Cave (no bombs)'
items: 5
requires: 'dm_east'
paradox_cave_bombs:
name: 'Paradox Cave (behind bombable wall)'
items: 2
requires: [ 'dm_east', 'bombs' ]
paradox_cave:
name: 'Paradox Cave'
merge: [ 'paradox_cave_basic', 'paradox_cave_bombs' ]
location: [ 441, 110 ]
zora:
name: 'Zora'
requires: any: [ 'glove', 'flippers' ]
rupees: 500
zora_ledge:
name: "Zora's Domain Ledge"
requires: 'flippers'
# If you can get into Zora's Domain, you can see what's on the ledge
visible: 'glove'
zoras_domain:
name: "Zora's Domain"
merge: [ "zora", "zora_ledge" ]
#location: [ 490, 64 ] - intentionally lowered as otherwise it looks like
# it was on Death Mountain
location: [ 490, 72 ]
behind_waterfall:
name: 'Waterfall Fairy'
requires: 'flippers'
items: 2
location: [ 460, 70 ]
well_floor:
name: 'Well Floor'
items: 4
well_cave:
name: 'Well Cave'
requires: 'bombs'
well:
name: 'Well'
merge: [ 'well_floor', 'well_cave' ]
location: [ 11, 217 ]
blinds_block_puzzle:
name: "Blind's House (block puzzle)"
items: 4
blinds_secret:
name: "Blind's House (bombable wall)"
requires: 'bombs'
blinds_house:
name: "Blind's House"
merge: [ 'blinds_block_puzzle', 'blinds_secret' ]
location: [ 65, 215 ]
bottle_vendor:
name: 'Bottle Vendor'
rupees: 100
location: [ 48, 238 ]
chicken_house:
name: 'Chicken House'
requires: 'bombs'
location: [ 50, 277 ]
sick_kid:
    name: 'Sick Kid',
requires: 'bottle'
location: [ 79, 274 ]
tavern:
    name: 'Tavern'
location: [ 81, 290 ]
blacksmiths:
    name: 'Blacksmiths'
# Does NOT require the mirror: the guy will follow after save and quit
requires: [ 'dw_nw', 'titans_mitt' ]
location: [ 154, 273 ]
magic_bat:
name: 'Magic Bat'
requires: any: [ 'hammer', 'dw_nw_mirror' ], all: 'magic_powder'
location: [ 165, 287 ]
sanctuary_bonk_rocks:
name: "Sanctuary Bonk Rocks"
requires: 'pegasus_boots'
location: [ 199, 149 ]
sanctuary:
name: 'Sanctuary'
location: [ 235, 136 ]
castle_graveyard:
name: 'Castle Under Graveyard'
items: 3
requires: 'break_weak_wall'
location: [ 265, 149 ]
graveyard_cave:
name: 'Graveyard Cave'
requires: [ 'dw_nw', 'mirror' ]
location: [ 291, 140 ]
kings_tomb:
name: "King's Tomb"
requires: [ 'pegasus_boots', { any: [ 'titans_mitt', [ 'dw_nw', 'mirror' ] ] } ]
location: [ 307, 151 ]
secret_passage:
name: "Secret Passage (Uncle)"
items: 2
location: [ 304, 212 ]
witch:
name: "Witch's Hut"
requires: 'mushroom'
location: [ 409, 171 ]
sahasrala:
    name: 'Sahasrahla'
requires: 'green_pendant'
location: [ 414, 236 ]
sahasralas_hut:
    name: "Sahasrahla's Hut"
requires: 'break_weak_wall'
location: [ 414, 220 ]
race_game:
name: "15 Second Game"
requires: 'break_weak_wall'
location: [ 49, 368 ]
library:
name: "Library"
requires: 'pegasus_boots'
visible: true
location: [ 79, 336 ]
haunted_grove:
name: "Haunted Grove"
requires: 'shovel'
location: [ 146, 338 ]
links_house:
name: "Link's House"
location: [ 279, 352 ]
desert_cliff:
    name: "Desert Cliff"
requires: any: [ 'book', 'dw_mm_mirror' ]
visible: true
location: [ 12, 464 ]
checkboard_cave:
name: "Checkerboard Cave"
requires: [ "dw_mm", "mirror" ]
location: [ 90, 397 ]
aginahs_cave:
name: "Aginah's Cave"
requires: 'bombs'
location: [ 102, 422 ]
south_of_grove_cave:
name: 'Cave South of Haunted Grove'
requires: [ 'dw_south', 'mirror' ]
location: [ 136, 422 ]
bombos_pedestal:
name: "Bombos Pedestal"
requires: [ "book", "dw_south", "mirror", "pedestal_sword" ]
visible: [ "book", "dw_south", "mirror" ]
location: [ 112, 470 ]
dam:
name: "Dam"
items: 2
location: [ 240, 478 ]
mini_moldorm_cave:
name: "Mini Moldorm Cave"
items: 5
requires: "bombs"
location: [ 334, 480 ]
hylia_island:
name: "Hylia Island"
requires: [ "dw_south", "moon_pearl", "flippers", "mirror" ]
# You can't actually see it from shore
visible: 'swimming'
location: [ 373, 426 ]
ice_rod_cave:
name: "Ice Rod Cave"
requires: "bombs"
location: [ 458, 395 ]
hobo:
name: 'Hobo Under the Bridge'
requires: 'swimming'
location: [ 358, 354 ]
# DARK WORLD LOCATIONS #######################################################
bumper_cave:
name: 'Bumper Cave'
requires: [ 'dw_nw', 'moon_pearl', 'cape' ]
visible: 'dw_nw'
location: [ 695, 77 ]
spike_cave:
name: 'Spike Cave'
requires: [ 'moon_pearl', 'hammer', 'dw_dm_west' ]
location: [ 805, 73 ]
hookshot_cave_full:
name: 'Hookshot Cave (three chests)'
requires: [ 'moon_pearl', 'dw_dm_east', 'glove', 'hookshot' ]
items: 3
hookshot_cave_bonk:
name: 'Hookshot Cave (bottom chest)'
requires: any: [ 'pegasus_boots', 'hookshot' ], all: [ 'moon_pearl', 'dw_dm_east', 'glove' ]
hookshot_cave:
name: 'Hookshot Cave'
merge: [ 'hookshot_cave_full', 'hookshot_cave_bonk' ]
location: [ 938, 33 ]
death_mountain_cave:
name: 'Death Mountain Cave'
items: 2
requires: [ 'dw_dm_east', 'moon_pearl' ]
location: [ 943, 74 ]
catfish:
name: 'Catfish'
requires: [ 'dark_world', 'moon_pearl', 'glove' ]
location: [ 969, 88 ]
chest_game:
name: 'Chest Game'
requires: [ 'moon_pearl', 'dw_nw' ]
location: [ 537, 239 ]
doorless_hut:
name: 'Doorless Hut'
requires: [ 'moon_pearl', 'dw_nw', 'bombs' ]
location: [ 568, 298 ]
c_house:
name: 'C-Shaped House'
requires: [ 'moon_pearl', 'dw_nw' ]
location: [ 618, 246 ]
peg_cave:
name: 'Peg Cave'
# Don't bother listing dw_nw as a prereq because the titans_mitt gets you
# there anyway
requires: [ 'moon_pearl', 'titans_mitt', 'hammer' ]
location: [ 674, 309 ]
purple_chest:
name: 'Locked Chest'
# This is the only item that depends on a different location. Weird!
requires: [ 'blacksmiths' ]
location: [ 666, 272 ]
fat_fairy:
name: 'Fat Fairy'
requires: [ 'moon_pearl', 'superbomb', 'dark_world' ]
location: [ 752, 248 ]
pyramid_ledge:
name: 'Pyramid Ledge'
requires: 'dark_world'
location: [ 808, 230 ]
digging_game:
name: 'Digging Game'
requires: [ 'dw_south', 'moon_pearl' ]
rupees: 80
location: [ 540, 354 ]
stumpy:
name: 'Flute Kid'
requires: [ 'dw_south', 'moon_pearl' ]
location: [ 669, 350 ]
mini_myre_cave:
name: 'Mini Myre Cave'
requires: 'dw_mm'
items: 2
location: [ 531, 409 ]
hype_cave:
name: 'Hype Cave'
items: 5
requires: [ 'dw_south', 'moon_pearl', 'bombs' ]
location: [ 818, 399 ]
superbomb:
    name: 'Super Bomb'
type: 'superbomb'
rupees: 100
location: [ 791, 352 ]
requires: [ 'dw_south', 'moon_pearl', 'crystal_5', 'crystal_6' ]
dungeons:
# These are almost locations, but with a few extra fields.
#
# enter - rule for requirements to (meaningfully) enter the dungeon
# (meaningfully in this case basically means "and get past the first
# room" - specific examples are flippers for Swamp Palace and a way to
# defeat the Freezor in the Ice Palace)
# boss - information about the boss:
# name - the boss's name
# defeat - requirements for defeating the boss
# access - requirements for even accessing the boss (if any)
# items - array describing all items within the dungeon. Names should be
# the names as defined in the spoiler file.
# keys - number of keys that are in the random shuffle
# notInPool - if given, a list (or single string) of generic dungeon items
# that happen to be missing from the item pool and should not count against
# chests
hyrule_castle_escape:
name: "Hyrule Castle Escape"
# This currently has no location and is therefore not shown on the map.
items: [
# Listed elsewhere:
# "Sanctuary"
# { name: "Secret Room - Left", access: 'break_weak_wall' }
# { name: "Secret Room - Middle", access: 'break_weak_wall' }
# { name: "Secret Room - Right", access: 'break_weak_wall' }
{ name: "Dark Cross", access: 'light' }
{ name: "Boomerang Chest", access: 'weapon' }
{ name: "Map Chest", access: 'weapon' }
{ name: "Zelda's Cell", access: 'weapon' }
# Also listed elsewhere:
# { name: "Link's Uncle", type: "uncle" }
# "Secret Passage"
]
keys: 1
# Compass not only isn't in the pool, it doesn't exist.
notInPool: [ 'compass', 'bigKey' ]
eastern_palace:
name: "Eastern Palace"
location: [ 490, 198 ]
boss:
      name: "Armos Knights"
# Armos don't require the bow. However, there is a Red Eyegore blocking
# access to them, and it DOES require the bow.
defeat: any: [ 'weapon', 'ice_rod' ]
access: [ 'bow_and_arrows', 'light' ]
items: [
"Compass Chest"
{ name: "Big Chest", type: "big" }
"Cannonball Chest"
{ name: "Big Key Chest", access: "light" }
"Map Chest"
]
keys: 0
desert_palace:
    name: "Desert Palace"
location: [ 37, 408 ]
enter: any: [ 'book', 'dw_mm_mirror' ]
boss:
      name: "Lanmolas"
defeat: any: [ 'weapon', 'ice_rod' ]
access: [ 'light_torch', 'glove' ]
items: [
{ name: "Big Chest", type: "big" }
"Map Chest"
{ name: "Torch", type: "dash" }
{ name: "Big Key Chest", access: "defeat_popo" }
"Compass Chest"
]
keys: 1
heras_tower:
    name: "Tower of Hera"
location: [ 287, 16 ]
enter: any: [ [ 'hookshot', 'hammer'], 'mirror' ], all: [ 'dm_west', 'flip_switch' ]
boss:
      name: "Moldorm"
defeat: any: [ "sword", "hammer" ]
items: [
{ name: "Big Key Chest", access: "light_torch" }
{ name: "Basement Cage", type: "standing" }
"Map Chest"
{ name: "Compass Chest", afterBigKey: true }
{ name: "Big Chest", type: "big" }
]
keys: 1
castle_tower:
name: "Castle Tower"
location: [ 255, 203 ]
enter: any: [ 'master_sword', 'cape' ]
boss:
      name: "Agahnim"
prize: false
defeat: any: [ 'sword', 'net' ]
access: [ 'cut_drapes', 'light' ]
afterBigKey: false
items: [
{ name: "Castle Tower - Room 03", access: "weapon" }
{ name: "Castle Tower - Dark Maze", access: [ "light", "weapon" ] }
]
keys: 2
notInPool: [ 'map', 'compass', 'bigKey' ]
dark_palace:
name: "Palace of Darkness"
location: [ 1002, 200 ]
# Also requires 110 rupees
enter: [ 'dark_world', 'moon_pearl' ]
boss:
name: "PI:NAME:<NAME>END_PI"
# I'm a bit unclear on what weapons are useful against him, but the
# hammer/bow and arrow requirement for getting there means you can
# definitely defeat him.
defeat: [ 'sword', 'bow_and_arrows' ]
access: [ 'light', 'hammer', 'bow_and_arrows' ]
items: [
"Shooter Room"
{ name: "Big Key Chest", access: "bombs" }
{ name: "The Arena - Ledge", access: [ "bow_and_arrows", "bombs" ] }
"The Arena - Bridge" # Technically behind a key
"Stalfos Basement" # Also behind a key
"Map Chest"
{ "Big Chest", type: "big", access: [ "light", "bombs" ] }
"Compass Chest" # Not sure how many keys are needed
"Harmless Hellway"
{ name: "Dark Basement - Left", access: "light" }
{ name: "Dark Basement - Right", access: "light" }
{ name: "Dark Maze - Top", access: "light" }
{ name: "Dark Maze - Bottom", access: "light" }
]
keys: 6
swamp_palace:
name: "Swamp Palace"
location: [ 752, 478 ]
enter: [ 'dw_south', 'mirror', 'flippers' ]
boss:
name: "PI:NAME:<NAME>END_PI"
defeat: any: [ 'sword', 'hammer' ], all: 'hookshot'
access: 'hookshot'
afterBigKey: false
items: [
"Entrance"
# All the "hammer" access requirements are because the first flood control
# is behind mole-peg-things
{ name: "Big Chest", type: "big", access: 'hammer' }
{ name: "Big Key Chest", access: 'hammer' }
{ name: "Map Chest", access: 'hammer' }
{ name: "West Chest", access: 'hammer' }
{ name: "Compass Chest", access: 'hammer' }
{ name: "Flooded Room - Left", access: 'hammer' }
{ name: "Flooded Room - Right", access: 'hammer' }
{ name: "Waterfall Room", access: 'hammer' }
]
keys: 1
skull_woods:
name: "PI:NAME:<NAME>END_PI"
location: [ 531, 26 ]
enter: 'dw_nw'
boss:
name: "PI:NAME:<NAME>END_PI"
defeat: 'fire_rod'
access: 'cut_drapes'
afterBigKey: false
items: [
{ name: "Big Chest", type: "big", access: 'bombs' }
"Big Key Chest"
"Compass Chest"
"Map Chest"
{ name: "Bridge Room", access: "fire_rod" }
"Pot Prison"
"Pinball Room"
]
keys: 3
thieves_town:
name: "Thieves' Town"
location: [ 576, 249 ]
enter: [ 'dw_nw', 'glove' ]
boss:
name: "PI:NAME:<NAME>END_PI"
defeat: any: [ 'sword', 'hammer', 'red_cane', 'blue_cane' ]
# This is pointless because there are bomb pots in the room, but whatever:
access: 'bombs'
items: [
{ name: "Attic", access: "flip_switch" }
"Big Key Chest"
"Map Chest"
"Compass Chest"
"Ambush Chest"
{ name: "Big Chest", type: "big", access: [ "flip_switch", "hammer" ] }
{ name: "Blind's Cell", access: "flip_switch" }
]
keys: 1
ice_palace:
name: "PI:NAME:<NAME>END_PI"
location: [ 920, 442 ]
enter: [ 'moon_pearl', 'flippers', 'titans_mitt', 'melt_ice' ]
boss:
name: "PI:NAME:<NAME>END_PI"
access: 'hammer'
defeat: [ 'melt_ice', 'weapon' ]
items: [
# Some of these access rules are basically useless since they're met by
# being able to access the palace at all. However they're kept on the
# off-chance this ever hooks in to the level where it can see magic
# levels and inventory capacity to deal with toggling magic items on and
# off.
{ name: "Big Key Chest", access: 'hammer' }
{ name: "Compass Chest", access: 'defeat_penguins' }
{ name: "Map Chest", access: 'hammer' }
{ name: "Spike Room", access: 'defeat_penguins' }
{ name: "Freezor Chest", access: 'melt_ice' }
"Iced T Room"
{ name: "Big Chest", access: any: [ 'bombs', 'hookshot' ] }
]
keys: 2
misery_mire:
name: "PI:NAME:<NAME>END_PI"
location: [ 550, 441 ]
medallion: 'mire_medallion'
enter: [ 'dw_mm', 'use_mire_medallion', { any: [ 'hookshot', 'pegasus_boots' ] } ]
boss:
name: "PI:NAME:<NAME>END_PI"
access: 'red_cane'
defeat: any: [ 'sword', 'bow_and_arrows' ]
items: [
{ name: "Big Chest", type: "big" }
"Main Lobby"
{ name: "Big Key Chest", access: "light_torch" }
{ name: "Compass Chest", access: "light_torch" }
"Bridge Chest"
"Map Chest"
"Spike Chest"
]
keys: 3
turtle_rock:
name: "Turtle Rock"
location: [ 994, 43 ]
medallion: 'turtle_medallion'
enter: [ 'dw_dm_east', 'titans_mitt', 'hammer', 'use_turtle_medallion', 'red_cane' ]
boss:
name: 'PI:NAME:<NAME>END_PI'
defeat: [ 'ice_rod', 'fire_rod', 'weapon' ]
access: [ 'red_cane', 'light' ]
items: [
{ name: "Chain Chomps", access: 'flip_switch_past_barrier' }
"Compass Chest"
{ name: "Roller Room - Left", access: 'fire_rod' }
{ name: "Roller Room - Right", access: 'fire_rod' }
{ name: "Big Chest", type: "big" }
"Big Key Chest"
{ name: "Crystaroller Room", access: 'flip_switch_past_barrier' }
{ name: "Eye Bridge - Bottom Left", access: 'avoid_laser' }
{ name: "Eye Bridge - Bottom Right", access: 'avoid_laser' }
{ name: "Eye Bridge - Top Left", access: 'avoid_laser' }
{ name: "Eye Bridge - Top Right", access: 'avoid_laser' }
]
keys: 4
ganons_tower:
name: "PI:NAME:<NAME>END_PIanon's Tower"
# Note that this is "east" because "west" doesn't include the tower, despite
# the fact it's on the "west" side.
enter: [ 'dw_dm_east', 'all_crystals' ]
location: [ 800, 0 ]
boss:
name: "PI:NAME:<NAME>END_PI"
prize: false
items: [
{ name: "PI:NAME:<NAME>END_PI", type: "dash" },
{ name: "DMs Room - Top Left", access: 'gt_dms_room' },
{ name: "DMs Room - Top Right", access: 'gt_dms_room' },
{ name: "DMs Room - Bottom Left", access: 'gt_dms_room' },
{ name: "DMs Room - Bottom Right", access: 'gt_dms_room' },
{ name: "Randomizer Room - Top Left", access: 'gt_randomizer_room' },
{ name: "Randomizer Room - Top Right", access: 'gt_randomizer_room' },
{ name: "Randomizer Room - Bottom Left", access: 'gt_randomizer_room' },
{ name: "Randomizer Room - Bottom Right", access: 'gt_randomizer_room' },
{ name: "Firesnake Room", access: 'gt_basement_left' },
{ name: "Map Chest", access: any: [ 'hookshot', 'pegasus_boots' ], all: 'hammer' },
{ name: "Big Chest", type: "big", access: 'gt_basement_clear' },
"Hope Room - Left"
"Hope Room - Right"
{ name: "Bob'PI:NAME:<NAME>END_PI Chest", access: 'gt_basement_clear' },
{ name: "Tile Room", access: 'red_cane' },
{ name: "Compass Room - Top Left", access: 'gt_basement_right' },
{ name: "Compass Room - Top Right", access: 'gt_basement_right' },
{ name: "Compass Room - Bottom Left", access: 'gt_basement_right' },
{ name: "Compass Room - Bottom Right", access: 'gt_basement_right' },
{ name: "Big Key Chest", access: 'gt_big_key_room' },
{ name: "Big Key Room - Left", access: 'gt_big_key_room' },
{ name: "Big Key Room - Right", access: 'gt_big_key_room' },
{ name: "Mini Helmasaur Room - Left", access: 'bow_and_arrows', afterBigKey: true },
{ name: "Mini Helmasaur Room - Right", access: 'bow_and_arrows', afterBigKey: true },
{ name: "Pre-Moldorm Chest", access: [ 'bow_and_arrows', 'bombs' ], afterBigKey: true },
{ name: "Moldorm Chest", access: [ 'bow_and_arrows', 'hookshot', 'sword', 'bombs' ], afterBigKey: true }
]
keys: 4
slots:
  # Slots just defines names for "default" slots that can then be referred to in
  # the layout. It's also intended to allow certain modes to override slots
  # (namely to allow swordless to make the sword blank).
  #
  # null indicates an empty space (nothing equipped).
  #
  # Note that these are not rules: "sword" is already a rule that means "has any
  # sword" and "glove" is a rule for "has any glove."
  #
  # NOTE(review): the item IDs appear to be listed in progression order —
  # confirm against the UI code that consumes these slots.
  sword:
    name: 'Sword'
    items: [ null, "sword_1", "sword_2", "sword_3", "sword_4" ]
  armor:
    name: 'Armor'
    items: [ "green_jerkin", "blue_mail", "red_mail" ]
  shield:
    name: 'Shield'
    items: [ null, "shield_1", "shield_2", "shield_3" ]
  glove:
    name: 'Gloves'
    items: [ null, "power_glove", "titans_mitt" ]
  bow_and_arrows:
    name: 'Bow'
    items: [ null, 'bow_and_wooden_arrows', 'bow_and_silver_arrows' ]
  # This is used by dungeons to sort through possible entry medallions.
  medallions:
    name: 'Medallion'
    items: [ 'bombos', 'ether', 'quake' ]
prizes:
  # Prizes define what is awarded when a dungeon completes that is NOT the boss
  # item drop. The order here is the order they're cycled through in the UI.
  crystal: [ 'crystal_1', 'crystal_2', 'crystal_3', 'crystal_4', 'crystal_7' ]
  # Crystals 5 and 6 are kept separate: per the key name, they are the
  # super-bomb crystals.
  superbomb_crystal: [ 'crystal_5', 'crystal_6' ]
  green_pendant: [ 'green_pendant' ]
  pendant: [ 'red_pendant', 'blue_pendant' ]
layout:
  # Layout defines the default layout if nothing overrides it.
  # Items defines the space that items are in. Items below are in "status
  # screen" order, with items that would share a slot being in arrays.
  # Note: "bow_and_arrows" is a "special" UI that pulls from "slots" for what it
  # should contain by default.
  items: [
    [ "bow_and_arrows", [ "boomerang", "magic_boomerang" ], "hookshot", "bombs", [ "mushroom", "magic_powder" ] ]
    [ "fire_rod", "ice_rod", "bombos", "ether", "quake" ]
    [ "lamp", "hammer", [ "shovel", "flute" ], "net", "book" ]
    [ "bottle", "red_cane", "blue_cane", "cape", "mirror" ]
  ]
  # Equipment is simply a list of either slot IDs (which mask item IDs if they
  # happen to share them) or item IDs (if an item is present or not).
  equipment: [
    "sword", "armor", "shield", "pegasus_boots", "glove", "flippers",
    "moon_pearl"
  ]
  # List of dungeons. Each key is actually treated as a new row, and multiple
  # keys can be provided.
  dungeons:
    light: [
      "hyrule_castle_escape"
      "castle_tower"
      "eastern_palace"
      "desert_palace"
      "heras_tower"
    ]
    dark: [
      "dark_palace"
      "swamp_palace"
      "skull_woods"
      "thieves_town"
      "ice_palace"
      "misery_mire"
      "turtle_rock"
      "ganons_tower"
    ]
  maps: [ "light-world", "dark-world" ]
  # NOTE(review): footer is intentionally empty in the default layout —
  # presumably modes may override it; confirm against the consumers.
  footer: { }
# NOTE(review): presumably the rules enabled by default for all logics —
# confirm against the code that reads this key ("none" logic overrides it).
defaults: [
  "bombs"
]
# The various known logics. Each logic may override rules, slots, and defaults.
logics:
  standard:
    # This is the generic logic. It changes nothing, but needs to exist so that
    # users can select it.
    name: "Standard"
  # Changes for Swordless logic
  swordless:
    name: "Swordless"
    rules:
      use_medallion:
        # In Swordless, the medallions can always be used within the logic
        # defined in here (there are special Medallion Tiles)
        requires: true
      cut_drapes:
        # Drapes don't need cutting in swordless.
        requires: true
      melt_ice:
        # Swordless allows the Ice Palace to be completed with just Bombos even
        # without the hammer.
        requires: any: [ 'fire_rod', 'bombos' ]
      # In swordless, the hammer isn't required to open the medallion dungeons:
      use_mire_medallion:
        name: "Use Misery Mire Medallion"
        requires: 'mire_medallion'
      use_turtle_medallion:
        name: "Use Turtle Rock Medallion"
        requires: 'turtle_medallion'
    slots:
      # Override sword to not be selectable
      sword: [ null ]
  # Changes for retro logic
  retro:
    name: "Retro"
    rules:
      # Bow now needs a quiver to be useful
      bow_and_arrows:
        requires: any: [ 'bow_and_wooden_arrows', 'bow_and_silver_arrows' ], all: 'rupee_quiver'
  none:
    name: 'No Logic'
    defaults: [ 'bombs', 'pegasus_boots' ]
    # Probably going to just special-case this rather than have it rewrite all the rules to true
# Version metadata for the data set itself.
version:
  date: '2020-07-25'
  # BUGFIX: removed a stray " |" delimiter fused onto the end of this line
  # (residue from an external table format, not valid CoffeeScript data).
  alttpr: 'v31'
[
{
"context": "###\n© Copyright 2013-2014 Stephan Jorek <stephan.jorek@gmail.com> \n© Copyright 2006 Goog",
"end": 39,
"score": 0.9998916387557983,
"start": 26,
"tag": "NAME",
"value": "Stephan Jorek"
},
{
"context": "###\n© Copyright 2013-2014 Stephan Jorek <stephan.jorek@gmail.com>... | src/Dom/Document.coffee | sjorek/goatee.js | 0 | ###
© Copyright 2013-2014 Stephan Jorek <stephan.jorek@gmail.com>
© Copyright 2006 Google Inc. <http://www.google.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
<http://www.apache.org/licenses/LICENSE-2.0>
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing
permissions and limitations under the License.
###
#~ require
{Constants:{
CSS_display,
CSS_position
}} = require '../Core/Constants'
{Node:{
DOCUMENT_NODE #, DOCUMENT_FRAGMENT_NODE
}} = require './Node'
{Utility:{
camelize,
dashify
}} = require '../Core/Utility'
createTraversal = require('./Traversal/Level1NodeTypeMatcher').Level1NodeTypeMatcher.create
#!require('./Traversal/Level1NodeTypeMatcher').Level1NodeTypeMatcher.create
#!require('./Traversal/ElementTraversal').ElementTraversal.create
#!require('./Traversal/ElementChildren').ElementChildren.create
#!require('./Traversal/Level2ChildNodes').Level2ChildNodes.create
#!require('./Traversal/Level2NodeIterator').Level2NodeIterator.create
#!require('./Traversal/Level2TreeWalker').Level2TreeWalker.create
#~ export
# Export target: CommonJS `module.exports` when available, otherwise the
# current global object (plain browser-script usage).
exports = module?.exports ? this
# Document (≠ DOMDocument)
# ================================
# --------------------------------
# This module provides shortcuts depending on generic- or browser-based
# DOM implementations.
#
# @public
# @module Document
# @namespace goatee.Core
# @author Steffen Meschkat <mesch@google.com>
# @author Stephan Jorek <stephan.jorek@gmail.com>
# @type {Object}
exports.Document = Document =

  # NOTE(review): the property values below are evaluated ONCE at module load
  # time. Declaration order matters: `hasData`'s condition assigns the
  # file-scope flag `_dataSetAvailable`, which `getData`, `setData` and
  # `removeData` (declared later) read — do not reorder these properties.

  # --------------------------------
  # Global target document reference. Defaults to `null` if `window.document`
  # is not available.
  #
  # @static
  # @public
  # @property document
  # @type {Document}
  document: window?.document || null

  # --------------------------------
  # Get an element-node by its id
  #
  # @static
  # @public
  # @method getElementById
  # @param {DOMString} id
  # @param {Document} [doc=Document.document]
  # @returns {Node|null}
  getElementById: (id, doc) ->
    (doc || Document.document).getElementById(id)

  # --------------------------------
  # Creates a new node in the given document
  #
  # @static
  # @public
  # @method createElement
  # @param {DOMString} name                   The name of new element
  #                                           (i.e. the tag name)
  # @param {Document}  [doc=Document.document] The target document
  # @return {Element}                         A newly constructed element
  createElement: (name, doc) ->
    (doc || Document.document).createElement(name)

  # --------------------------------
  # Creates a new text node in the given document.
  #
  # @static
  # @public
  # @method createTextNode
  # @param {DOMString} text                   Text composing new text node
  # @param {Document}  [doc=Document.document] The target document
  # @return {Text}                            A newly constructed text node
  createTextNode: (text, doc) ->
    (doc || Document.document).createTextNode text

  # --------------------------------
  # Traverses the element nodes in the DOM section underneath the given
  # node and invokes the given callback as a method on every element
  # node encountered.
  #
  # @static
  # @public
  # @method traverseElements
  # @param {Element}  node     Parent element of the subtree to traverse
  # @param {Function} callback Called on each node in the traversal
  # @return {goatee.Dom.Traversal}
  traverseElements: (node, callback) ->
    createTraversal(callback).run node

  # --------------------------------
  # Test if an attribute exists.
  #
  # The implementation uses `Element.prototype.hasAttribute` if available,
  # otherwise it's a simple redirect to `Document.getAttribute`.
  # The feature-detection happens once, at module load time.
  #
  # @static
  # @public
  # @method hasAttribute
  # @param {Element}   node Element to interrogate
  # @param {DOMString} name Name of parameter to extract
  # @return {DOMString|null} Resulting attribute
  hasAttribute: if HTMLElement?::hasAttribute?
  then (node, name) -> node.hasAttribute name
  else (node, name) -> Document.getAttribute(node, name)?

  # --------------------------------
  # Get an attribute from the DOM. Simple redirect to compress code.
  #
  # @static
  # @public
  # @method getAttribute
  # @param {Element}   node Element to interrogate
  # @param {DOMString} name Name of parameter to extract
  # @return {DOMString|null} Resulting attribute
  getAttribute: (node, name) ->
    # NOTE(mesch): Neither in IE nor in Firefox, HTML DOM attributes implement
    # namespaces. All items in the attribute collection have `null` localName
    # and namespaceURI attribute values. In IE, we even encounter DIV elements
    # that don't implement the method `getAttributeNS()`.
    node.getAttribute name

  # --------------------------------
  # Set an attribute in the DOM. Simple redirect to compress code.
  #
  # @static
  # @public
  # @method setAttribute
  # @param {Element}          node  Element to interrogate
  # @param {DOMString}        name  Name of parameter to set
  # @param {DOMString|Number} value Set attribute to this value
  setAttribute: (node, name, value) ->
    node.setAttribute name, value
    return

  # --------------------------------
  # Remove an attribute from the DOM. Simple redirect to compress code.
  #
  # @static
  # @public
  # @method removeAttribute
  # @param {Element}   node Element to interrogate
  # @param {DOMString} name Name of parameter to remove
  removeAttribute: (node, name) ->
    node.removeAttribute(name)
    return

  # --------------------------------
  # Test if an data-attribute exists.
  #
  # This is the place to implement faster alternatives, i.e. by using
  # `hasAttribute` or the like.
  #
  # NOTE(review): the condition below deliberately ASSIGNS `_dataSetAvailable`
  # (dataset-API feature flag) as a side effect; in CoffeeScript this creates a
  # file-scope variable that `getData`/`setData`/`removeData` reuse. Keep this
  # property declared before them.
  #
  # @static
  # @public
  # @method hasData
  # @param {Element}   node Element to interrogate
  # @param {DOMString} name Name of data-attribute to extract
  # @return {Boolean}  Flag indicating if the data-attribute exists
  hasData: if (_dataSetAvailable = HTMLElement?::dataset? and DOMStringMap?)
  then (node, name) ->
    node.dataset?[camelize name]?
  else (node, name) ->
    Document.hasAttribute node, "data-#{dashify name}"

  # --------------------------------
  # Get an data-attribute from the DOM.
  #
  # This is the place to implement faster alternatives, i.e. by using
  # `getAttribute` or the like.
  #
  # @static
  # @public
  # @method getData
  # @param {Element}   node Element to interrogate
  # @param {DOMString} name Name of data-attribute to extract
  # @return {DOMString|null} Resulting data-attribute
  getData: if _dataSetAvailable
  then (node, name) ->
    if node.dataset? then node.dataset[camelize name] else null
  else (node, name) ->
    Document.getAttribute node, "data-#{dashify name}"

  # --------------------------------
  # Set an data-attribute in the DOM.
  #
  # This is the place to implement faster alternatives, i.e. by using
  # `setAttribute` or the like.
  #
  # @static
  # @public
  # @method setData
  # @param {Element}   node  Element to interrogate
  # @param {DOMString} name  Name of data-attribute to set
  # @param {DOMString} value Set data-attribute to this value
  setData: if _dataSetAvailable
  then (node, name, value) ->
    node.dataset[camelize name] = value
    return
  else (node, name, value) ->
    Document.setAttribute node, "data-#{dashify name}", value
    return

  # --------------------------------
  # Remove an data-attribute from the DOM.
  #
  # This is the place to implement faster alternatives, i.e. by using
  # `removeAttribute` or the like.
  #
  # @static
  # @public
  # @method removeData
  # @param {Element}   node Element to interrogate
  # @param {DOMString} name Name of data-attribute to remove
  removeData: if _dataSetAvailable
  then (node, name) ->
    delete node.dataset[camelize name]
    return
  else (node, name) ->
    Document.removeAttribute node, "data-#{dashify name}"
    return

  # --------------------------------
  # Clone a node in the DOM.
  #
  # @static
  # @public
  # @method cloneNode
  # @param {Node} node Node to clone
  # @return {Node}     Cloned node
  cloneNode: (node) ->
    # NOTE(mesch): we never so far wanted to use `node.cloneNode(false);`,
    # hence we default to `true` (=deep clone).
    node.cloneNode true

  # --------------------------------
  # Clone a element in the DOM. Alias of `Document.cloneNode(node);` above.
  #
  # @static
  # @public
  # @method cloneElement
  # @param {Element} element Element to clone
  # @return {Element}        Cloned element
  cloneElement: (element) ->
    @cloneNode element

  # --------------------------------
  # Returns the document owner of the given element. In particular, returns
  # `window.document` if node is null or the browser does not support the
  # `ownerDocument`-method. Returns the node, if the node is a document itself.
  #
  # @static
  # @public
  # @method ownerDocument
  # @param {Node}     [node]                   The node whose ownerDocument
  #                                            is requested
  # @param {Document} [doc=Document.document]  The optional fallback-value
  # @return {Document}                         The owner-document or if
  #                                            unsupported `window.document`
  ownerDocument: (node, doc) ->
    # TODO: What about document-fragment-nodes ?
    return doc || Document.document if not node? # …
    #!… or `node.nodeType == DOCUMENT_FRAGMENT_NODE`
    # We deliberately enforce equality instead of identity here.
    # (The backticks embed raw JavaScript so `==` is not compiled to `===`.)
    return node if `node.nodeType == DOCUMENT_NODE`
    return node.ownerDocument || doc || Document.document

  # --------------------------------
  # Appends a new child to the specified (parent) node.
  #
  # @static
  # @public
  # @method appendChild
  # @param {Element} node  The parent element
  # @param {Node}    child The child-node to append
  # @return {Node}         The newly appended node
  appendChild: (node, child) ->
    node.appendChild child

  # --------------------------------
  # Sets display to default.
  #
  # @static
  # @public
  # @method displayDefault
  # @param {Element} node The DOM element to manipulate
  displayDefault: (node) ->
    node.style[CSS_display] = ''
    return

  # --------------------------------
  # Sets display to none. Doing this as a function saves a few bytes for
  # the 'style.display' property and the 'none' literal.
  #
  # @static
  # @public
  # @method displayNone
  # @param {Element} node The DOM element to manipulate
  displayNone: (node) ->
    node.style[CSS_display] = 'none'
    return

  # --------------------------------
  # Sets position style attribute to default.
  #
  # @static
  # @public
  # @method positionDefault
  # @param {Element} node The DOM element to manipulate
  positionDefault: (node) ->
    node.style[CSS_position] = ''
    return

  # --------------------------------
  # Sets position style attribute to absolute.
  #
  # @static
  # @public
  # @method positionAbsolute
  # @param {Element} node The DOM element to manipulate
  positionAbsolute: (node) ->
    node.style[CSS_position] = 'absolute'
    return

  # --------------------------------
  # Inserts a new child before a given sibling.
  #
  # NOTE(review): assumes `oldChild` is attached (has a `parentNode`) —
  # confirm callers guarantee this.
  #
  # @static
  # @public
  # @method insertBefore
  # @param {Node} newChild The node to insert
  # @param {Node} oldChild The sibling node
  # @return {Node}         A reference to the new child
  insertBefore: (newChild, oldChild) ->
    oldChild.parentNode.insertBefore newChild, oldChild

  # --------------------------------
  # Replaces an old child node with a new child node.
  #
  # NOTE(review): assumes `oldChild` is attached (has a `parentNode`) —
  # confirm callers guarantee this.
  #
  # @static
  # @public
  # @method replaceChild
  # @param {Node} newChild The new child to append
  # @param {Node} oldChild The old child to remove
  # @return {Node}         The replaced node
  replaceChild: (newChild, oldChild) ->
    oldChild.parentNode.replaceChild newChild, oldChild

  # --------------------------------
  # Removes a node from the DOM.
  #
  # NOTE(review): assumes `node` is attached (has a `parentNode`) —
  # confirm callers guarantee this.
  #
  # @static
  # @public
  # @method removeNode
  # @param {Node} node The node to remove
  # @return {Node}     The removed node
  removeNode: (node) ->
    Document.removeChild node.parentNode, node

  # --------------------------------
  # Remove a child from the specified (parent) node.
  #
  # @static
  # @public
  # @method removeChild
  # @param {Element} node  The parent element
  # @param {Node}    child The child-node to remove
  # @return {Node}         The removed node
  removeChild: (node, child) ->
    node.removeChild child
| 214019 | ###
© Copyright 2013-2014 Stephan Jorek <stephan.jorek@gmail.com>
© Copyright 2006 Google Inc. <http://www.google.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
<http://www.apache.org/licenses/LICENSE-2.0>
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing
permissions and limitations under the License.
###
#~ require
{Constants:{
CSS_display,
CSS_position
}} = require '../Core/Constants'
{Node:{
DOCUMENT_NODE #, DOCUMENT_FRAGMENT_NODE
}} = require './Node'
{Utility:{
camelize,
dashify
}} = require '../Core/Utility'
createTraversal = require('./Traversal/Level1NodeTypeMatcher').Level1NodeTypeMatcher.create
#!require('./Traversal/Level1NodeTypeMatcher').Level1NodeTypeMatcher.create
#!require('./Traversal/ElementTraversal').ElementTraversal.create
#!require('./Traversal/ElementChildren').ElementChildren.create
#!require('./Traversal/Level2ChildNodes').Level2ChildNodes.create
#!require('./Traversal/Level2NodeIterator').Level2NodeIterator.create
#!require('./Traversal/Level2TreeWalker').Level2TreeWalker.create
#~ export
exports = module?.exports ? this
# Document (≠ DOMDocument)
# ================================
# --------------------------------
# This module provides shortcuts depending on generic- or browser-based
# DOM implementations.
#
# @public
# @module Document
# @namespace goatee.Core
# @author Steffen Meschkat <mesch@google.com>
# @author Stephan Jorek <stephan.jorek@gmail.com>
# @type {Object}
exports.Document = Document =
# --------------------------------
# Global target document reference. Defaults to `null` if `window.document`
# is not available.
#
# @static
# @public
# @property document
# @type {Document}
document: window?.document || null
# --------------------------------
# Get an element-node by its id
#
# @static
# @public
# @method getElementById
# @param {DOMString} id
# @param {Document} [doc=Document.document]
# @returns {Node|null}
getElementById: (id, doc) ->
(doc || Document.document).getElementById(id)
# --------------------------------
# Creates a new node in the given document
#
# @static
# @public
# @method createElement
# @param {DOMString} name The name of new element
# (i.e. the tag name)
# @param {Document} [doc=Document.document] The target document
# @return {Element} A newly constructed element
createElement: (name, doc) ->
(doc || Document.document).createElement(name)
# --------------------------------
# Creates a new text node in the given document.
#
# @static
# @public
# @method createTextNode
# @param {DOMString} text Text composing new text node
# @param {Document} [doc=Document.document] The target document
# @return {Text} A newly constructed text node
createTextNode: (text, doc) ->
(doc || Document.document).createTextNode text
# --------------------------------
# Traverses the element nodes in the DOM section underneath the given
# node and invokes the given callback as a method on every element
# node encountered.
#
# @static
# @public
# @method traverseElements
# @param {Element} node Parent element of the subtree to traverse
# @param {Function} callback Called on each node in the traversal
# @return {goatee.Dom.Traversal}
traverseElements: (node, callback) ->
createTraversal(callback).run node
# --------------------------------
# Test if an attribute exists.
#
# The implementation uses `Element.prototype.hasAttribute` if available,
# otherwise it's a simple redirect to `Document.getAttribute`.
#
# @static
# @public
# @method hasAttribute
# @param {Element} node Element to interrogate
# @param {DOMString} name Name of parameter to extract
# @return {DOMString|null} Resulting attribute
hasAttribute: if HTMLElement?::hasAttribute?
then (node, name) -> node.hasAttribute name
else (node, name) -> Document.getAttribute(node, name)?
# --------------------------------
# Get an attribute from the DOM. Simple redirect to compress code.
#
# @static
# @public
# @method getAttribute
# @param {Element} node Element to interrogate
# @param {DOMString} name Name of parameter to extract
# @return {DOMString|null} Resulting attribute
getAttribute: (node, name) ->
# NOTE(mesch): Neither in IE nor in Firefox, HTML DOM attributes implement
# namespaces. All items in the attribute collection have `null` localName
# and namespaceURI attribute values. In IE, we even encounter DIV elements
# that don't implement the method `getAttributeNS()`.
node.getAttribute name
# --------------------------------
# Set an attribute in the DOM. Simple redirect to compress code.
#
# @static
# @public
# @method setAttribute
# @param {Element} node Element to interrogate
# @param {DOMString} name Name of parameter to set
# @param {DOMString|Number} value Set attribute to this value
setAttribute: (node, name, value) ->
node.setAttribute name, value
return
# --------------------------------
# Remove an attribute from the DOM. Simple redirect to compress code.
#
# @static
# @public
# @method removeAttribute
# @param {Element} node Element to interrogate
# @param {DOMString} name Name of parameter to remove
removeAttribute: (node, name) ->
node.removeAttribute(name)
return
# --------------------------------
# Test if an data-attribute exists.
#
# This is the place to implement faster alternatives, i.e. by using
# `hasAttribute` or the like.
#
# @static
# @public
# @method hasData
# @param {Element} node Element to interrogate
# @param {DOMString} name Name of data-attribute to extract
# @return {Boolean} Flag indicating if the data-attribute exists
hasData: if (_dataSetAvailable = HTMLElement?::dataset? and DOMStringMap?)
then (node, name) ->
node.dataset?[camelize name]?
else (node, name) ->
Document.hasAttribute node, "data-#{dashify name}"
# --------------------------------
# Get an data-attribute from the DOM.
#
# This is the place to implement faster alternatives, i.e. by using
# `getAttribute` or the like.
#
# @static
# @public
# @method getData
# @param {Element} node Element to interrogate
# @param {DOMString} name Name of data-attribute to extract
# @return {DOMString|null} Resulting data-attribute
getData: if _dataSetAvailable
then (node, name) ->
if node.dataset? then node.dataset[camelize name] else null
else (node, name) ->
Document.getAttribute node, "data-#{dashify name}"
# --------------------------------
# Set an data-attribute in the DOM.
#
# This is the place to implement faster alternatives, i.e. by using
# `setAttribute` or the like.
#
# @static
# @public
# @method setData
# @param {Element} node Element to interrogate
# @param {DOMString} name Name of data-attribute to set
# @param {DOMString} value Set data-attribute to this value
setData: if _dataSetAvailable
then (node, name, value) ->
node.dataset[camelize name] = value
return
else (node, name, value) ->
Document.setAttribute node, "data-#{dashify name}", value
return
# --------------------------------
# Remove an data-attribute from the DOM.
#
# This is the place to implement faster alternatives, i.e. by using
# `removeAttribute` or the like.
#
# @static
# @public
# @method removeData
# @param {Element} node Element to interrogate
# @param {DOMString} name Name of data-attribute to remove
removeData: if _dataSetAvailable
then (node, name) ->
delete node.dataset[camelize name]
return
else (node, name) ->
Document.removeAttribute node, "data-#{dashify name}"
return
# --------------------------------
# Clone a node in the DOM.
#
# @static
# @public
# @method cloneNode
# @param {Node} node Node to clone
# @return {Node} Cloned node
cloneNode: (node) ->
# NOTE(mesch): we never so far wanted to use `node.cloneNode(false);`,
# hence we default to `true` (=deep clone).
node.cloneNode true
# --------------------------------
# Clone a element in the DOM. Alias of `Document.cloneNode(node);` above.
#
# @static
# @public
# @method cloneElement
# @param {Element} element Element to clone
# @return {Element} Cloned element
cloneElement: (element) ->
@cloneNode element
  # --------------------------------
  # Returns the document owner of the given element. In particular, returns
  # `window.document` if node is null or the browser does not support the
  # `ownerDocument`-method. Returns the node, if the node is a document itself.
  #
  # @static
  # @public
  # @method ownerDocument
  # @param {Node}     [node]                  The node whose ownerDocument
  #                                           is requested
  # @param {Document} [doc=Document.document] The optional fallback-value
  # @return {Document}                        The owner-document or if
  #                                           unsupported `window.document`
  ownerDocument: (node, doc) ->
    # TODO: What about document-fragment-nodes ?
    return doc || Document.document if not node? # …
    #!… or `node.nodeType == DOCUMENT_FRAGMENT_NODE`
    # We deliberately enforce equality instead of identity here; the backticks
    # embed raw JavaScript so `==` is NOT compiled to CoffeeScript's `===`.
    return node if `node.nodeType == DOCUMENT_NODE`
    return node.ownerDocument || doc || Document.document
  # --------------------------------
  # Appends a new child to the specified (parent) node.
  #
  # @static
  # @public
  # @method appendChild
  # @param {Element} node  The parent element
  # @param {Node}    child The child-node to append
  # @return {Node} The newly appended node (return value of the native
  #                `Node.appendChild`)
  appendChild: (node, child) ->
    node.appendChild child
# --------------------------------
# Sets display to default.
#
# @static
# @public
# @method displayDefault
# @param {Element} node The DOM element to manipulate
displayDefault: (node) ->
node.style[CSS_display] = ''
return
# --------------------------------
# Sets display to none. Doing this as a function saves a few bytes for
# the 'style.display' property and the 'none' literal.
#
# @static
# @public
# @method displayNone
# @param {Element} node The DOM element to manipulate
displayNone: (node) ->
node.style[CSS_display] = 'none'
return
# --------------------------------
# Sets position style attribute to default.
#
# @static
# @public
# @method positionDefault
# @param {Element} node The DOM element to manipulate
positionDefault: (node) ->
node.style[CSS_position] = ''
return
# --------------------------------
# Sets position style attribute to absolute.
#
# @static
# @public
# @method positionAbsolute
# @param {Element} node The DOM element to manipulate
positionAbsolute: (node) ->
node.style[CSS_position] = 'absolute'
return
  # --------------------------------
  # Inserts a new child before a given sibling.
  #
  # NOTE(review): assumes `oldChild` is attached to the DOM — this throws if
  # `oldChild.parentNode` is null; confirm callers guarantee attachment.
  #
  # @static
  # @public
  # @method insertBefore
  # @param {Node} newChild The node to insert
  # @param {Node} oldChild The sibling node
  # @return {Node} A reference to the new child
  insertBefore: (newChild, oldChild) ->
    oldChild.parentNode.insertBefore newChild, oldChild
  # --------------------------------
  # Replaces an old child node with a new child node.
  #
  # NOTE(review): like `insertBefore`, requires `oldChild.parentNode` to be
  # non-null.
  #
  # @static
  # @public
  # @method replaceChild
  # @param {Node} newChild The new child to append
  # @param {Node} oldChild The old child to remove
  # @return {Node} The replaced node
  replaceChild: (newChild, oldChild) ->
    oldChild.parentNode.replaceChild newChild, oldChild
  # --------------------------------
  # Removes a node from the DOM by delegating to `Document.removeChild`
  # with the node's own parent.
  #
  # @static
  # @public
  # @method removeNode
  # @param {Node} node The node to remove
  # @return {Node} The removed node
  removeNode: (node) ->
    Document.removeChild node.parentNode, node
  # --------------------------------
  # Remove a child from the specified (parent) node.
  #
  # @static
  # @public
  # @method removeChild
  # @param {Element} node  The parent element
  # @param {Node}    child The child-node to remove
  # @return {Node} The removed node
  removeChild: (node, child) ->
    node.removeChild child
| true | ###
© Copyright 2013-2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
© Copyright 2006 Google Inc. <http://www.google.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
<http://www.apache.org/licenses/LICENSE-2.0>
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing
permissions and limitations under the License.
###
#~ require
{Constants:{
CSS_display,
CSS_position
}} = require '../Core/Constants'
{Node:{
DOCUMENT_NODE #, DOCUMENT_FRAGMENT_NODE
}} = require './Node'
{Utility:{
camelize,
dashify
}} = require '../Core/Utility'
createTraversal = require('./Traversal/Level1NodeTypeMatcher').Level1NodeTypeMatcher.create
#!require('./Traversal/Level1NodeTypeMatcher').Level1NodeTypeMatcher.create
#!require('./Traversal/ElementTraversal').ElementTraversal.create
#!require('./Traversal/ElementChildren').ElementChildren.create
#!require('./Traversal/Level2ChildNodes').Level2ChildNodes.create
#!require('./Traversal/Level2NodeIterator').Level2NodeIterator.create
#!require('./Traversal/Level2TreeWalker').Level2TreeWalker.create
#~ export
exports = module?.exports ? this
# Document (≠ DOMDocument)
# ================================
# --------------------------------
# This module provides shortcuts depending on generic- or browser-based
# DOM implementations.
#
# @public
# @module Document
# @namespace goatee.Core
# @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# @type {Object}
exports.Document = Document =
# --------------------------------
# Global target document reference. Defaults to `null` if `window.document`
# is not available.
#
# @static
# @public
# @property document
# @type {Document}
document: window?.document || null
# --------------------------------
# Get an element-node by its id
#
# @static
# @public
# @method getElementById
# @param {DOMString} id
# @param {Document} [doc=Document.document]
# @returns {Node|null}
getElementById: (id, doc) ->
(doc || Document.document).getElementById(id)
# --------------------------------
# Creates a new node in the given document
#
# @static
# @public
# @method createElement
# @param {DOMString} name The name of new element
# (i.e. the tag name)
# @param {Document} [doc=Document.document] The target document
# @return {Element} A newly constructed element
createElement: (name, doc) ->
(doc || Document.document).createElement(name)
# --------------------------------
# Creates a new text node in the given document.
#
# @static
# @public
# @method createTextNode
# @param {DOMString} text Text composing new text node
# @param {Document} [doc=Document.document] The target document
# @return {Text} A newly constructed text node
createTextNode: (text, doc) ->
(doc || Document.document).createTextNode text
# --------------------------------
# Traverses the element nodes in the DOM section underneath the given
# node and invokes the given callback as a method on every element
# node encountered.
#
# @static
# @public
# @method traverseElements
# @param {Element} node Parent element of the subtree to traverse
# @param {Function} callback Called on each node in the traversal
# @return {goatee.Dom.Traversal}
traverseElements: (node, callback) ->
createTraversal(callback).run node
# --------------------------------
# Test if an attribute exists.
#
# The implementation uses `Element.prototype.hasAttribute` if available,
# otherwise it's a simple redirect to `Document.getAttribute`.
#
# @static
# @public
# @method hasAttribute
# @param {Element} node Element to interrogate
# @param {DOMString} name Name of parameter to extract
# @return {DOMString|null} Resulting attribute
hasAttribute: if HTMLElement?::hasAttribute?
then (node, name) -> node.hasAttribute name
else (node, name) -> Document.getAttribute(node, name)?
# --------------------------------
# Get an attribute from the DOM. Simple redirect to compress code.
#
# @static
# @public
# @method getAttribute
# @param {Element} node Element to interrogate
# @param {DOMString} name Name of parameter to extract
# @return {DOMString|null} Resulting attribute
getAttribute: (node, name) ->
# NOTE(mesch): Neither in IE nor in Firefox, HTML DOM attributes implement
# namespaces. All items in the attribute collection have `null` localName
# and namespaceURI attribute values. In IE, we even encounter DIV elements
# that don't implement the method `getAttributeNS()`.
node.getAttribute name
# --------------------------------
# Set an attribute in the DOM. Simple redirect to compress code.
#
# @static
# @public
# @method setAttribute
# @param {Element} node Element to interrogate
# @param {DOMString} name Name of parameter to set
# @param {DOMString|Number} value Set attribute to this value
setAttribute: (node, name, value) ->
node.setAttribute name, value
return
# --------------------------------
# Remove an attribute from the DOM. Simple redirect to compress code.
#
# @static
# @public
# @method removeAttribute
# @param {Element} node Element to interrogate
# @param {DOMString} name Name of parameter to remove
removeAttribute: (node, name) ->
node.removeAttribute(name)
return
  # --------------------------------
  # Test if a data-attribute exists.
  #
  # This is the place to implement faster alternatives, i.e. by using
  # `hasAttribute` or the like.
  #
  # NOTE: the condition below deliberately assigns `_dataSetAvailable` as a
  # side effect; the feature-detection result is reused by `getData`,
  # `setData` and `removeData` further down.
  #
  # @static
  # @public
  # @method hasData
  # @param {Element}   node Element to interrogate
  # @param {DOMString} name Name of data-attribute to extract
  # @return {Boolean} Flag indicating if the data-attribute exists
  hasData: if (_dataSetAvailable = HTMLElement?::dataset? and DOMStringMap?)
    then (node, name) ->
      node.dataset?[camelize name]?
    else (node, name) ->
      Document.hasAttribute node, "data-#{dashify name}"
# --------------------------------
# Get an data-attribute from the DOM.
#
# This is the place to implement faster alternatives, i.e. by using
# `getAttribute` or the like.
#
# @static
# @public
# @method getData
# @param {Element} node Element to interrogate
# @param {DOMString} name Name of data-attribute to extract
# @return {DOMString|null} Resulting data-attribute
getData: if _dataSetAvailable
then (node, name) ->
if node.dataset? then node.dataset[camelize name] else null
else (node, name) ->
Document.getAttribute node, "data-#{dashify name}"
# --------------------------------
# Set an data-attribute in the DOM.
#
# This is the place to implement faster alternatives, i.e. by using
# `setAttribute` or the like.
#
# @static
# @public
# @method setData
# @param {Element} node Element to interrogate
# @param {DOMString} name Name of data-attribute to set
# @param {DOMString} value Set data-attribute to this value
setData: if _dataSetAvailable
then (node, name, value) ->
node.dataset[camelize name] = value
return
else (node, name, value) ->
Document.setAttribute node, "data-#{dashify name}", value
return
# --------------------------------
# Remove an data-attribute from the DOM.
#
# This is the place to implement faster alternatives, i.e. by using
# `removeAttribute` or the like.
#
# @static
# @public
# @method removeData
# @param {Element} node Element to interrogate
# @param {DOMString} name Name of data-attribute to remove
removeData: if _dataSetAvailable
then (node, name) ->
delete node.dataset[camelize name]
return
else (node, name) ->
Document.removeAttribute node, "data-#{dashify name}"
return
# --------------------------------
# Clone a node in the DOM.
#
# @static
# @public
# @method cloneNode
# @param {Node} node Node to clone
# @return {Node} Cloned node
cloneNode: (node) ->
# NOTE(mesch): we never so far wanted to use `node.cloneNode(false);`,
# hence we default to `true` (=deep clone).
node.cloneNode true
# --------------------------------
# Clone a element in the DOM. Alias of `Document.cloneNode(node);` above.
#
# @static
# @public
# @method cloneElement
# @param {Element} element Element to clone
# @return {Element} Cloned element
cloneElement: (element) ->
@cloneNode element
  # --------------------------------
  # Returns the document owner of the given element. In particular, returns
  # `window.document` if node is null or the browser does not support the
  # `ownerDocument`-method. Returns the node, if the node is a document itself.
  #
  # @static
  # @public
  # @method ownerDocument
  # @param {Node}     [node]                  The node whose ownerDocument
  #                                           is requested
  # @param {Document} [doc=Document.document] The optional fallback-value
  # @return {Document}                        The owner-document or if
  #                                           unsupported `window.document`
  ownerDocument: (node, doc) ->
    # TODO: What about document-fragment-nodes ?
    return doc || Document.document if not node? # …
    #!… or `node.nodeType == DOCUMENT_FRAGMENT_NODE`
    # We deliberately enforce equality instead of identity here; the backticks
    # embed raw JavaScript so `==` is NOT compiled to CoffeeScript's `===`.
    return node if `node.nodeType == DOCUMENT_NODE`
    return node.ownerDocument || doc || Document.document
# --------------------------------
# Appends a new child to the specified (parent) node.
#
# @static
# @public
# @method appendChild
# @param {Element} node The parent element
# @param {Node} child The child-node to append
# @return {Node} The newly appended node
appendChild: (node, child) ->
node.appendChild child
# --------------------------------
# Sets display to default.
#
# @static
# @public
# @method displayDefault
# @param {Element} node The DOM element to manipulate
displayDefault: (node) ->
node.style[CSS_display] = ''
return
# --------------------------------
# Sets display to none. Doing this as a function saves a few bytes for
# the 'style.display' property and the 'none' literal.
#
# @static
# @public
# @method displayNone
# @param {Element} node The DOM element to manipulate
displayNone: (node) ->
node.style[CSS_display] = 'none'
return
# --------------------------------
# Sets position style attribute to default.
#
# @static
# @public
# @method positionDefault
# @param {Element} node The DOM element to manipulate
positionDefault: (node) ->
node.style[CSS_position] = ''
return
# --------------------------------
# Sets position style attribute to absolute.
#
# @static
# @public
# @method positionAbsolute
# @param {Element} node The DOM element to manipulate
positionAbsolute: (node) ->
node.style[CSS_position] = 'absolute'
return
# --------------------------------
# Inserts a new child before a given sibling.
#
# @static
# @public
# @method insertBefore
# @param {Node} newChild The node to insert
# @param {Node} oldChild The sibling node
# @return {Node} A reference to the new child
insertBefore: (newChild, oldChild) ->
oldChild.parentNode.insertBefore newChild, oldChild
# --------------------------------
# Replaces an old child node with a new child node.
#
# @static
# @public
# @method replaceChild
# @param {Node} newChild The new child to append
# @param {Node} oldChild The old child to remove
# @return {Node} The replaced node
replaceChild: (newChild, oldChild) ->
oldChild.parentNode.replaceChild newChild, oldChild
# --------------------------------
# Removes a node from the DOM.
#
# @static
# @public
# @method removeNode
# @param {Node} node The node to remove
# @return {Node} The removed node
removeNode: (node) ->
Document.removeChild node.parentNode, node
# --------------------------------
# Remove a child from the specified (parent) node.
#
# @static
# @public
# @method removeChild
# @param {Element} node The parent element
# @param {Node} child The child-node to remove
# @return {Node} The removed node
removeChild: (node, child) ->
node.removeChild child
|
[
{
"context": "###*\n * TMS-black模块开发构建工具\n * @author [Pang.J.G]\n * @version [0.0.1]\n * @date [2016-01-20 00:01:",
"end": 46,
"score": 0.999840259552002,
"start": 38,
"tag": "NAME",
"value": "Pang.J.G"
},
{
"context": "ring'\n }).option(\"author\", {\n default: 'lmtdit',\... | gulpfile.coffee | lmtdit/widget-builder | 0 | ###*
* TMS-black模块开发构建工具
* @author [Pang.J.G]
* @version [0.0.1]
* @date [2016-01-20 00:01:12]
* @required [gulp]
###
fs = require 'fs'
path = require 'path'
gulp = require 'gulp'
gulpIf = require 'gulp-if'
gulpIg = require 'gulp-ignore'
_ = require 'lodash'
crypto = require 'crypto'
yargs = require 'yargs'
less = require 'gulp-less'
uglify = require 'uglify-js'
autopre = require 'gulp-autoprefixer'
plumber = require 'gulp-plumber'
{JSHINT} = require 'jshint'
gutil = require 'gulp-util'
log = gutil.log
color = gutil.colors
PluginError = gutil.PluginError
CleanCSS = require 'clean-css'
through2 = require 'through2'
# 设置运行的命令参数
argv = yargs.option("e", {
alias: 'env',
demand: true
default: 'local',
describe: color.cyan('项目的运行环境'),
type: 'string'
}).option("author", {
default: 'lmtdit',
describe: color.cyan('设置项目的作者'),
type: 'string'
}).option("email", {
default: 'lmtdit@gmail.com',
describe: color.cyan('设置项目作者的email'),
type: 'string'
}).option("hash", {
alias: 'hashlen',
default: 10,
describe: color.cyan('设置生产文件名的hash长度'),
type: 'number'
}).option("cdn", {
default: '',
describe: color.cyan('设置项目发布的cdn域名'),
type: 'string'
})
.help('h')
.alias('h', 'help')
.argv
# 全局的配置
tasks = argv._.concat([])
globalNameSpace = '_LIB_' #全局的命名空间
root = process.env.INIT_CWD
defaultTasks = ['less','js','watch','default','public','public-all']
global.Cache = {}
try
global.Cache = require '../global/globalMap.json'
catch error
# 一些正则
REGEX =
uri: /globalUri\(('|")([^'|^"]*)(\w+).(png|gif|jpg|html|js|css)('|")\)/g
uriVal: /\([\s\S]*?\)/
cssBg: /url\([\S\s]*?\)/g
bgUrl: /\([\s\S]*?.(png|jpg|gif)\)/
### ******************** base functions ******************** ###
Tools =
# md5
md5: (source) ->
_buf = new Buffer(source)
_str = _buf.toString("binary")
return crypto.createHash('md5').update(_str, 'utf8').digest('hex')
# make dir
mkdirsSync: (dirpath, mode)->
if fs.existsSync(dirpath)
return true
else
if Tools.mkdirsSync path.dirname(dirpath), mode
fs.mkdirSync(dirpath, mode)
return true
# 错误警报
errHandler:(e,cb)->
gutil.beep()
gutil.beep()
log e
# 压缩css/js源码
minify: (source,type)->
type = type or "js"
switch type
when 'css'
cssOpt = {
keepBreaks:false
compatibility:
properties:
iePrefixHack:true
ieSuffixHack:true
}
mangled = new CleanCSS(cssOpt).minify(source)
source = mangled.styles.replace(/\/\*([\s\S]*?)\*\//g, '')
when 'js'
source = Tools._replaceUriValue(source)
mangled = uglify.minify(source,{fromString: true})
source = mangled.code
when 'html'
source = source.replace(/<!--([\s\S]*?)-->/g, '')
.replace(/\/\*([\s\S]*?)\*\//g, '')
.replace(/^\s+$/g, '')
.replace(/\n/g, '')
.replace(/\t/g, '')
.replace(/\r/g, '')
.replace(/\n\s+/g, ' ')
.replace(/\s+/g, ' ')
.replace(/>([\n\s+]*?)</g,'><')
return source
# 获取文件
getFileSync: (file, encoding)->
_encoding = encoding or 'utf8'
fileCon = ''
if fs.existsSync(file)
stats = fs.statSync(file)
if stats.isFile()
fileCon = fs.readFileSync(file, _encoding)
return fileCon
# 读取json文件内容
getJSONSync: (file) ->
fileCon = Tools.getFileSync(file)
data = {}
if fileCon
fileCon = fileCon.replace(/\/\/[^\n]*/g, '')
try
data = JSON.parse(fileCon)
catch e
console.log e
return data
# 写入文件
writeFile: (file, source,offlog)->
# 文件存在并且MD5值一样,则不重复写入
name = path.basename(file);
if fs.existsSync(file) and Tools.md5(Tools.getFileSync(file)) is Tools.md5(source)
return false
Tools.mkdirsSync(path.dirname(file))
fs.writeFileSync(file, source, 'utf8')
offlog or log("'" + color.cyan(file) + "'", "build success.")
# 获取文件夹下的一级目录列表
getFolders: (fPath)->
folders = []
try
fs.readdirSync(fPath).forEach (v)->
folder = path.join fPath,v
if fs.statSync(folder).isDirectory() and v.indexOf('.') != 0
folders.push v
catch error
# log error.Error
return folders
# 获取文件夹下的文件列表列表
getFiles: (fPath,type)->
list = []
try
fs.readdirSync(fPath).forEach (v)->
file = path.join fPath,v
if fs.existsSync(file) and v.indexOf('.' + type) > 0
list.push file
catch error
# log error.Error
return list
# 生成 debug 文件路径
_setDegbugPath: (parse)->
parse.base = "_debug." + parse.name + parse.ext
return path.format(parse)
# 获取原文件名
_setSrcPath: (parse)->
parse.base = parse.name.replace('_debug.','') + parse.ext
return path.format(parse)
# 生成 dist 文件路径
_setDistPath: (parse,hash)->
parse.base = parse.name + "." + hash.substring(0,argv.hash) + parse.ext
return path.format(parse)
# 生成缓存的类型
_setCacheType: (parse)->
return parse.ext.replace('.','')
# 从缓存中读取 dist 文件路径
_getDistName: (type,name)->
if _.has(global.Cache,type + "Map") and global.Cache[type + "Map"][name]
return global.Cache[type + "Map"][name].distPath
else
return name
# 替换JS中的内嵌资源
# 例如:globalUri("dir/name.ext")-->globalUri("dir/name.md5hash.ext")
_replaceUriValue: (source)->
return source.replace REGEX.uri,(res)->
_val = res.match(REGEX.uriVal).shift().replace(/[\(\)"']/g,'')
_valArr = _val.split('/')
type = _valArr.shift()
name = _valArr.join('/')
distName = Tools._getDistName(type,name)
return res.replace(name,distName)
# 替换css中的背景图片或字体文件引用资源
# 例如:url('xxxxx.xxx')-->url('xxxxx.md5hash.xxx')
_replaceCssBg: (source)->
return source.replace REGEX.cssBg,(res)->
_val = res.match(REGEX.uriVal).shift().replace(/[\(\)"']/g,'')
if _val.indexOf('font/') != -1
name = _val.split('font/')[1]
.split(/(\?|#)/)[0]
distName = Tools._getDistName('font',name)
return res.replace(name,distName)
else if _val.indexOf('img/') != -1
name = _val.split('img/')[1]
distName = Tools._getDistName('img',name)
return res.replace(name,distName)
else
return res
# 替换css中的背景图片为动态请求
_replaceBgUri: (source)->
return source.replace REGEX.cssBg,(str)->
val = str.replace REGEX.bgUrl,($1)->
img = $1.replace(/[\(\)'"]/g,"")
if $1.indexOf('/global/img/') != -1
img = img.replace(/\/\w+\/img/,'img')
return "('\"+lib.globalUri(\"#{img}\")+\"')"
else
img = img.replace /\/\w+\/img/,'img'
return "('\"+lib.widgetUri(\"#{img}\")+\"')"
return val
  ###*
  * npm-style version comparison.
  * Compares two software version numbers (e.g. "1.7.1" or "1.2.1").
  *
  * @param string newVer e.g. "1.1", "1.0.2", "1.0.2.0"
  * @param string oldVer
  * @return -1 when newVer < oldVer
  *          0 when the strings are identical
  *          1 when newVer > oldVer
  *          false when either argument is not a string
  * e.g.
  *   compareVer("0.0.2","0.0.1")        //  1
  *   compareVer("0.0.3","0.0.3")        //  0
  *   compareVer("0.2.0","1.0.0")        // -1
  *   compareVer("1.0.0","0.9.0")        //  1
  *   compareVer('0.0.2.2.0',"0.0.2.3")  // -1
  *   compareVer('0.0.2',"0.0.2.0")      // -1
  *
  * NOTE(review): trailing ".0" segments are NOT treated as equal — per the
  * code below, compareVer('0.0.2.0',"0.0.2") returns 1 (the longer version
  * wins), although an earlier comment claimed -1. Also `pushZero` pads at
  * most one zero per call, so lengths differing by more than two are not
  * fully normalised. Confirm whether this matches the intended semantics.
  ###
  compareVer: (newVer,oldVer)->
    if typeof newVer + typeof oldVer != 'stringstring'
      return false
    if newVer == oldVer
      return 0
    else
      newArr = newVer.split('.')
      oldArr = oldVer.split('.')
      newLen = newArr.length
      oldLen = oldArr.length
      maxLen = Math.max(newLen,oldLen)
      # Pad the shorter segment list with a single zero.
      pushZero = ->
        if newArr.length < maxLen
          newArr.push(0)
        else if oldArr.length < maxLen
          oldArr.push(0)
      newArr.length != oldArr.length && pushZero()
      newLen != oldLen && pushZero()
      if newArr.toString() == oldArr.toString()
        # Segments equal after padding: the longer original string wins.
        return if newLen > oldLen then 1 else -1
      else
        isTrue = -1
        # Walks the segments left to right; `~~` coerces each segment to an
        # integer. `isTrue = 1` is an assignment-as-expression, executed only
        # when the left side of `&&` is truthy.
        compareNum = ->
          _new = ~~newArr.shift()
          _old = ~~oldArr.shift()
          _new > _old && isTrue = 1
          _new == _old && newArr.length > 0 && compareNum()
        compareNum()
        return isTrue
  # Extract the text between the first '[' and ']' of a doc-tag line,
  # normalising single quotes to double quotes.
  _getTagValue: (str)->
    return str.split("[")[1].split("]")[0].replace(/\'/g,"\"")
  # Parse a dependency tag (e.g. `@require_global ['a','b']`) into an array.
  # SECURITY: this runs `eval` on text extracted from widget source files —
  # acceptable only because widget sources are trusted local project code.
  _getDepArr: (str)->
    key = "[" + Tools._getTagValue(str) + "]"
    return eval '(' + key + ')'
  # Log a concise watch notification for a changed file.
  tips:(res)->
    log "'" + color.cyan(res.path.replace(root,'')) + "'","was #{res.type}."
# V1 = "0.0.2.0"
# V2 = "0.0.2"
# log V1 + ':' + V2 + "===>",color.red(Tools.compareVer(V1,V2))
# return false
### ******************** 构建任务 ******************** ###
# 任务列表的容器
taskList = []
# 当前widget Map
widgetMap = {}
# js的依赖
widgetDeps = {}
# js作者的容器
authors = {}
# 版本的容器
emails = {}
# 历史版本的容器
history = {}
# 版本的容器
versions = {}
# js简介的容器
descriptions = {}
# widgetMap file
# BUGFIX: the extension used to be the typo ".join" — it is a JSON map file.
widgetMapFile = path.join root,'widgetMap.json'
# 任务构建类
class build
# 参数初始化
constructor:(@name)->
@taskName = "widget_#{@name}"
@srcPath = "./#{@name}/src/"
@distPath = "./#{@name}/dist/"
@curPkg = "./#{@name}/pkg.json"
@env = argv.e
# 监控的文件
@files = [
path.join(@srcPath, '**/*.{less,html}')
path.join(@srcPath, '**/*.js')
path.join(@srcPath, '*.js')
"!" + path.join(@srcPath, '_*.js')
]
@lessFiles = Tools.getFiles(@srcPath + 'less','less')
@htmlFiles = Tools.getFiles(@srcPath + 'tpl','html')
@jsModFiles = Tools.getFiles(@srcPath + 'mods','js')
# 一个js是否存在错误的标记
# 如果此标记为 false 时,停止后续的构建任务
@isJsHasNoError = true
# 初始化 pkg 参数
widgetMap[@taskName] = {}
widgetDeps[@taskName] = {}
history[@taskName] = {}
authors[@taskName] = ""
emails[@taskName] = ""
versions[@taskName] = ""
descriptions[@taskName] = ""
# tpl的容器
@tplSource = ''
# css的容器
@cssSource = ''
# 获取当前Widget的历史版本
_getHistory:->
_this = @
taskName = _this.taskName
distPath = _this.distPath
dirs = Tools.getFolders(distPath)
dirs.forEach (dir)->
_dirPath = path.join distPath,dir
_srcName = path.join _dirPath,'index.js'
_debugName = Tools._setDegbugPath(path.parse(_srcName))
_fileCon = Tools.minify Tools.getFileSync(_debugName)
_hash = Tools.md5(_fileCon)
_distName = Tools._setDistPath(path.parse(_srcName),_hash)
history[taskName][dir] =
hash: _hash
debugUri: Tools._setDegbugPath(path.parse(_srcName))
distUri: _distName
Tools.writeFile(_srcName,_fileCon)
Tools.writeFile(_distName,_fileCon)
# 读取js源文件注释中的参数
_getJsInfo: (file)->
taskName = @taskName
return through2.obj (file, enc, callback)->
source = file.contents.toString()
try
# 获取 author
_matchAu = source.match(/@author\s+\[[\s\S]*?\]/)
authors[taskName] = if _matchEm then Tools._getTagValue(_matchAu[0]) else argv.author
# 获取 author
_matchEm = source.match(/@email\s+\[[\s\S]*?\]/)
emails[taskName] = if _matchEm then Tools._getTagValue(_matchEm[0]) else argv.email
# 获取description
_ver = source.match(/@version\s+\[[\s\S]*?\]/)[0]
versions[taskName] = Tools._getTagValue(_ver)
# 获取description
_desc = source.match(/@description\s+\[[\s\S]*?\]/)[0]
descriptions[taskName] = Tools._getTagValue(_desc)
# 获取global deps
_global = source.match(/@require_global\s+\[[\s\S]*?\]/)[0]
widgetDeps[taskName].global = Tools._getDepArr(_global)
# 获取widget deps
_widget = source.match(/@require_widget\s+\[[\s\S]*?\]/)[0]
widgetDeps[taskName].widget = Tools._getDepArr(_widget)
catch error
log "'" + color.red(taskName) + "'",error
return callback(null,file)
# 获取js子模块的内容队列
_getJsMods: ->
_this = @
fileCon = []
_this.jsModFiles.length > 0 && _this.jsModFiles.forEach (val)->
fileCon.push Tools.getFileSync(val)
return fileCon
# 合并js依赖的子模块
_comboJs: ->
_this = @
combos = []
return through2.obj (file, enc, callback)->
if file.isNull()
return callback(null, file)
else if file.isStream()
throw new Error('Streams are not supported!')
try
jsModSource = _this._getJsMods()
_this.cssSource && combos.push(_this.cssSource)
_this.tplSource && combos.push(_this.tplSource)
combos = combos.concat(jsModSource)
combos.push(file.contents.toString())
jsCon = combos.join('\n')
file.contents = new Buffer(jsCon)
return callback(null,file)
catch error
return callback(new PluginError('catchError',err))
# 生成debug状态下的index.js文件名
_debugJs: ->
_this = @
taskName = _this.taskName
return through2.obj (file, enc, callback)->
try
source = file.contents.toString()
version = versions[_this.taskName]
debugPath = Tools._setDegbugPath(path.parse(file.relative))
file.path = path.join _this.distPath,version,debugPath
widgetMap[taskName].version = versions[taskName]
widgetMap[taskName].debugUri = file.path
return callback(null,file)
catch error
_this.isJsHasNoError = false
return callback(new PluginError('catchError',err))
# 压缩index.js
_miniJs: ->
_this = @
return through2.obj (file, enc, callback)->
try
file.contents = new Buffer(Tools.minify(file.contents.toString()))
srcName = Tools._setSrcPath(path.parse(file.relative))
file.path = path.join _this.distPath,srcName
return callback(null,file)
catch err
_this.isJsHasNoError = false
return callback(new PluginError('catchError',err))
# 给压缩后的index.js加上md5戳
_renameJs: ->
_this = @
taskName = _this.taskName
return through2.obj (file, enc, callback)->
try
hash = Tools.md5 file.contents.toString()
distName = Tools._setDistPath(path.parse(file.relative),hash)
file.path = path.join _this.distPath,distName
widgetMap[taskName].distUri = file.path
widgetMap[taskName].hash = hash
return callback(null,file)
catch err
return callback(new PluginError('catchError',err))
# 校验js语法
jsHint: (cb)->
_this = @
_cb = cb or ->
_jsFiles = [
path.join(_this.srcPath, '**/*.js')
path.join(_this.srcPath, '*.js')
"!" + path.join(@srcPath, '_*.js')
]
gulp.src _jsFiles
.pipe through2.obj (file, enc, callback)->
_source = file.contents.toString()
fileName = file.path.toString().split('widget/')[1]
try
log '\'' + color.cyan(fileName) + '\'',color.yellow("语法检测开始:")
# console.log _source
!!JSHINT(_source)
JSHINT.errors.filter (error)->
if error && error.code && error.code not in ['W093','W030']
# log error
log "error in line:",color.magenta(error.line)
log "error massage:",color.yellow(error.reason)
log '\'' + color.cyan(fileName) + '\'',color.green("语法检测结束!")
return callback(null,file)
catch err
_this.isJsHasNoError = false
return callback(new PluginError('catchError',err))
.on 'end', ->
_cb()
# js构建
js: (cb)->
_this = @
_cb = cb or ->
gulp.src path.join(_this.srcPath, 'index.js')
.pipe plumber({errorHandler: Tools.errHandler})
.pipe _this._getJsInfo()
.pipe _this._comboJs()
.pipe _this._debugJs()
.pipe gulp.dest(_this.distPath)
.pipe gulpIg.exclude(_this.env == 'local')
.pipe _this._miniJs()
.pipe gulp.dest(_this.distPath)
.pipe _this._renameJs()
.pipe gulp.dest(_this.distPath)
.on 'end', ->
_cb()
html: (cb)->
_this = @
_cb = cb or ->
tplPath = _this.srcPath + 'tpl'
modName = _this.taskName
tplData = {}
num = 0
try
fs.readdirSync(tplPath).forEach (file)->
_filePath = path.join(tplPath, file)
if fs.statSync(_filePath).isFile() and file.indexOf('.html') != -1 and file.indexOf('.') != 0
num++
fileName = path.basename(file,'.html')
source = fs.readFileSync(_filePath, 'utf8')
tplData[fileName] = Tools.minify(source,'html')
if num > 0
_this.tplSource = "(function(lib){\n lib.#{modName}_tpl = #{JSON.stringify(tplData)};\n return lib;\n})(window.#{globalNameSpace}||(window.#{globalNameSpace}={}));"
# fs.writeFileSync path.join(_this.srcPath,"_tpl.js"), tplSource, 'utf8'
# log 'tplTojs done!'
else
log 'no tpl todo!'
_cb()
catch error
log error
# less构建
less: (cb)->
_this = @
_cb = cb or ->
cssCon = []
modName = _this.taskName
gulp.src _this.srcPath + 'less/*.less'
.pipe plumber({errorHandler: Tools.errHandler})
.pipe less
compress: false
.pipe autopre()
.on 'data',(res)->
cssCon.push res.contents.toString()
.on 'end', ->
try
res = Tools.minify(cssCon.join('\n'),'css')
cssSource = "(function(lib){\n var _css = \"#{res}\";\n lib.#{modName}_css = _css;\n return lib;\n})(window.#{globalNameSpace}||(window.#{globalNameSpace}={}));"
_this.cssSource = Tools._replaceBgUri(cssSource)
# Tools.writeFile(_this.srcPath + "_css.js",cssSource)
_cb()
catch error
log error
cb(error)
# 读取上一次保存的 pkg.json
getPkg: ->
return Tools.getJSONSync(@curPkg)
# 设置 pkg.json
setPkg: ->
@_getHistory()
taskName = @taskName
_oldPkg = @getPkg()
# 如果命令不带版本参数,则赋值上次的版本
# 如果上次的版本不存在,则默认值为 ‘0.0.1’
_oldPkg.version = '0.0.1' if !_oldPkg.version
# 比较前后两个版本
_isNewVersion = Tools.compareVer(versions[taskName],_oldPkg.version)
# 设置新的pkg
_pkg = {}
_pkg.name = taskName
if _isNewVersion > -1
if _.isEmpty(_oldPkg)
_pkg = _.assing _oldPkg,_pkg
else
_pkg.version = versions[taskName]
_pkg.description = descriptions[taskName]
_pkg.author = authors[taskName]
_pkg.email = emails[taskName]
_pkg.hash = widgetMap[taskName].hash
_pkg.distUri = widgetMap[taskName].distUri
_pkg.debugUri = history[taskName][versions[taskName]].debugUri
_pkg.deps = widgetDeps[taskName]
_pkg.history = history[taskName]
Tools.writeFile @curPkg,JSON.stringify(_pkg,null,4)
widgetMap[taskName].deps = widgetDeps[taskName]
widgetMap[taskName].history = history[taskName]
else
log "'" + color.cyan(_pkg.name) + "'","版本不能低于","'" + color.red(_oldPkg.version) + "'"
# 注册gulp任务
    # Register this widget's gulp (3.x) task graph:
    #   <name>_html/_less -> <name>_jsHint -> <name>_js -> <name>_pkg -> <name>
    # plus a <name>_watch task that re-dispatches by changed-file extension.
    registTask: =>
        _this = @
        _defTask = []
        taskName = _this.name
        taskList.push taskName
        # Only register tpl/less tasks when such sources exist.
        if _this.htmlFiles.length > 0
            gulp.task "#{taskName}_html",->
                _this.html()
            _defTask.push("#{taskName}_html")
        if _this.lessFiles.length > 0
            gulp.task "#{taskName}_less",->
                _this.less()
            _defTask.push("#{taskName}_less")
        # Lint runs after tpl/less, but only in the local env.
        gulp.task "#{taskName}_jsHint",_defTask,->
            _this.jsHint() if _this.env is 'local'
        gulp.task "#{taskName}_js",["#{taskName}_jsHint"],->
            _this.js()
        gulp.task "#{taskName}_pkg",["#{taskName}_js"],->
            # Warn when the lint/build stages flagged a syntax error.
            _this.isJsHasNoError or log color.red("#{taskName} 组件存在语法错误")
            _this.setPkg()
        gulp.task "#{taskName}_watch",->
            gulp.watch _this.files,(res)->
                Tools.tips(res)
                try
                    # Map the changed file's extension to its task,
                    # e.g. foo.less -> "<name>_less".
                    _ext = path.extname(res.path).replace(/^\./,'')
                    _task = "#{taskName}_#{_ext}"
                    gulp.start(_task)
                catch error
                    log error
        # Entry task; keeps watching in the local env.
        gulp.task taskName,["#{taskName}_pkg"],->
            gulp.start("#{taskName}_watch") if _this.env is 'local'
# 生成widget项目的全部task
# Register a task set for every widget folder under the project root,
# skipping the reserved task names and node_modules.
(->
    blocks = Tools.getFolders(root)
    blocks.forEach (block)->
        if block not in defaultTasks and block isnt 'node_modules'
            new build(block).registTask()
)()
# 生成全部map
# Merge the in-memory widgetMap over the saved one and persist it.
gulp.task "map",->
    oldMap = Tools.getJSONSync widgetMapFile
    newMap = _.assign {},oldMap,widgetMap
    # console.log newMap
    Tools.writeFile(widgetMapFile,JSON.stringify(newMap,null,4))
# 定义 启动任务
if tasks.length == 0
gulp.task 'default',->
console.log "请设置需要构建的项目: ",taskList.concat(['public'])
else if tasks.shift() == 'public'
# log color.red(tasks.length)
if tasks.length == 0
gulp.task 'public',->
tasks.length == 0 && log "请设置需要发布的项目: ",taskList.concat('all')
else
if tasks[0] == 'all'
# console.log taskList
gulp.task 'all',taskList
gulp.task 'public',['all'],->
gulp.start 'map' #if argv.e is 'local'
else
gulp.task 'public',tasks,->
###*
* TMS-black模块开发构建工具
* @author [<NAME>]
* @version [0.0.1]
* @date [2016-01-20 00:01:12]
* @required [gulp]
###
fs = require 'fs'
path = require 'path'
gulp = require 'gulp'
gulpIf = require 'gulp-if'
gulpIg = require 'gulp-ignore'
_ = require 'lodash'
crypto = require 'crypto'
yargs = require 'yargs'
less = require 'gulp-less'
uglify = require 'uglify-js'
autopre = require 'gulp-autoprefixer'
plumber = require 'gulp-plumber'
{JSHINT} = require 'jshint'
gutil = require 'gulp-util'
log = gutil.log
color = gutil.colors
PluginError = gutil.PluginError
CleanCSS = require 'clean-css'
through2 = require 'through2'
# 设置运行的命令参数
argv = yargs.option("e", {
alias: 'env',
demand: true
default: 'local',
describe: color.cyan('项目的运行环境'),
type: 'string'
}).option("author", {
default: 'lmtdit',
describe: color.cyan('设置项目的作者'),
type: 'string'
}).option("email", {
default: '<EMAIL>',
describe: color.cyan('设置项目作者的email'),
type: 'string'
}).option("hash", {
alias: 'hashlen',
default: 10,
describe: color.cyan('设置生产文件名的hash长度'),
type: 'number'
}).option("cdn", {
default: '',
describe: color.cyan('设置项目发布的cdn域名'),
type: 'string'
})
.help('h')
.alias('h', 'help')
.argv
# 全局的配置
tasks = argv._.concat([])
globalNameSpace = '_LIB_' #全局的命名空间
root = process.env.INIT_CWD
defaultTasks = ['less','js','watch','default','public','public-all']
global.Cache = {}
try
global.Cache = require '../global/globalMap.json'
catch error
# 一些正则
REGEX =
uri: /globalUri\(('|")([^'|^"]*)(\w+).(png|gif|jpg|html|js|css)('|")\)/g
uriVal: /\([\s\S]*?\)/
cssBg: /url\([\S\s]*?\)/g
bgUrl: /\([\s\S]*?.(png|jpg|gif)\)/
### ******************** base functions ******************** ###
Tools =
# md5
md5: (source) ->
_buf = new Buffer(source)
_str = _buf.toString("binary")
return crypto.createHash('md5').update(_str, 'utf8').digest('hex')
# make dir
mkdirsSync: (dirpath, mode)->
if fs.existsSync(dirpath)
return true
else
if Tools.mkdirsSync path.dirname(dirpath), mode
fs.mkdirSync(dirpath, mode)
return true
# 错误警报
errHandler:(e,cb)->
gutil.beep()
gutil.beep()
log e
# 压缩css/js源码
minify: (source,type)->
type = type or "js"
switch type
when 'css'
cssOpt = {
keepBreaks:false
compatibility:
properties:
iePrefixHack:true
ieSuffixHack:true
}
mangled = new CleanCSS(cssOpt).minify(source)
source = mangled.styles.replace(/\/\*([\s\S]*?)\*\//g, '')
when 'js'
source = Tools._replaceUriValue(source)
mangled = uglify.minify(source,{fromString: true})
source = mangled.code
when 'html'
source = source.replace(/<!--([\s\S]*?)-->/g, '')
.replace(/\/\*([\s\S]*?)\*\//g, '')
.replace(/^\s+$/g, '')
.replace(/\n/g, '')
.replace(/\t/g, '')
.replace(/\r/g, '')
.replace(/\n\s+/g, ' ')
.replace(/\s+/g, ' ')
.replace(/>([\n\s+]*?)</g,'><')
return source
# 获取文件
getFileSync: (file, encoding)->
_encoding = encoding or 'utf8'
fileCon = ''
if fs.existsSync(file)
stats = fs.statSync(file)
if stats.isFile()
fileCon = fs.readFileSync(file, _encoding)
return fileCon
# 读取json文件内容
getJSONSync: (file) ->
fileCon = Tools.getFileSync(file)
data = {}
if fileCon
fileCon = fileCon.replace(/\/\/[^\n]*/g, '')
try
data = JSON.parse(fileCon)
catch e
console.log e
return data
# 写入文件
writeFile: (file, source,offlog)->
# 文件存在并且MD5值一样,则不重复写入
name = path.basename(file);
if fs.existsSync(file) and Tools.md5(Tools.getFileSync(file)) is Tools.md5(source)
return false
Tools.mkdirsSync(path.dirname(file))
fs.writeFileSync(file, source, 'utf8')
offlog or log("'" + color.cyan(file) + "'", "build success.")
# 获取文件夹下的一级目录列表
getFolders: (fPath)->
folders = []
try
fs.readdirSync(fPath).forEach (v)->
folder = path.join fPath,v
if fs.statSync(folder).isDirectory() and v.indexOf('.') != 0
folders.push v
catch error
# log error.Error
return folders
# 获取文件夹下的文件列表列表
getFiles: (fPath,type)->
list = []
try
fs.readdirSync(fPath).forEach (v)->
file = path.join fPath,v
if fs.existsSync(file) and v.indexOf('.' + type) > 0
list.push file
catch error
# log error.Error
return list
# 生成 debug 文件路径
_setDegbugPath: (parse)->
parse.base = "_debug." + parse.name + parse.ext
return path.format(parse)
# 获取原文件名
_setSrcPath: (parse)->
parse.base = parse.name.replace('_debug.','') + parse.ext
return path.format(parse)
# 生成 dist 文件路径
_setDistPath: (parse,hash)->
parse.base = parse.name + "." + hash.substring(0,argv.hash) + parse.ext
return path.format(parse)
# 生成缓存的类型
_setCacheType: (parse)->
return parse.ext.replace('.','')
# 从缓存中读取 dist 文件路径
_getDistName: (type,name)->
if _.has(global.Cache,type + "Map") and global.Cache[type + "Map"][name]
return global.Cache[type + "Map"][name].distPath
else
return name
# 替换JS中的内嵌资源
# 例如:globalUri("dir/name.ext")-->globalUri("dir/name.md5hash.ext")
_replaceUriValue: (source)->
return source.replace REGEX.uri,(res)->
_val = res.match(REGEX.uriVal).shift().replace(/[\(\)"']/g,'')
_valArr = _val.split('/')
type = _valArr.shift()
name = _valArr.join('/')
distName = Tools._getDistName(type,name)
return res.replace(name,distName)
# 替换css中的背景图片或字体文件引用资源
# 例如:url('xxxxx.xxx')-->url('xxxxx.md5hash.xxx')
_replaceCssBg: (source)->
return source.replace REGEX.cssBg,(res)->
_val = res.match(REGEX.uriVal).shift().replace(/[\(\)"']/g,'')
if _val.indexOf('font/') != -1
name = _val.split('font/')[1]
.split(/(\?|#)/)[0]
distName = Tools._getDistName('font',name)
return res.replace(name,distName)
else if _val.indexOf('img/') != -1
name = _val.split('img/')[1]
distName = Tools._getDistName('img',name)
return res.replace(name,distName)
else
return res
# 替换css中的背景图片为动态请求
_replaceBgUri: (source)->
return source.replace REGEX.cssBg,(str)->
val = str.replace REGEX.bgUrl,($1)->
img = $1.replace(/[\(\)'"]/g,"")
if $1.indexOf('/global/img/') != -1
img = img.replace(/\/\w+\/img/,'img')
return "('\"+lib.globalUri(\"#{img}\")+\"')"
else
img = img.replace /\/\w+\/img/,'img'
return "('\"+lib.widgetUri(\"#{img}\")+\"')"
return val
###*
* npm版本比较
* Compares two software version numbers (e.g. "1.7.1" or "1.2.1").
*
* @parse string newVer eg:"1.1","1.0.2","1.0.2.0"
* @parse string oldVer
* @return <,return -1
* =,return 0
* >,return 1
* eg:
* compareVersion("0.0.2","0.0.1") //1
* compareVersion("0.0.3","0.0.3") //0
* compareVersion("0.2.0","1.0.0") //-1
* compareVersion("1.0.0","0.9.0") //1
* compareVersion('0.0.2.2.0',"0.0.2.3") //-1
* compareVersion('0.0.2.0',"0.0.2") //-1
* compareVersion('0.0.2',"0.0.2.0") //-1
###
    # Compare two dotted version strings (see the doc block above).
    # Returns false for non-string input, 0 for equality, 1/-1 otherwise.
    compareVer: (newVer,oldVer)->
        # Both arguments must be strings.
        if typeof newVer + typeof oldVer != 'stringstring'
            return false
        if newVer == oldVer
            return 0
        else
            newArr = newVer.split('.')
            oldArr = oldVer.split('.')
            newLen = newArr.length
            oldLen = oldArr.length
            maxLen = Math.max(newLen,oldLen)
            # Pad the shorter segment list with a trailing 0.
            # NOTE(review): each call appends only ONE zero and is not
            # recursive, so lists differing by 2+ segments stay unequal
            # in length — confirm against the examples documented above.
            pushZero = ->
                if newArr.length < maxLen
                    newArr.push(0)
                else if oldArr.length < maxLen
                    oldArr.push(0)
            newArr.length != oldArr.length && pushZero()
            newLen != oldLen && pushZero()
            if newArr.toString() == oldArr.toString()
                # Same digits after padding: the longer form wins.
                return if newLen > oldLen then 1 else -1
            else
                isTrue = -1
                # Walk segments left to right until they differ;
                # ~~ coerces each segment to an integer.
                compareNum = ->
                    _new = ~~newArr.shift()
                    _old = ~~oldArr.shift()
                    _new > _old && isTrue = 1
                    _new == _old && newArr.length > 0 && compareNum()
                compareNum()
                return isTrue
    # Extract the text between the first [ and ] of a header tag match,
    # normalizing single quotes to double quotes.
    _getTagValue: (str)->
        return str.split("[")[1].split("]")[0].replace(/\'/g,"\"")
    # Parse a tag like @require_global ['a','b'] into a JS array.
    # SECURITY NOTE(review): this `eval`s text taken from widget source
    # headers — acceptable only for trusted, local project files.
    _getDepArr: (str)->
        key = "[" + Tools._getTagValue(str) + "]"
        return eval '(' + key + ')'
    # Log a watch event, with the path shown relative to the project root.
    tips:(res)->
        log "'" + color.cyan(res.path.replace(root,'')) + "'","was #{res.type}."
# V1 = "0.0.2.0"
# V2 = "0.0.2"
# log V1 + ':' + V2 + "===>",color.red(Tools.compareVer(V1,V2))
# return false
### ******************** 构建任务 ******************** ###
# 任务列表的容器
taskList = []
# 当前widget Map
widgetMap = {}
# js的依赖
widgetDeps = {}
# js作者的容器
authors = {}
# 版本的容器
emails = {}
# 历史版本的容器
history = {}
# 版本的容器
versions = {}
# js简介的容器
descriptions = {}
# widgetMap file
widgetMapFile = path.join root,'widgetMap.join'
# 任务构建类
class build
# 参数初始化
    # Derive per-widget paths/state from the folder name and initialize
    # this widget's slots in the module-level containers.
    constructor:(@name)->
        @taskName = "widget_#{@name}"
        @srcPath = "./#{@name}/src/"
        @distPath = "./#{@name}/dist/"
        @curPkg = "./#{@name}/pkg.json"
        @env = argv.e
        # Files to watch (underscore-prefixed generated js excluded).
        @files = [
            path.join(@srcPath, '**/*.{less,html}')
            path.join(@srcPath, '**/*.js')
            path.join(@srcPath, '*.js')
            "!" + path.join(@srcPath, '_*.js')
        ]
        @lessFiles = Tools.getFiles(@srcPath + 'less','less')
        @htmlFiles = Tools.getFiles(@srcPath + 'tpl','html')
        @jsModFiles = Tools.getFiles(@srcPath + 'mods','js')
        # Syntax-error flag for this widget's js; when false, later
        # build stages report the error instead of proceeding silently.
        @isJsHasNoError = true
        # Initialize this widget's entries in the shared pkg containers.
        widgetMap[@taskName] = {}
        widgetDeps[@taskName] = {}
        history[@taskName] = {}
        authors[@taskName] = ""
        emails[@taskName] = ""
        versions[@taskName] = ""
        descriptions[@taskName] = ""
        # Holder for the compiled template snippet (filled by html()).
        @tplSource = ''
        # Holder for the compiled css snippet (filled by less()).
        @cssSource = ''
# 获取当前Widget的历史版本
    # Rebuild the version history from dist/<version>/ folders: for each
    # version, re-minify its _debug.index.js, hash it, and record/write
    # both the plain and hash-named outputs.
    _getHistory:->
        _this = @
        taskName = _this.taskName
        distPath = _this.distPath
        dirs = Tools.getFolders(distPath)
        dirs.forEach (dir)->
            _dirPath = path.join distPath,dir
            _srcName = path.join _dirPath,'index.js'
            _debugName = Tools._setDegbugPath(path.parse(_srcName))
            # Minify the debug bundle; its md5 becomes the dist suffix.
            _fileCon = Tools.minify Tools.getFileSync(_debugName)
            _hash = Tools.md5(_fileCon)
            _distName = Tools._setDistPath(path.parse(_srcName),_hash)
            history[taskName][dir] =
                hash: _hash
                debugUri: Tools._setDegbugPath(path.parse(_srcName))
                distUri: _distName
            # writeFile skips the write when content is unchanged.
            Tools.writeFile(_srcName,_fileCon)
            Tools.writeFile(_distName,_fileCon)
# 读取js源文件注释中的参数
_getJsInfo: (file)->
taskName = @taskName
return through2.obj (file, enc, callback)->
source = file.contents.toString()
try
# 获取 author
_matchAu = source.match(/@author\s+\[[\s\S]*?\]/)
authors[taskName] = if _matchEm then Tools._getTagValue(_matchAu[0]) else argv.author
# 获取 author
_matchEm = source.match(/@email\s+\[[\s\S]*?\]/)
emails[taskName] = if _matchEm then Tools._getTagValue(_matchEm[0]) else argv.email
# 获取description
_ver = source.match(/@version\s+\[[\s\S]*?\]/)[0]
versions[taskName] = Tools._getTagValue(_ver)
# 获取description
_desc = source.match(/@description\s+\[[\s\S]*?\]/)[0]
descriptions[taskName] = Tools._getTagValue(_desc)
# 获取global deps
_global = source.match(/@require_global\s+\[[\s\S]*?\]/)[0]
widgetDeps[taskName].global = Tools._getDepArr(_global)
# 获取widget deps
_widget = source.match(/@require_widget\s+\[[\s\S]*?\]/)[0]
widgetDeps[taskName].widget = Tools._getDepArr(_widget)
catch error
log "'" + color.red(taskName) + "'",error
return callback(null,file)
# 获取js子模块的内容队列
_getJsMods: ->
_this = @
fileCon = []
_this.jsModFiles.length > 0 && _this.jsModFiles.forEach (val)->
fileCon.push Tools.getFileSync(val)
return fileCon
# 合并js依赖的子模块
_comboJs: ->
_this = @
combos = []
return through2.obj (file, enc, callback)->
if file.isNull()
return callback(null, file)
else if file.isStream()
throw new Error('Streams are not supported!')
try
jsModSource = _this._getJsMods()
_this.cssSource && combos.push(_this.cssSource)
_this.tplSource && combos.push(_this.tplSource)
combos = combos.concat(jsModSource)
combos.push(file.contents.toString())
jsCon = combos.join('\n')
file.contents = new Buffer(jsCon)
return callback(null,file)
catch error
return callback(new PluginError('catchError',err))
# 生成debug状态下的index.js文件名
_debugJs: ->
_this = @
taskName = _this.taskName
return through2.obj (file, enc, callback)->
try
source = file.contents.toString()
version = versions[_this.taskName]
debugPath = Tools._setDegbugPath(path.parse(file.relative))
file.path = path.join _this.distPath,version,debugPath
widgetMap[taskName].version = versions[taskName]
widgetMap[taskName].debugUri = file.path
return callback(null,file)
catch error
_this.isJsHasNoError = false
return callback(new PluginError('catchError',err))
# 压缩index.js
    # Minify the debug bundle and restore the plain index.js file name
    # (dropping the _debug. prefix); marks the error flag on failure.
    _miniJs: ->
        _this = @
        return through2.obj (file, enc, callback)->
            try
                file.contents = new Buffer(Tools.minify(file.contents.toString()))
                srcName = Tools._setSrcPath(path.parse(file.relative))
                file.path = path.join _this.distPath,srcName
                return callback(null,file)
            catch err
                _this.isJsHasNoError = false
                return callback(new PluginError('catchError',err))
# 给压缩后的index.js加上md5戳
    # Append the md5 hash of the minified bundle to its file name and
    # record the dist uri/hash on the widget map.
    _renameJs: ->
        _this = @
        taskName = _this.taskName
        return through2.obj (file, enc, callback)->
            try
                hash = Tools.md5 file.contents.toString()
                distName = Tools._setDistPath(path.parse(file.relative),hash)
                file.path = path.join _this.distPath,distName
                widgetMap[taskName].distUri = file.path
                widgetMap[taskName].hash = hash
                return callback(null,file)
            catch err
                return callback(new PluginError('catchError',err))
# 校验js语法
jsHint: (cb)->
_this = @
_cb = cb or ->
_jsFiles = [
path.join(_this.srcPath, '**/*.js')
path.join(_this.srcPath, '*.js')
"!" + path.join(@srcPath, '_*.js')
]
gulp.src _jsFiles
.pipe through2.obj (file, enc, callback)->
_source = file.contents.toString()
fileName = file.path.toString().split('widget/')[1]
try
log '\'' + color.cyan(fileName) + '\'',color.yellow("语法检测开始:")
# console.log _source
!!JSHINT(_source)
JSHINT.errors.filter (error)->
if error && error.code && error.code not in ['W093','W030']
# log error
log "error in line:",color.magenta(error.line)
log "error massage:",color.yellow(error.reason)
log '\'' + color.cyan(fileName) + '\'',color.green("语法检测结束!")
return callback(null,file)
catch err
_this.isJsHasNoError = false
return callback(new PluginError('catchError',err))
.on 'end', ->
_cb()
# js构建
    # Build pipeline for the widget entry script (index.js):
    # read header metadata -> combine deps -> emit debug file ->
    # (non-local only) minify -> emit -> append md5 hash -> emit.
    js: (cb)->
        _this = @
        # Default to a no-op so the method also works without a callback.
        _cb = cb or ->
        gulp.src path.join(_this.srcPath, 'index.js')
        .pipe plumber({errorHandler: Tools.errHandler})
        # Parse @author/@email/@version/... tags from the source header.
        .pipe _this._getJsInfo()
        # Prepend css/tpl containers and mods/*.js to the entry file.
        .pipe _this._comboJs()
        # Rename to dist/<version>/_debug.index.js.
        .pipe _this._debugJs()
        .pipe gulp.dest(_this.distPath)
        # In the local env stop here: skip minified/hashed outputs.
        .pipe gulpIg.exclude(_this.env == 'local')
        .pipe _this._miniJs()
        .pipe gulp.dest(_this.distPath)
        # Append the md5 hash to the minified file name.
        .pipe _this._renameJs()
        .pipe gulp.dest(_this.distPath)
        .on 'end', ->
            _cb()
html: (cb)->
_this = @
_cb = cb or ->
tplPath = _this.srcPath + 'tpl'
modName = _this.taskName
tplData = {}
num = 0
try
fs.readdirSync(tplPath).forEach (file)->
_filePath = path.join(tplPath, file)
if fs.statSync(_filePath).isFile() and file.indexOf('.html') != -1 and file.indexOf('.') != 0
num++
fileName = path.basename(file,'.html')
source = fs.readFileSync(_filePath, 'utf8')
tplData[fileName] = Tools.minify(source,'html')
if num > 0
_this.tplSource = "(function(lib){\n lib.#{modName}_tpl = #{JSON.stringify(tplData)};\n return lib;\n})(window.#{globalNameSpace}||(window.#{globalNameSpace}={}));"
# fs.writeFileSync path.join(_this.srcPath,"_tpl.js"), tplSource, 'utf8'
# log 'tplTojs done!'
else
log 'no tpl todo!'
_cb()
catch error
log error
# less构建
less: (cb)->
_this = @
_cb = cb or ->
cssCon = []
modName = _this.taskName
gulp.src _this.srcPath + 'less/*.less'
.pipe plumber({errorHandler: Tools.errHandler})
.pipe less
compress: false
.pipe autopre()
.on 'data',(res)->
cssCon.push res.contents.toString()
.on 'end', ->
try
res = Tools.minify(cssCon.join('\n'),'css')
cssSource = "(function(lib){\n var _css = \"#{res}\";\n lib.#{modName}_css = _css;\n return lib;\n})(window.#{globalNameSpace}||(window.#{globalNameSpace}={}));"
_this.cssSource = Tools._replaceBgUri(cssSource)
# Tools.writeFile(_this.srcPath + "_css.js",cssSource)
_cb()
catch error
log error
cb(error)
# 读取上一次保存的 pkg.json
    # Read this widget's previously saved pkg.json; returns {} when the
    # file is missing or unparsable (see Tools.getJSONSync).
    getPkg: ->
        return Tools.getJSONSync(@curPkg)
# 设置 pkg.json
setPkg: ->
@_getHistory()
taskName = @taskName
_oldPkg = @getPkg()
# 如果命令不带版本参数,则赋值上次的版本
# 如果上次的版本不存在,则默认值为 ‘0.0.1’
_oldPkg.version = '0.0.1' if !_oldPkg.version
# 比较前后两个版本
_isNewVersion = Tools.compareVer(versions[taskName],_oldPkg.version)
# 设置新的pkg
_pkg = {}
_pkg.name = taskName
if _isNewVersion > -1
if _.isEmpty(_oldPkg)
_pkg = _.assing _oldPkg,_pkg
else
_pkg.version = versions[taskName]
_pkg.description = descriptions[taskName]
_pkg.author = authors[taskName]
_pkg.email = emails[taskName]
_pkg.hash = widgetMap[taskName].hash
_pkg.distUri = widgetMap[taskName].distUri
_pkg.debugUri = history[taskName][versions[taskName]].debugUri
_pkg.deps = widgetDeps[taskName]
_pkg.history = history[taskName]
Tools.writeFile @curPkg,JSON.stringify(_pkg,null,4)
widgetMap[taskName].deps = widgetDeps[taskName]
widgetMap[taskName].history = history[taskName]
else
log "'" + color.cyan(_pkg.name) + "'","版本不能低于","'" + color.red(_oldPkg.version) + "'"
# 注册gulp任务
registTask: =>
_this = @
_defTask = []
taskName = _this.name
taskList.push taskName
if _this.htmlFiles.length > 0
gulp.task "#{taskName}_html",->
_this.html()
_defTask.push("#{taskName}_html")
if _this.lessFiles.length > 0
gulp.task "#{taskName}_less",->
_this.less()
_defTask.push("#{taskName}_less")
gulp.task "#{taskName}_jsHint",_defTask,->
_this.jsHint() if _this.env is 'local'
gulp.task "#{taskName}_js",["#{taskName}_jsHint"],->
_this.js()
gulp.task "#{taskName}_pkg",["#{taskName}_js"],->
_this.isJsHasNoError or log color.red("#{taskName} 组件存在语法错误")
_this.setPkg()
gulp.task "#{taskName}_watch",->
gulp.watch _this.files,(res)->
Tools.tips(res)
try
_ext = path.extname(res.path).replace(/^\./,'')
_task = "#{taskName}_#{_ext}"
gulp.start(_task)
catch error
log error
gulp.task taskName,["#{taskName}_pkg"],->
gulp.start("#{taskName}_watch") if _this.env is 'local'
# 生成widget项目的全部task
(->
blocks = Tools.getFolders(root)
blocks.forEach (block)->
if block not in defaultTasks and block isnt 'node_modules'
new build(block).registTask()
)()
# 生成全部map
gulp.task "map",->
oldMap = Tools.getJSONSync widgetMapFile
newMap = _.assign {},oldMap,widgetMap
# console.log newMap
Tools.writeFile(widgetMapFile,JSON.stringify(newMap,null,4))
# 定义 启动任务
if tasks.length == 0
gulp.task 'default',->
console.log "请设置需要构建的项目: ",taskList.concat(['public'])
else if tasks.shift() == 'public'
# log color.red(tasks.length)
if tasks.length == 0
gulp.task 'public',->
tasks.length == 0 && log "请设置需要发布的项目: ",taskList.concat('all')
else
if tasks[0] == 'all'
# console.log taskList
gulp.task 'all',taskList
gulp.task 'public',['all'],->
gulp.start 'map' #if argv.e is 'local'
else
gulp.task 'public',tasks,->
###*
* TMS-black模块开发构建工具
* @author [PI:NAME:<NAME>END_PI]
* @version [0.0.1]
* @date [2016-01-20 00:01:12]
* @required [gulp]
###
fs = require 'fs'
path = require 'path'
gulp = require 'gulp'
gulpIf = require 'gulp-if'
gulpIg = require 'gulp-ignore'
_ = require 'lodash'
crypto = require 'crypto'
yargs = require 'yargs'
less = require 'gulp-less'
uglify = require 'uglify-js'
autopre = require 'gulp-autoprefixer'
plumber = require 'gulp-plumber'
{JSHINT} = require 'jshint'
gutil = require 'gulp-util'
log = gutil.log
color = gutil.colors
PluginError = gutil.PluginError
CleanCSS = require 'clean-css'
through2 = require 'through2'
# 设置运行的命令参数
argv = yargs.option("e", {
alias: 'env',
demand: true
default: 'local',
describe: color.cyan('项目的运行环境'),
type: 'string'
}).option("author", {
default: 'lmtdit',
describe: color.cyan('设置项目的作者'),
type: 'string'
}).option("email", {
default: 'PI:EMAIL:<EMAIL>END_PI',
describe: color.cyan('设置项目作者的email'),
type: 'string'
}).option("hash", {
alias: 'hashlen',
default: 10,
describe: color.cyan('设置生产文件名的hash长度'),
type: 'number'
}).option("cdn", {
default: '',
describe: color.cyan('设置项目发布的cdn域名'),
type: 'string'
})
.help('h')
.alias('h', 'help')
.argv
# 全局的配置
tasks = argv._.concat([])
globalNameSpace = '_LIB_' #全局的命名空间
root = process.env.INIT_CWD
defaultTasks = ['less','js','watch','default','public','public-all']
global.Cache = {}
try
global.Cache = require '../global/globalMap.json'
catch error
# 一些正则
REGEX =
uri: /globalUri\(('|")([^'|^"]*)(\w+).(png|gif|jpg|html|js|css)('|")\)/g
uriVal: /\([\s\S]*?\)/
cssBg: /url\([\S\s]*?\)/g
bgUrl: /\([\s\S]*?.(png|jpg|gif)\)/
### ******************** base functions ******************** ###
Tools =
# md5
md5: (source) ->
_buf = new Buffer(source)
_str = _buf.toString("binary")
return crypto.createHash('md5').update(_str, 'utf8').digest('hex')
# make dir
mkdirsSync: (dirpath, mode)->
if fs.existsSync(dirpath)
return true
else
if Tools.mkdirsSync path.dirname(dirpath), mode
fs.mkdirSync(dirpath, mode)
return true
# 错误警报
errHandler:(e,cb)->
gutil.beep()
gutil.beep()
log e
# 压缩css/js源码
minify: (source,type)->
type = type or "js"
switch type
when 'css'
cssOpt = {
keepBreaks:false
compatibility:
properties:
iePrefixHack:true
ieSuffixHack:true
}
mangled = new CleanCSS(cssOpt).minify(source)
source = mangled.styles.replace(/\/\*([\s\S]*?)\*\//g, '')
when 'js'
source = Tools._replaceUriValue(source)
mangled = uglify.minify(source,{fromString: true})
source = mangled.code
when 'html'
source = source.replace(/<!--([\s\S]*?)-->/g, '')
.replace(/\/\*([\s\S]*?)\*\//g, '')
.replace(/^\s+$/g, '')
.replace(/\n/g, '')
.replace(/\t/g, '')
.replace(/\r/g, '')
.replace(/\n\s+/g, ' ')
.replace(/\s+/g, ' ')
.replace(/>([\n\s+]*?)</g,'><')
return source
# 获取文件
getFileSync: (file, encoding)->
_encoding = encoding or 'utf8'
fileCon = ''
if fs.existsSync(file)
stats = fs.statSync(file)
if stats.isFile()
fileCon = fs.readFileSync(file, _encoding)
return fileCon
# 读取json文件内容
getJSONSync: (file) ->
fileCon = Tools.getFileSync(file)
data = {}
if fileCon
fileCon = fileCon.replace(/\/\/[^\n]*/g, '')
try
data = JSON.parse(fileCon)
catch e
console.log e
return data
# 写入文件
writeFile: (file, source,offlog)->
# 文件存在并且MD5值一样,则不重复写入
name = path.basename(file);
if fs.existsSync(file) and Tools.md5(Tools.getFileSync(file)) is Tools.md5(source)
return false
Tools.mkdirsSync(path.dirname(file))
fs.writeFileSync(file, source, 'utf8')
offlog or log("'" + color.cyan(file) + "'", "build success.")
# 获取文件夹下的一级目录列表
getFolders: (fPath)->
folders = []
try
fs.readdirSync(fPath).forEach (v)->
folder = path.join fPath,v
if fs.statSync(folder).isDirectory() and v.indexOf('.') != 0
folders.push v
catch error
# log error.Error
return folders
# 获取文件夹下的文件列表列表
getFiles: (fPath,type)->
list = []
try
fs.readdirSync(fPath).forEach (v)->
file = path.join fPath,v
if fs.existsSync(file) and v.indexOf('.' + type) > 0
list.push file
catch error
# log error.Error
return list
# 生成 debug 文件路径
_setDegbugPath: (parse)->
parse.base = "_debug." + parse.name + parse.ext
return path.format(parse)
# 获取原文件名
_setSrcPath: (parse)->
parse.base = parse.name.replace('_debug.','') + parse.ext
return path.format(parse)
# 生成 dist 文件路径
_setDistPath: (parse,hash)->
parse.base = parse.name + "." + hash.substring(0,argv.hash) + parse.ext
return path.format(parse)
# 生成缓存的类型
_setCacheType: (parse)->
return parse.ext.replace('.','')
# 从缓存中读取 dist 文件路径
_getDistName: (type,name)->
if _.has(global.Cache,type + "Map") and global.Cache[type + "Map"][name]
return global.Cache[type + "Map"][name].distPath
else
return name
# 替换JS中的内嵌资源
# 例如:globalUri("dir/name.ext")-->globalUri("dir/name.md5hash.ext")
_replaceUriValue: (source)->
return source.replace REGEX.uri,(res)->
_val = res.match(REGEX.uriVal).shift().replace(/[\(\)"']/g,'')
_valArr = _val.split('/')
type = _valArr.shift()
name = _valArr.join('/')
distName = Tools._getDistName(type,name)
return res.replace(name,distName)
# 替换css中的背景图片或字体文件引用资源
# 例如:url('xxxxx.xxx')-->url('xxxxx.md5hash.xxx')
_replaceCssBg: (source)->
return source.replace REGEX.cssBg,(res)->
_val = res.match(REGEX.uriVal).shift().replace(/[\(\)"']/g,'')
if _val.indexOf('font/') != -1
name = _val.split('font/')[1]
.split(/(\?|#)/)[0]
distName = Tools._getDistName('font',name)
return res.replace(name,distName)
else if _val.indexOf('img/') != -1
name = _val.split('img/')[1]
distName = Tools._getDistName('img',name)
return res.replace(name,distName)
else
return res
# 替换css中的背景图片为动态请求
_replaceBgUri: (source)->
return source.replace REGEX.cssBg,(str)->
val = str.replace REGEX.bgUrl,($1)->
img = $1.replace(/[\(\)'"]/g,"")
if $1.indexOf('/global/img/') != -1
img = img.replace(/\/\w+\/img/,'img')
return "('\"+lib.globalUri(\"#{img}\")+\"')"
else
img = img.replace /\/\w+\/img/,'img'
return "('\"+lib.widgetUri(\"#{img}\")+\"')"
return val
###*
* npm版本比较
* Compares two software version numbers (e.g. "1.7.1" or "1.2.1").
*
* @parse string newVer eg:"1.1","1.0.2","1.0.2.0"
* @parse string oldVer
* @return <,return -1
* =,return 0
* >,return 1
* eg:
* compareVersion("0.0.2","0.0.1") //1
* compareVersion("0.0.3","0.0.3") //0
* compareVersion("0.2.0","1.0.0") //-1
* compareVersion("1.0.0","0.9.0") //1
* compareVersion('0.0.2.2.0',"0.0.2.3") //-1
* compareVersion('0.0.2.0',"0.0.2") //-1
* compareVersion('0.0.2',"0.0.2.0") //-1
###
compareVer: (newVer,oldVer)->
if typeof newVer + typeof oldVer != 'stringstring'
return false
if newVer == oldVer
return 0
else
newArr = newVer.split('.')
oldArr = oldVer.split('.')
newLen = newArr.length
oldLen = oldArr.length
maxLen = Math.max(newLen,oldLen)
pushZero = ->
if newArr.length < maxLen
newArr.push(0)
else if oldArr.length < maxLen
oldArr.push(0)
newArr.length != oldArr.length && pushZero()
newLen != oldLen && pushZero()
if newArr.toString() == oldArr.toString()
return if newLen > oldLen then 1 else -1
else
isTrue = -1
compareNum = ->
_new = ~~newArr.shift()
_old = ~~oldArr.shift()
_new > _old && isTrue = 1
_new == _old && newArr.length > 0 && compareNum()
compareNum()
return isTrue
_getTagValue: (str)->
return str.split("[")[1].split("]")[0].replace(/\'/g,"\"")
_getDepArr: (str)->
key = "[" + Tools._getTagValue(str) + "]"
return eval '(' + key + ')'
tips:(res)->
log "'" + color.cyan(res.path.replace(root,'')) + "'","was #{res.type}."
# V1 = "0.0.2.0"
# V2 = "0.0.2"
# log V1 + ':' + V2 + "===>",color.red(Tools.compareVer(V1,V2))
# return false
### ******************** 构建任务 ******************** ###
# 任务列表的容器
taskList = []
# 当前widget Map
widgetMap = {}
# js的依赖
widgetDeps = {}
# js作者的容器
authors = {}
# 版本的容器
emails = {}
# 历史版本的容器
history = {}
# 版本的容器
versions = {}
# js简介的容器
descriptions = {}
# widgetMap file
widgetMapFile = path.join root,'widgetMap.join'
# 任务构建类
class build
# 参数初始化
constructor:(@name)->
@taskName = "widget_#{@name}"
@srcPath = "./#{@name}/src/"
@distPath = "./#{@name}/dist/"
@curPkg = "./#{@name}/pkg.json"
@env = argv.e
# 监控的文件
@files = [
path.join(@srcPath, '**/*.{less,html}')
path.join(@srcPath, '**/*.js')
path.join(@srcPath, '*.js')
"!" + path.join(@srcPath, '_*.js')
]
@lessFiles = Tools.getFiles(@srcPath + 'less','less')
@htmlFiles = Tools.getFiles(@srcPath + 'tpl','html')
@jsModFiles = Tools.getFiles(@srcPath + 'mods','js')
# 一个js是否存在错误的标记
# 如果此标记为 false 时,停止后续的构建任务
@isJsHasNoError = true
# 初始化 pkg 参数
widgetMap[@taskName] = {}
widgetDeps[@taskName] = {}
history[@taskName] = {}
authors[@taskName] = ""
emails[@taskName] = ""
versions[@taskName] = ""
descriptions[@taskName] = ""
# tpl的容器
@tplSource = ''
# css的容器
@cssSource = ''
# 获取当前Widget的历史版本
_getHistory:->
_this = @
taskName = _this.taskName
distPath = _this.distPath
dirs = Tools.getFolders(distPath)
dirs.forEach (dir)->
_dirPath = path.join distPath,dir
_srcName = path.join _dirPath,'index.js'
_debugName = Tools._setDegbugPath(path.parse(_srcName))
_fileCon = Tools.minify Tools.getFileSync(_debugName)
_hash = Tools.md5(_fileCon)
_distName = Tools._setDistPath(path.parse(_srcName),_hash)
history[taskName][dir] =
hash: _hash
debugUri: Tools._setDegbugPath(path.parse(_srcName))
distUri: _distName
Tools.writeFile(_srcName,_fileCon)
Tools.writeFile(_distName,_fileCon)
# 读取js源文件注释中的参数
_getJsInfo: (file)->
taskName = @taskName
return through2.obj (file, enc, callback)->
source = file.contents.toString()
try
# 获取 author
_matchAu = source.match(/@author\s+\[[\s\S]*?\]/)
authors[taskName] = if _matchEm then Tools._getTagValue(_matchAu[0]) else argv.author
# 获取 author
_matchEm = source.match(/@email\s+\[[\s\S]*?\]/)
emails[taskName] = if _matchEm then Tools._getTagValue(_matchEm[0]) else argv.email
# 获取description
_ver = source.match(/@version\s+\[[\s\S]*?\]/)[0]
versions[taskName] = Tools._getTagValue(_ver)
# 获取description
_desc = source.match(/@description\s+\[[\s\S]*?\]/)[0]
descriptions[taskName] = Tools._getTagValue(_desc)
# 获取global deps
_global = source.match(/@require_global\s+\[[\s\S]*?\]/)[0]
widgetDeps[taskName].global = Tools._getDepArr(_global)
# 获取widget deps
_widget = source.match(/@require_widget\s+\[[\s\S]*?\]/)[0]
widgetDeps[taskName].widget = Tools._getDepArr(_widget)
catch error
log "'" + color.red(taskName) + "'",error
return callback(null,file)
# 获取js子模块的内容队列
_getJsMods: ->
_this = @
fileCon = []
_this.jsModFiles.length > 0 && _this.jsModFiles.forEach (val)->
fileCon.push Tools.getFileSync(val)
return fileCon
# 合并js依赖的子模块
_comboJs: ->
_this = @
combos = []
return through2.obj (file, enc, callback)->
if file.isNull()
return callback(null, file)
else if file.isStream()
throw new Error('Streams are not supported!')
try
jsModSource = _this._getJsMods()
_this.cssSource && combos.push(_this.cssSource)
_this.tplSource && combos.push(_this.tplSource)
combos = combos.concat(jsModSource)
combos.push(file.contents.toString())
jsCon = combos.join('\n')
file.contents = new Buffer(jsCon)
return callback(null,file)
catch error
return callback(new PluginError('catchError',err))
# 生成debug状态下的index.js文件名
_debugJs: ->
_this = @
taskName = _this.taskName
return through2.obj (file, enc, callback)->
try
source = file.contents.toString()
version = versions[_this.taskName]
debugPath = Tools._setDegbugPath(path.parse(file.relative))
file.path = path.join _this.distPath,version,debugPath
widgetMap[taskName].version = versions[taskName]
widgetMap[taskName].debugUri = file.path
return callback(null,file)
catch error
_this.isJsHasNoError = false
return callback(new PluginError('catchError',err))
# 压缩index.js
_miniJs: ->
_this = @
return through2.obj (file, enc, callback)->
try
file.contents = new Buffer(Tools.minify(file.contents.toString()))
srcName = Tools._setSrcPath(path.parse(file.relative))
file.path = path.join _this.distPath,srcName
return callback(null,file)
catch err
_this.isJsHasNoError = false
return callback(new PluginError('catchError',err))
# 给压缩后的index.js加上md5戳
_renameJs: ->
_this = @
taskName = _this.taskName
return through2.obj (file, enc, callback)->
try
hash = Tools.md5 file.contents.toString()
distName = Tools._setDistPath(path.parse(file.relative),hash)
file.path = path.join _this.distPath,distName
widgetMap[taskName].distUri = file.path
widgetMap[taskName].hash = hash
return callback(null,file)
catch err
return callback(new PluginError('catchError',err))
# 校验js语法
jsHint: (cb)->
_this = @
_cb = cb or ->
_jsFiles = [
path.join(_this.srcPath, '**/*.js')
path.join(_this.srcPath, '*.js')
"!" + path.join(@srcPath, '_*.js')
]
gulp.src _jsFiles
.pipe through2.obj (file, enc, callback)->
_source = file.contents.toString()
fileName = file.path.toString().split('widget/')[1]
try
log '\'' + color.cyan(fileName) + '\'',color.yellow("语法检测开始:")
# console.log _source
!!JSHINT(_source)
JSHINT.errors.filter (error)->
if error && error.code && error.code not in ['W093','W030']
# log error
log "error in line:",color.magenta(error.line)
log "error massage:",color.yellow(error.reason)
log '\'' + color.cyan(fileName) + '\'',color.green("语法检测结束!")
return callback(null,file)
catch err
_this.isJsHasNoError = false
return callback(new PluginError('catchError',err))
.on 'end', ->
_cb()
# js构建
js: (cb)->
_this = @
_cb = cb or ->
gulp.src path.join(_this.srcPath, 'index.js')
.pipe plumber({errorHandler: Tools.errHandler})
.pipe _this._getJsInfo()
.pipe _this._comboJs()
.pipe _this._debugJs()
.pipe gulp.dest(_this.distPath)
.pipe gulpIg.exclude(_this.env == 'local')
.pipe _this._miniJs()
.pipe gulp.dest(_this.distPath)
.pipe _this._renameJs()
.pipe gulp.dest(_this.distPath)
.on 'end', ->
_cb()
html: (cb)->
_this = @
_cb = cb or ->
tplPath = _this.srcPath + 'tpl'
modName = _this.taskName
tplData = {}
num = 0
try
fs.readdirSync(tplPath).forEach (file)->
_filePath = path.join(tplPath, file)
if fs.statSync(_filePath).isFile() and file.indexOf('.html') != -1 and file.indexOf('.') != 0
num++
fileName = path.basename(file,'.html')
source = fs.readFileSync(_filePath, 'utf8')
tplData[fileName] = Tools.minify(source,'html')
if num > 0
_this.tplSource = "(function(lib){\n lib.#{modName}_tpl = #{JSON.stringify(tplData)};\n return lib;\n})(window.#{globalNameSpace}||(window.#{globalNameSpace}={}));"
# fs.writeFileSync path.join(_this.srcPath,"_tpl.js"), tplSource, 'utf8'
# log 'tplTojs done!'
else
log 'no tpl todo!'
_cb()
catch error
log error
# less构建
less: (cb)->
_this = @
_cb = cb or ->
cssCon = []
modName = _this.taskName
gulp.src _this.srcPath + 'less/*.less'
.pipe plumber({errorHandler: Tools.errHandler})
.pipe less
compress: false
.pipe autopre()
.on 'data',(res)->
cssCon.push res.contents.toString()
.on 'end', ->
try
res = Tools.minify(cssCon.join('\n'),'css')
cssSource = "(function(lib){\n var _css = \"#{res}\";\n lib.#{modName}_css = _css;\n return lib;\n})(window.#{globalNameSpace}||(window.#{globalNameSpace}={}));"
_this.cssSource = Tools._replaceBgUri(cssSource)
# Tools.writeFile(_this.srcPath + "_css.js",cssSource)
_cb()
catch error
log error
cb(error)
# 读取上一次保存的 pkg.json
getPkg: ->
return Tools.getJSONSync(@curPkg)
# 设置 pkg.json
setPkg: ->
@_getHistory()
taskName = @taskName
_oldPkg = @getPkg()
# 如果命令不带版本参数,则赋值上次的版本
# 如果上次的版本不存在,则默认值为 ‘0.0.1’
_oldPkg.version = '0.0.1' if !_oldPkg.version
# 比较前后两个版本
_isNewVersion = Tools.compareVer(versions[taskName],_oldPkg.version)
# 设置新的pkg
_pkg = {}
_pkg.name = taskName
if _isNewVersion > -1
if _.isEmpty(_oldPkg)
_pkg = _.assing _oldPkg,_pkg
else
_pkg.version = versions[taskName]
_pkg.description = descriptions[taskName]
_pkg.author = authors[taskName]
_pkg.email = emails[taskName]
_pkg.hash = widgetMap[taskName].hash
_pkg.distUri = widgetMap[taskName].distUri
_pkg.debugUri = history[taskName][versions[taskName]].debugUri
_pkg.deps = widgetDeps[taskName]
_pkg.history = history[taskName]
Tools.writeFile @curPkg,JSON.stringify(_pkg,null,4)
widgetMap[taskName].deps = widgetDeps[taskName]
widgetMap[taskName].history = history[taskName]
else
log "'" + color.cyan(_pkg.name) + "'","版本不能低于","'" + color.red(_oldPkg.version) + "'"
# 注册gulp任务
registTask: =>
_this = @
_defTask = []
taskName = _this.name
taskList.push taskName
if _this.htmlFiles.length > 0
gulp.task "#{taskName}_html",->
_this.html()
_defTask.push("#{taskName}_html")
if _this.lessFiles.length > 0
gulp.task "#{taskName}_less",->
_this.less()
_defTask.push("#{taskName}_less")
gulp.task "#{taskName}_jsHint",_defTask,->
_this.jsHint() if _this.env is 'local'
gulp.task "#{taskName}_js",["#{taskName}_jsHint"],->
_this.js()
gulp.task "#{taskName}_pkg",["#{taskName}_js"],->
_this.isJsHasNoError or log color.red("#{taskName} 组件存在语法错误")
_this.setPkg()
gulp.task "#{taskName}_watch",->
gulp.watch _this.files,(res)->
Tools.tips(res)
try
_ext = path.extname(res.path).replace(/^\./,'')
_task = "#{taskName}_#{_ext}"
gulp.start(_task)
catch error
log error
gulp.task taskName,["#{taskName}_pkg"],->
gulp.start("#{taskName}_watch") if _this.env is 'local'
# 生成widget项目的全部task
(->
blocks = Tools.getFolders(root)
blocks.forEach (block)->
if block not in defaultTasks and block isnt 'node_modules'
new build(block).registTask()
)()
# 生成全部map
gulp.task "map",->
oldMap = Tools.getJSONSync widgetMapFile
newMap = _.assign {},oldMap,widgetMap
# console.log newMap
Tools.writeFile(widgetMapFile,JSON.stringify(newMap,null,4))
# 定义 启动任务
if tasks.length == 0
gulp.task 'default',->
console.log "请设置需要构建的项目: ",taskList.concat(['public'])
else if tasks.shift() == 'public'
# log color.red(tasks.length)
if tasks.length == 0
gulp.task 'public',->
tasks.length == 0 && log "请设置需要发布的项目: ",taskList.concat('all')
else
if tasks[0] == 'all'
# console.log taskList
gulp.task 'all',taskList
gulp.task 'public',['all'],->
gulp.start 'map' #if argv.e is 'local'
else
gulp.task 'public',tasks,->
|
[
{
"context": "ase.io/crypto\"\n\nconfig =\n default_key_expire_in: 24*60*60*365*4\n\n(exports[k] = v for k,v of config)\n",
"end": 3552,
"score": 0.9483088850975037,
"start": 3539,
"tag": "KEY",
"value": "24*60*60*365*"
}
] | src/const.iced | samkenxstream/kbpgp | 464 |
exports.openpgp = openpgp =
public_key_algorithms :
RSA : 1
RSA_ENCRYPT_ONLY : 2
RSA_SIGN_ONLY : 3
ELGAMAL : 16
DSA : 17
ECDH : 18
ECDSA : 19
ELGAMAL_SIGN_AND_ENCRYPT : 20
EDDSA : 22
symmetric_key_algorithms :
CAST5 : 3
AES128 : 7
AES192 : 8
AES256 : 9
hash_algorithms :
MD5 : 1
SHA1 : 2
RIPEMD160 : 3
SHA256 : 8
SHA384 : 9
SHA512 : 10
SHA224 : 11
sig_subpacket:
creation_time : 2
expiration_time : 3
exportable_certificate : 4
trust_signature : 5
regular_expression : 6
revocable : 7
key_expiration_time : 9
preferred_symmetric_algorithms : 11
revocation_key : 12
issuer : 16
notation_data : 20
preferred_hash_algorithms : 21
preferred_compression_algorithms : 22
key_server_preferences : 23
preferred_key_server : 24
primary_user_id : 25
policy_uri : 26
key_flags : 27
signers_user_id : 28
reason_for_revocation : 29
features : 30
signature_target : 31
embedded_signature : 32
issuer_fingerprint : 33
experimental_low : 101
experimental_high : 110
sig_types : # See RFC 4880 5.2.1. Signature Types
binary_doc : 0x00
canonical_text : 0x01
issuer : 0x10
persona : 0x11
casual : 0x12
positive : 0x13
subkey_binding : 0x18
primary_binding : 0x19
direct : 0x1f
key_revocation : 0x20
subkey_revocation : 0x28
certificate_revocation : 0x30
message_types :
generic : 0
public_key : 4
private_key : 5
signature : 8
clearsign : 9
s2k :
plain : 0
salt : 1
salt_iter : 3
gnu : 101
gnu_dummy : 1001
s2k_convention :
none : 0
checksum : 255
sha1 : 254
ecdh :
param_bytes : 3
version : 1
packet_tags :
PKESK : 1
signature : 2
one_pass_sig: 4
secret_key : 5
public_key : 6
secret_subkey : 7
compressed : 8
literal : 11
public_subkey : 14
userid : 13
user_attribute : 17
SEIPD : 18
MDC : 19
literal_formats :
binary : 0x62
text : 0x74
utf8 : 0x75
versions :
PKESK : 3
SEIPD : 1
one_pass_sig : 3
keymaterial : V4 : 4
signature :
V2 : 2
V3 : 3
V4 : 4
signatures :
key : 0x99
userid : 0xb4
user_attribute : 0xd1
key_flags :
certify_keys : 0x1
sign_data : 0x2
encrypt_comm : 0x4
encrypt_storage : 0x8
private_split : 0x10
auth : 0x20
shared : 0x80
features:
modification_detection : 0x1
key_server_preferences:
no_modify : 0x80
compression :
none : 0
zip : 1
zlib : 2
bzip : 3
exports.kb =
key_encryption:
none : 0
triplesec_v1 : 1
triplesec_v2 : 2
triplesec_v3 : 3
packet_tags :
p3skb : 0x201
signature : 0x202
encryption : 0x203
public_key_algorithms :
NACL_EDDSA: 0x20
NACL_DH: 0x21
versions :
V1 : 1
padding :
EMSA_PCKS1_v1_5 : 3
RSASSA_PSS : 4
key_defaults:
primary :
expire_in : 0
nbits :
RSA : 4096
ECDSA : 384
DSA : 2048
sub :
expire_in : 24*60*60*365*8
nbits :
RSA : 2048
ECDH : 256
ECDSA : 256
DSA : 2048
ELGAMAL : 2048
kid :
version : 1
trailer : 0x0a
algo : 8
len : 32
exports.ops =
encrypt : 0x1
decrypt : 0x2
verify : 0x4
sign : 0x8
exports.header =
version : "Keybase OpenPGP"
comment : "https://keybase.io/crypto"
config =
default_key_expire_in: 24*60*60*365*4
(exports[k] = v for k,v of config)
| 203995 |
exports.openpgp = openpgp =
public_key_algorithms :
RSA : 1
RSA_ENCRYPT_ONLY : 2
RSA_SIGN_ONLY : 3
ELGAMAL : 16
DSA : 17
ECDH : 18
ECDSA : 19
ELGAMAL_SIGN_AND_ENCRYPT : 20
EDDSA : 22
symmetric_key_algorithms :
CAST5 : 3
AES128 : 7
AES192 : 8
AES256 : 9
hash_algorithms :
MD5 : 1
SHA1 : 2
RIPEMD160 : 3
SHA256 : 8
SHA384 : 9
SHA512 : 10
SHA224 : 11
sig_subpacket:
creation_time : 2
expiration_time : 3
exportable_certificate : 4
trust_signature : 5
regular_expression : 6
revocable : 7
key_expiration_time : 9
preferred_symmetric_algorithms : 11
revocation_key : 12
issuer : 16
notation_data : 20
preferred_hash_algorithms : 21
preferred_compression_algorithms : 22
key_server_preferences : 23
preferred_key_server : 24
primary_user_id : 25
policy_uri : 26
key_flags : 27
signers_user_id : 28
reason_for_revocation : 29
features : 30
signature_target : 31
embedded_signature : 32
issuer_fingerprint : 33
experimental_low : 101
experimental_high : 110
sig_types : # See RFC 4880 5.2.1. Signature Types
binary_doc : 0x00
canonical_text : 0x01
issuer : 0x10
persona : 0x11
casual : 0x12
positive : 0x13
subkey_binding : 0x18
primary_binding : 0x19
direct : 0x1f
key_revocation : 0x20
subkey_revocation : 0x28
certificate_revocation : 0x30
message_types :
generic : 0
public_key : 4
private_key : 5
signature : 8
clearsign : 9
s2k :
plain : 0
salt : 1
salt_iter : 3
gnu : 101
gnu_dummy : 1001
s2k_convention :
none : 0
checksum : 255
sha1 : 254
ecdh :
param_bytes : 3
version : 1
packet_tags :
PKESK : 1
signature : 2
one_pass_sig: 4
secret_key : 5
public_key : 6
secret_subkey : 7
compressed : 8
literal : 11
public_subkey : 14
userid : 13
user_attribute : 17
SEIPD : 18
MDC : 19
literal_formats :
binary : 0x62
text : 0x74
utf8 : 0x75
versions :
PKESK : 3
SEIPD : 1
one_pass_sig : 3
keymaterial : V4 : 4
signature :
V2 : 2
V3 : 3
V4 : 4
signatures :
key : 0x99
userid : 0xb4
user_attribute : 0xd1
key_flags :
certify_keys : 0x1
sign_data : 0x2
encrypt_comm : 0x4
encrypt_storage : 0x8
private_split : 0x10
auth : 0x20
shared : 0x80
features:
modification_detection : 0x1
key_server_preferences:
no_modify : 0x80
compression :
none : 0
zip : 1
zlib : 2
bzip : 3
exports.kb =
key_encryption:
none : 0
triplesec_v1 : 1
triplesec_v2 : 2
triplesec_v3 : 3
packet_tags :
p3skb : 0x201
signature : 0x202
encryption : 0x203
public_key_algorithms :
NACL_EDDSA: 0x20
NACL_DH: 0x21
versions :
V1 : 1
padding :
EMSA_PCKS1_v1_5 : 3
RSASSA_PSS : 4
key_defaults:
primary :
expire_in : 0
nbits :
RSA : 4096
ECDSA : 384
DSA : 2048
sub :
expire_in : 24*60*60*365*8
nbits :
RSA : 2048
ECDH : 256
ECDSA : 256
DSA : 2048
ELGAMAL : 2048
kid :
version : 1
trailer : 0x0a
algo : 8
len : 32
exports.ops =
encrypt : 0x1
decrypt : 0x2
verify : 0x4
sign : 0x8
exports.header =
version : "Keybase OpenPGP"
comment : "https://keybase.io/crypto"
config =
default_key_expire_in: <KEY>4
(exports[k] = v for k,v of config)
| true |
exports.openpgp = openpgp =
public_key_algorithms :
RSA : 1
RSA_ENCRYPT_ONLY : 2
RSA_SIGN_ONLY : 3
ELGAMAL : 16
DSA : 17
ECDH : 18
ECDSA : 19
ELGAMAL_SIGN_AND_ENCRYPT : 20
EDDSA : 22
symmetric_key_algorithms :
CAST5 : 3
AES128 : 7
AES192 : 8
AES256 : 9
hash_algorithms :
MD5 : 1
SHA1 : 2
RIPEMD160 : 3
SHA256 : 8
SHA384 : 9
SHA512 : 10
SHA224 : 11
sig_subpacket:
creation_time : 2
expiration_time : 3
exportable_certificate : 4
trust_signature : 5
regular_expression : 6
revocable : 7
key_expiration_time : 9
preferred_symmetric_algorithms : 11
revocation_key : 12
issuer : 16
notation_data : 20
preferred_hash_algorithms : 21
preferred_compression_algorithms : 22
key_server_preferences : 23
preferred_key_server : 24
primary_user_id : 25
policy_uri : 26
key_flags : 27
signers_user_id : 28
reason_for_revocation : 29
features : 30
signature_target : 31
embedded_signature : 32
issuer_fingerprint : 33
experimental_low : 101
experimental_high : 110
sig_types : # See RFC 4880 5.2.1. Signature Types
binary_doc : 0x00
canonical_text : 0x01
issuer : 0x10
persona : 0x11
casual : 0x12
positive : 0x13
subkey_binding : 0x18
primary_binding : 0x19
direct : 0x1f
key_revocation : 0x20
subkey_revocation : 0x28
certificate_revocation : 0x30
message_types :
generic : 0
public_key : 4
private_key : 5
signature : 8
clearsign : 9
s2k :
plain : 0
salt : 1
salt_iter : 3
gnu : 101
gnu_dummy : 1001
s2k_convention :
none : 0
checksum : 255
sha1 : 254
ecdh :
param_bytes : 3
version : 1
packet_tags :
PKESK : 1
signature : 2
one_pass_sig: 4
secret_key : 5
public_key : 6
secret_subkey : 7
compressed : 8
literal : 11
public_subkey : 14
userid : 13
user_attribute : 17
SEIPD : 18
MDC : 19
literal_formats :
binary : 0x62
text : 0x74
utf8 : 0x75
versions :
PKESK : 3
SEIPD : 1
one_pass_sig : 3
keymaterial : V4 : 4
signature :
V2 : 2
V3 : 3
V4 : 4
signatures :
key : 0x99
userid : 0xb4
user_attribute : 0xd1
key_flags :
certify_keys : 0x1
sign_data : 0x2
encrypt_comm : 0x4
encrypt_storage : 0x8
private_split : 0x10
auth : 0x20
shared : 0x80
features:
modification_detection : 0x1
key_server_preferences:
no_modify : 0x80
compression :
none : 0
zip : 1
zlib : 2
bzip : 3
exports.kb =
key_encryption:
none : 0
triplesec_v1 : 1
triplesec_v2 : 2
triplesec_v3 : 3
packet_tags :
p3skb : 0x201
signature : 0x202
encryption : 0x203
public_key_algorithms :
NACL_EDDSA: 0x20
NACL_DH: 0x21
versions :
V1 : 1
padding :
EMSA_PCKS1_v1_5 : 3
RSASSA_PSS : 4
key_defaults:
primary :
expire_in : 0
nbits :
RSA : 4096
ECDSA : 384
DSA : 2048
sub :
expire_in : 24*60*60*365*8
nbits :
RSA : 2048
ECDH : 256
ECDSA : 256
DSA : 2048
ELGAMAL : 2048
kid :
version : 1
trailer : 0x0a
algo : 8
len : 32
exports.ops =
encrypt : 0x1
decrypt : 0x2
verify : 0x4
sign : 0x8
exports.header =
version : "Keybase OpenPGP"
comment : "https://keybase.io/crypto"
config =
default_key_expire_in: PI:KEY:<KEY>END_PI4
(exports[k] = v for k,v of config)
|
[
{
"context": ".ipa.user.del connection: ipa,\n username: 'test_user_del'\n\n describe 'action', ->\n \n they 'delete a",
"end": 354,
"score": 0.999475359916687,
"start": 341,
"tag": "USERNAME",
"value": "test_user_del"
},
{
"context": " sn: 'Delete'\n mail: ... | packages/ipa/test/user/del.coffee | shivaylamba/meilisearch-gatsby-plugin-guide | 31 |
nikita = require '@nikitajs/core/lib'
{tags, config, ipa} = require '../test'
they = require('mocha-they')(config)
return unless tags.ipa
describe 'ipa.user.del', ->
describe 'schema', ->
they 'use `username` as alias for `uid`', ({ssh}) ->
nikita
$ssh: ssh
.ipa.user.del connection: ipa,
username: 'test_user_del'
describe 'action', ->
they 'delete a missing user', ({ssh}) ->
nikita
$ssh: ssh
, ->
@ipa.user.del connection: ipa,
uid: 'test_user_del'
{$status} = await @ipa.user.del connection: ipa,
uid: 'test_user_del'
$status.should.be.false()
they 'delete a user', ({ssh}) ->
nikita
$ssh: ssh
, ->
@ipa.user connection: ipa,
uid: 'test_user_del'
attributes:
givenname: 'User'
sn: 'Delete'
mail: [
'test_user_del@nikita.js.org'
]
{$status} = await @ipa.user.del connection: ipa,
uid: 'test_user_del'
$status.should.be.true()
| 130962 |
nikita = require '@nikitajs/core/lib'
{tags, config, ipa} = require '../test'
they = require('mocha-they')(config)
return unless tags.ipa
describe 'ipa.user.del', ->
describe 'schema', ->
they 'use `username` as alias for `uid`', ({ssh}) ->
nikita
$ssh: ssh
.ipa.user.del connection: ipa,
username: 'test_user_del'
describe 'action', ->
they 'delete a missing user', ({ssh}) ->
nikita
$ssh: ssh
, ->
@ipa.user.del connection: ipa,
uid: 'test_user_del'
{$status} = await @ipa.user.del connection: ipa,
uid: 'test_user_del'
$status.should.be.false()
they 'delete a user', ({ssh}) ->
nikita
$ssh: ssh
, ->
@ipa.user connection: ipa,
uid: 'test_user_del'
attributes:
givenname: 'User'
sn: 'Delete'
mail: [
'<EMAIL>'
]
{$status} = await @ipa.user.del connection: ipa,
uid: 'test_user_del'
$status.should.be.true()
| true |
nikita = require '@nikitajs/core/lib'
{tags, config, ipa} = require '../test'
they = require('mocha-they')(config)
return unless tags.ipa
describe 'ipa.user.del', ->
describe 'schema', ->
they 'use `username` as alias for `uid`', ({ssh}) ->
nikita
$ssh: ssh
.ipa.user.del connection: ipa,
username: 'test_user_del'
describe 'action', ->
they 'delete a missing user', ({ssh}) ->
nikita
$ssh: ssh
, ->
@ipa.user.del connection: ipa,
uid: 'test_user_del'
{$status} = await @ipa.user.del connection: ipa,
uid: 'test_user_del'
$status.should.be.false()
they 'delete a user', ({ssh}) ->
nikita
$ssh: ssh
, ->
@ipa.user connection: ipa,
uid: 'test_user_del'
attributes:
givenname: 'User'
sn: 'Delete'
mail: [
'PI:EMAIL:<EMAIL>END_PI'
]
{$status} = await @ipa.user.del connection: ipa,
uid: 'test_user_del'
$status.should.be.true()
|
[
{
"context": "24\n\t\t\treturn\n\n\t\t# Username\n\t\tForm.input\n\t\t\tname: \"username\"\n\t\t\ttext: tr(\"Username\")\n\t\t\tvalue: credentials.ge",
"end": 1631,
"score": 0.9808918833732605,
"start": 1623,
"tag": "USERNAME",
"value": "username"
},
{
"context": "ame\n\t\tForm.input... | client.coffee | basilfx/Happening-eetlijst | 0 | Db = require "db"
Dom = require "dom"
Modal = require "modal"
Obs = require "obs"
Plugin = require "plugin"
Server = require "server"
Ui = require "ui"
Form = require "form"
Time = require "time"
tr = require("i18n").tr
# Open dialog to select an user
userSelect = (opts) ->
doModal = !->
Modal.show opts.title, !->
Dom.style width: "80%"
Ui.list !->
Dom.style
maxHeight: "40%"
overflow: "auto"
_overflowScrolling: "touch"
backgroundColor: "#eee"
margin: "-12px -12px -15px -12px"
Plugin.users.iterate (user) !->
Ui.item !->
Ui.avatar user.get("avatar")
Dom.text user.get("name")
if user.key() is opts.value
Dom.style fontWeight: "bold"
Dom.div !->
Dom.style
padding: "0 10px"
WebkitBoxFlex: 1
textAlign: "right"
fontSize: "150%"
color: "#72BB53"
Dom.text "✓"
Dom.onTap !->
opts.onSave user.key()
Modal.remove()
user = Plugin.users.get(opts.value)
if content = opts.content
content user, doModal
# Render the settings UI. The Settings UI has an username and password field.
# Optionally, the user can clear all data.
exports.renderSettings = ->
credentials = Obs.create()
# Helper for merging the credential data
merge = (d) !->
if not d
log "Received no credentials. Plugin not configured?"
else
credentials.merge(d)
# Request server credentials
Server.call "getCredentials", merge
Obs.observe !->
# Wait for data to be ready
if credentials.get("username") is null
Ui.spinner 24
return
# Username
Form.input
name: "username"
text: tr("Username")
value: credentials.get("username")
onSave: (value) ->
Server.sync "setCredentials", {"username": value}, merge, !->
credentials.set "username", value
# Password
Form.input
name: "password"
text: tr("Password")
type: "password"
value: credentials.get("password")
onSave: (value) ->
Server.sync "setCredentials", {"password": value}, merge, !->
credentials.set("password", value)
# Clear all
Ui.bigButton tr("Clear all data"), !->
Modal.confirm tr("Clear all data"), tr("Are you sure you want to clear all data?"), !->
credentials.set
username: null
password: null
Server.call "clearAll"
# Render the UI
exports.render = ->
# Ask the server to refresh the data. In case new data is available, it will
# be pushed to the client. The lastVisit variable will store the last visit,
# so new information can be highlighted.
Server.call "refresh", (lastVisit) ->
Db.local.set "lastVisit", lastVisit
setTimeout ->
# Set to greater number, so highlights won't appear again.
Db.local.set("lastVisit", new Date().getTime())
, 5000
# App not configured
if not Db.shared.get("info")
# Title
Dom.h1 "Eetlijst"
# Warning message
if Plugin.userIsAdmin()
Dom.p tr("This application isn't configured yet. Please enter the
credentials on the settings page.")
else
Dom.p tr("This application isn't configured yet. Only admins of this
Happening can configure it.")
# Some error occured, e.g. wrong credentials
else if Db.shared.get("info", "error")
Dom.h1 "Eetlijst"
Dom.p tr("Some error occured while configuring this plugin. Are the
credentials correctly entered?")
# App is configured
else
# Title
Dom.h1 !->
Dom.style
whiteSpace: "nowrap"
Dom.text Db.shared.get("info", "name") + " - Eetlijst"
# Noticeboard
Dom.h2 tr("Noticeboard")
Dom.form !->
Form.text
name: "tapText"
autogrow: true
format: (t) -> Dom.richText(t)
value: Db.shared.get("noticeboard")
title: tr("Noticeboard is empty!")
onSave: (v) !->
Server.sync "setNoticeboard", v, ->
Db.shared.set("noticeboard", v)
# List of todays statuses
Dom.h2 tr("Statuses")
Obs.observe !->
today = Db.shared.ref("today")
if not today
Dom.p tr("There is no status information available for today")
return
# Verify deadline
deadline = new Date(today.peek("deadline") * 1000)
deadlinePassed = new Date().getTime() > deadline.getTime()
if deadlinePassed
Time.deltaText deadline / 1000, "default", (t) !->
Dom.p tr("You cannot change the status for today. The \
deadline of %1 has already passed!", t)
else
Time.deltaText deadline.getTime() / 1000, "default", (t) !->
Dom.p tr("The deadline will expire in %1.", t)
# Draw all rows
Ui.list !-> today.iterate "statuses", (status) !->
resident = status.peek("resident")
# Draw single row
Ui.item !->
Dom.style
padding: "0"
height: "64px"
# User avatar
userSelect
name: "user"
title: tr("Map user %1 to:", resident)
value: Db.shared.get("mapping", status.key())
onSave: (userId) !-> Server.sync "setUser", status.key(), userId, !->
# An userId cannot be associated with two residents
for k, v of Db.shared.get("mapping")
if v == userId
Db.shared.set "mapping", k, null
# Set new mapping
Db.shared.set "mapping", status.key(), userId
content: (user, modal) !->
Dom.div !->
Dom.style
margin: "0 8px 0 0"
width: "38px"
Ui.avatar user?.avatar
Dom.onTap modal
Dom.div !->
Dom.style
whiteSpace: "nowrap"
overflow: "hidden"
Flex: 1
# Resident name and last changed
if user
Dom.text user.name + " "
Dom.span ->
Dom.style
color: "#ccc"
Dom.text "(" + resident + ")"
else
Dom.text resident
Dom.br()
Dom.span !->
lastChanged = status.get("lastChanged")
lastVisit = Db.local.get("lastVisit") or 0
if lastChanged > lastVisit
Dom.style
color: "red"
else
Dom.style
color: "#ccc"
Time.deltaText lastChanged, "short"
Obs.observe !->
value = status.get("value")
# Helper for toggling state, including deadline check
toggleStatus = (resident, value) ->
if deadlinePassed
Modal.show "Deadline passed", "The deadline has already passed."
else
Server.sync "setStatus", resident, value, today.peek("timestamp"), ->
Db.shared.set("today", "statuses", resident, "value", value)
Db.shared.set("today", "statuses", resident, "lastChanged", (new Date().getTime() / 1000))
# Diner button
Ui.button !->
extra = ""
if value < 0
Dom.style
backgroundColor: "#72bb53"
border: "1px #72bb53 solid"
color: "white"
# Extra people attend dinner
if value < -1
extra = " +" + (-1 * value - 1)
else
Dom.style
backgroundColor: "#fff"
border: "1px #72bb53 solid"
color: "black"
Dom.text tr("Diner") + extra
, -> toggleStatus(status.key(), if value < 0 then (value - 1) else -1)
# No button
Ui.button !->
if value == 0
Dom.style
backgroundColor: "gray"
border: "1px gray solid"
color: "white"
else
Dom.style
backgroundColor: "#fff"
border: "1px gray solid"
color: "black"
Dom.text tr("No")
, -> toggleStatus(status.key(), 0)
# Cook button
Ui.button !->
extra = ""
if value > 0
Dom.style
backgroundColor: "#a00"
border: "1px #a00 solid"
color: "white"
# Extra people attend dinner
if value > 1
extra = " +" + (value - 1)
else
Dom.style
backgroundColor: "#fff"
border: "1px #a00 solid"
color: "black"
Dom.text tr("Cook") + extra
, -> toggleStatus(status.key(), if value > 0 then (value + 1) else 1)
| 116723 | Db = require "db"
Dom = require "dom"
Modal = require "modal"
Obs = require "obs"
Plugin = require "plugin"
Server = require "server"
Ui = require "ui"
Form = require "form"
Time = require "time"
tr = require("i18n").tr
# Open dialog to select an user
userSelect = (opts) ->
doModal = !->
Modal.show opts.title, !->
Dom.style width: "80%"
Ui.list !->
Dom.style
maxHeight: "40%"
overflow: "auto"
_overflowScrolling: "touch"
backgroundColor: "#eee"
margin: "-12px -12px -15px -12px"
Plugin.users.iterate (user) !->
Ui.item !->
Ui.avatar user.get("avatar")
Dom.text user.get("name")
if user.key() is opts.value
Dom.style fontWeight: "bold"
Dom.div !->
Dom.style
padding: "0 10px"
WebkitBoxFlex: 1
textAlign: "right"
fontSize: "150%"
color: "#72BB53"
Dom.text "✓"
Dom.onTap !->
opts.onSave user.key()
Modal.remove()
user = Plugin.users.get(opts.value)
if content = opts.content
content user, doModal
# Render the settings UI. The Settings UI has an username and password field.
# Optionally, the user can clear all data.
exports.renderSettings = ->
credentials = Obs.create()
# Helper for merging the credential data
merge = (d) !->
if not d
log "Received no credentials. Plugin not configured?"
else
credentials.merge(d)
# Request server credentials
Server.call "getCredentials", merge
Obs.observe !->
# Wait for data to be ready
if credentials.get("username") is null
Ui.spinner 24
return
# Username
Form.input
name: "username"
text: tr("Username")
value: credentials.get("username")
onSave: (value) ->
Server.sync "setCredentials", {"username": value}, merge, !->
credentials.set "username", value
# Password
Form.input
name: "password"
text: tr("<PASSWORD>")
type: "password"
value: credentials.get("password")
onSave: (value) ->
Server.sync "setCredentials", {"password": value}, merge, !->
credentials.set("password", value)
# Clear all
Ui.bigButton tr("Clear all data"), !->
Modal.confirm tr("Clear all data"), tr("Are you sure you want to clear all data?"), !->
credentials.set
username: null
password: <PASSWORD>
Server.call "clearAll"
# Render the UI
exports.render = ->
# Ask the server to refresh the data. In case new data is available, it will
# be pushed to the client. The lastVisit variable will store the last visit,
# so new information can be highlighted.
Server.call "refresh", (lastVisit) ->
Db.local.set "lastVisit", lastVisit
setTimeout ->
# Set to greater number, so highlights won't appear again.
Db.local.set("lastVisit", new Date().getTime())
, 5000
# App not configured
if not Db.shared.get("info")
# Title
Dom.h1 "Eetlijst"
# Warning message
if Plugin.userIsAdmin()
Dom.p tr("This application isn't configured yet. Please enter the
credentials on the settings page.")
else
Dom.p tr("This application isn't configured yet. Only admins of this
Happening can configure it.")
# Some error occured, e.g. wrong credentials
else if Db.shared.get("info", "error")
Dom.h1 "Eetlijst"
Dom.p tr("Some error occured while configuring this plugin. Are the
credentials correctly entered?")
# App is configured
else
# Title
Dom.h1 !->
Dom.style
whiteSpace: "nowrap"
Dom.text Db.shared.get("info", "name") + " - Eetlijst"
# Noticeboard
Dom.h2 tr("Noticeboard")
Dom.form !->
Form.text
name: "tapText"
autogrow: true
format: (t) -> Dom.richText(t)
value: Db.shared.get("noticeboard")
title: tr("Noticeboard is empty!")
onSave: (v) !->
Server.sync "setNoticeboard", v, ->
Db.shared.set("noticeboard", v)
# List of todays statuses
Dom.h2 tr("Statuses")
Obs.observe !->
today = Db.shared.ref("today")
if not today
Dom.p tr("There is no status information available for today")
return
# Verify deadline
deadline = new Date(today.peek("deadline") * 1000)
deadlinePassed = new Date().getTime() > deadline.getTime()
if deadlinePassed
Time.deltaText deadline / 1000, "default", (t) !->
Dom.p tr("You cannot change the status for today. The \
deadline of %1 has already passed!", t)
else
Time.deltaText deadline.getTime() / 1000, "default", (t) !->
Dom.p tr("The deadline will expire in %1.", t)
# Draw all rows
Ui.list !-> today.iterate "statuses", (status) !->
resident = status.peek("resident")
# Draw single row
Ui.item !->
Dom.style
padding: "0"
height: "64px"
# User avatar
userSelect
name: "user"
title: tr("Map user %1 to:", resident)
value: Db.shared.get("mapping", status.key())
onSave: (userId) !-> Server.sync "setUser", status.key(), userId, !->
# An userId cannot be associated with two residents
for k, v of Db.shared.get("mapping")
if v == userId
Db.shared.set "mapping", k, null
# Set new mapping
Db.shared.set "mapping", status.key(), userId
content: (user, modal) !->
Dom.div !->
Dom.style
margin: "0 8px 0 0"
width: "38px"
Ui.avatar user?.avatar
Dom.onTap modal
Dom.div !->
Dom.style
whiteSpace: "nowrap"
overflow: "hidden"
Flex: 1
# Resident name and last changed
if user
Dom.text user.name + " "
Dom.span ->
Dom.style
color: "#ccc"
Dom.text "(" + resident + ")"
else
Dom.text resident
Dom.br()
Dom.span !->
lastChanged = status.get("lastChanged")
lastVisit = Db.local.get("lastVisit") or 0
if lastChanged > lastVisit
Dom.style
color: "red"
else
Dom.style
color: "#ccc"
Time.deltaText lastChanged, "short"
Obs.observe !->
value = status.get("value")
# Helper for toggling state, including deadline check
toggleStatus = (resident, value) ->
if deadlinePassed
Modal.show "Deadline passed", "The deadline has already passed."
else
Server.sync "setStatus", resident, value, today.peek("timestamp"), ->
Db.shared.set("today", "statuses", resident, "value", value)
Db.shared.set("today", "statuses", resident, "lastChanged", (new Date().getTime() / 1000))
# Diner button
Ui.button !->
extra = ""
if value < 0
Dom.style
backgroundColor: "#72bb53"
border: "1px #72bb53 solid"
color: "white"
# Extra people attend dinner
if value < -1
extra = " +" + (-1 * value - 1)
else
Dom.style
backgroundColor: "#fff"
border: "1px #72bb53 solid"
color: "black"
Dom.text tr("Diner") + extra
, -> toggleStatus(status.key(), if value < 0 then (value - 1) else -1)
# No button
Ui.button !->
if value == 0
Dom.style
backgroundColor: "gray"
border: "1px gray solid"
color: "white"
else
Dom.style
backgroundColor: "#fff"
border: "1px gray solid"
color: "black"
Dom.text tr("No")
, -> toggleStatus(status.key(), 0)
# Cook button
Ui.button !->
extra = ""
if value > 0
Dom.style
backgroundColor: "#a00"
border: "1px #a00 solid"
color: "white"
# Extra people attend dinner
if value > 1
extra = " +" + (value - 1)
else
Dom.style
backgroundColor: "#fff"
border: "1px #a00 solid"
color: "black"
Dom.text tr("Cook") + extra
, -> toggleStatus(status.key(), if value > 0 then (value + 1) else 1)
| true | Db = require "db"
Dom = require "dom"
Modal = require "modal"
Obs = require "obs"
Plugin = require "plugin"
Server = require "server"
Ui = require "ui"
Form = require "form"
Time = require "time"
tr = require("i18n").tr
# Open a dialog to select a user.
# Renders the caller-supplied inline widget (opts.content, e.g. an avatar)
# and wires it so that tapping it opens a modal listing every user of this
# Happening. The currently mapped user is shown in bold with a checkmark.
#
# opts:
#   title   - modal title string
#   value   - key of the currently selected user (may be undefined)
#   onSave  - callback invoked with the tapped user's key
#   content - (user, openModal) -> renders the inline widget; `user` is the
#             Plugin user object for opts.value (or undefined if unmapped)
userSelect = (opts) ->
	# Opens the selection modal; defined first so it can be handed to opts.content.
	doModal = !->
		Modal.show opts.title, !->
			Dom.style width: "80%"
			Ui.list !->
				Dom.style
					maxHeight: "40%"
					overflow: "auto"
					_overflowScrolling: "touch"
					backgroundColor: "#eee"
					# Negative margins stretch the list to the modal's edges.
					margin: "-12px -12px -15px -12px"
				Plugin.users.iterate (user) !->
					Ui.item !->
						Ui.avatar user.get("avatar")
						Dom.text user.get("name")
						# Highlight the user that is currently selected.
						if user.key() is opts.value
							Dom.style fontWeight: "bold"
							Dom.div !->
								Dom.style
									padding: "0 10px"
									WebkitBoxFlex: 1
									textAlign: "right"
									fontSize: "150%"
									color: "#72BB53"
								Dom.text "✓"
						# Tapping any row saves that user and closes the modal.
						Dom.onTap !->
							opts.onSave user.key()
							Modal.remove()
	user = Plugin.users.get(opts.value)
	# Render the inline widget, passing it the resolved user and the modal opener.
	if content = opts.content
		content user, doModal
# Render the settings UI. The settings UI has a username and password field.
# Optionally, the user can clear all data.
#
# Credentials are fetched asynchronously from the server into a local
# observable; the form shows a spinner until they arrive.
exports.renderSettings = ->
	credentials = Obs.create()
	# Helper for merging the credential data received from the server.
	merge = (d) !->
		if not d
			log "Received no credentials. Plugin not configured?"
		else
			credentials.merge(d)
	# Request server credentials
	Server.call "getCredentials", merge
	Obs.observe !->
		# Wait for data to be ready
		if credentials.get("username") is null
			Ui.spinner 24
			return
		# Username
		Form.input
			name: "username"
			text: tr("Username")
			value: credentials.get("username")
			onSave: (value) ->
				Server.sync "setCredentials", {"username": value}, merge, !->
					credentials.set "username", value
		# Password
		Form.input
			name: "password"
			# Restored label: this literal was clobbered by a redaction
			# placeholder; "Password" matches the field name/type and the
			# parallel "Username" field above.
			text: tr("Password")
			type: "password"
			value: credentials.get("password")
			onSave: (value) ->
				Server.sync "setCredentials", {"password": value}, merge, !->
					credentials.set("password", value)
		# Clear all: wipes both credentials locally and asks the server to
		# drop all stored data, after user confirmation.
		Ui.bigButton tr("Clear all data"), !->
			Modal.confirm tr("Clear all data"), tr("Are you sure you want to clear all data?"), !->
				credentials.set
					username: null
					# Restored value: was a redaction placeholder (invalid
					# syntax); null mirrors the username reset above.
					password: null
				Server.call "clearAll"
# Render the main UI.
# Three states: (1) not configured -> show instructions, (2) configuration
# error -> show error message, (3) configured -> show noticeboard plus
# today's per-resident status list with Diner/No/Cook toggle buttons.
exports.render = ->
	# Ask the server to refresh the data. In case new data is available, it will
	# be pushed to the client. The lastVisit variable will store the last visit,
	# so new information can be highlighted.
	Server.call "refresh", (lastVisit) ->
		Db.local.set "lastVisit", lastVisit
		setTimeout ->
			# Set to greater number, so highlights won't appear again.
			Db.local.set("lastVisit", new Date().getTime())
		, 5000
	# App not configured
	if not Db.shared.get("info")
		# Title
		Dom.h1 "Eetlijst"
		# Warning message
		if Plugin.userIsAdmin()
			Dom.p tr("This application isn't configured yet. Please enter the
				credentials on the settings page.")
		else
			Dom.p tr("This application isn't configured yet. Only admins of this
				Happening can configure it.")
	# Some error occured, e.g. wrong credentials
	else if Db.shared.get("info", "error")
		Dom.h1 "Eetlijst"
		Dom.p tr("Some error occured while configuring this plugin. Are the
			credentials correctly entered?")
	# App is configured
	else
		# Title
		Dom.h1 !->
			Dom.style
				whiteSpace: "nowrap"
			Dom.text Db.shared.get("info", "name") + " - Eetlijst"
		# Noticeboard: a shared free-text field, editable by anyone.
		Dom.h2 tr("Noticeboard")
		Dom.form !->
			Form.text
				name: "tapText"
				autogrow: true
				format: (t) -> Dom.richText(t)
				value: Db.shared.get("noticeboard")
				title: tr("Noticeboard is empty!")
				onSave: (v) !->
					Server.sync "setNoticeboard", v, ->
						Db.shared.set("noticeboard", v)
		# List of today's statuses
		Dom.h2 tr("Statuses")
		Obs.observe !->
			today = Db.shared.ref("today")
			if not today
				Dom.p tr("There is no status information available for today")
				return
			# Verify deadline. Stored deadline is in seconds; convert to ms.
			deadline = new Date(today.peek("deadline") * 1000)
			deadlinePassed = new Date().getTime() > deadline.getTime()
			if deadlinePassed
				# NOTE(review): `deadline / 1000` relies on implicit Date->number
				# coercion; the else-branch uses deadline.getTime() / 1000 for
				# the same thing — presumably both were meant to be identical.
				Time.deltaText deadline / 1000, "default", (t) !->
					Dom.p tr("You cannot change the status for today. The \
						deadline of %1 has already passed!", t)
			else
				Time.deltaText deadline.getTime() / 1000, "default", (t) !->
					Dom.p tr("The deadline will expire in %1.", t)
			# Draw all rows, one per resident.
			Ui.list !-> today.iterate "statuses", (status) !->
				resident = status.peek("resident")
				# Draw single row
				Ui.item !->
					Dom.style
						padding: "0"
						height: "64px"
					# User avatar: maps a Happening user to this resident.
					userSelect
						name: "user"
						title: tr("Map user %1 to:", resident)
						value: Db.shared.get("mapping", status.key())
						onSave: (userId) !-> Server.sync "setUser", status.key(), userId, !->
							# A userId cannot be associated with two residents,
							# so clear any existing mapping to this user first.
							for k, v of Db.shared.get("mapping")
								if v == userId
									Db.shared.set "mapping", k, null
							# Set new mapping
							Db.shared.set "mapping", status.key(), userId
						content: (user, modal) !->
							Dom.div !->
								Dom.style
									margin: "0 8px 0 0"
									width: "38px"
								Ui.avatar user?.avatar
								Dom.onTap modal
					Dom.div !->
						Dom.style
							whiteSpace: "nowrap"
							overflow: "hidden"
							Flex: 1
						# Resident name and last changed. `user` is the mapped
						# Happening user bound by userSelect's content callback.
						if user
							Dom.text user.name + " "
							Dom.span ->
								Dom.style
									color: "#ccc"
								Dom.text "(" + resident + ")"
						else
							Dom.text resident
						Dom.br()
						Dom.span !->
							lastChanged = status.get("lastChanged")
							lastVisit = Db.local.get("lastVisit") or 0
							# Highlight in red if changed since the last visit.
							if lastChanged > lastVisit
								Dom.style
									color: "red"
							else
								Dom.style
									color: "#ccc"
							Time.deltaText lastChanged, "short"
					Obs.observe !->
						# Status value encoding (inferred from the button logic
						# below): negative = attends dinner (-1 self, each extra
						# person subtracts one more); 0 = not attending;
						# positive = cooks (+1 self, each extra guest adds one).
						value = status.get("value")
						# Helper for toggling state, including deadline check.
						# Note: parameters `resident` and `value` shadow the
						# outer bindings of the same names.
						toggleStatus = (resident, value) ->
							if deadlinePassed
								# NOTE(review): these strings are not wrapped in
								# tr() unlike the rest of the UI — confirm whether
								# they should be translatable.
								Modal.show "Deadline passed", "The deadline has already passed."
							else
								Server.sync "setStatus", resident, value, today.peek("timestamp"), ->
									Db.shared.set("today", "statuses", resident, "value", value)
									Db.shared.set("today", "statuses", resident, "lastChanged", (new Date().getTime() / 1000))
						# Diner button: green when active; repeated taps add guests.
						Ui.button !->
							extra = ""
							if value < 0
								Dom.style
									backgroundColor: "#72bb53"
									border: "1px #72bb53 solid"
									color: "white"
								# Extra people attend dinner
								if value < -1
									extra = " +" + (-1 * value - 1)
							else
								Dom.style
									backgroundColor: "#fff"
									border: "1px #72bb53 solid"
									color: "black"
							Dom.text tr("Diner") + extra
						, -> toggleStatus(status.key(), if value < 0 then (value - 1) else -1)
						# No button: gray when active; resets status to 0.
						Ui.button !->
							if value == 0
								Dom.style
									backgroundColor: "gray"
									border: "1px gray solid"
									color: "white"
							else
								Dom.style
									backgroundColor: "#fff"
									border: "1px gray solid"
									color: "black"
							Dom.text tr("No")
						, -> toggleStatus(status.key(), 0)
						# Cook button: red when active; repeated taps add guests.
						Ui.button !->
							extra = ""
							if value > 0
								Dom.style
									backgroundColor: "#a00"
									border: "1px #a00 solid"
									color: "white"
								# Extra people attend dinner
								if value > 1
									extra = " +" + (value - 1)
							else
								Dom.style
									backgroundColor: "#fff"
									border: "1px #a00 solid"
									color: "black"
							Dom.text tr("Cook") + extra
						, -> toggleStatus(status.key(), if value > 0 then (value + 1) else 1)
|
[
{
"context": "ing: \"Probando, uno, dos, tres!\"\n\tsomeOneFamous: \"alguien famoso\"\n\tglobal:\n\t\tchangeLocale: \"Cambiar el idioma\"",
"end": 162,
"score": 0.9993112683296204,
"start": 148,
"tag": "NAME",
"value": "alguien famoso"
}
] | src/coffee/translation-es.coffee | vtex/front.i18n | 0 | window.vtex = window.vtex || {}
window.vtex.i18n = vtex.i18n || {}
window.vtex.i18n["es"] =
testing: "Probando, uno, dos, tres!"
someOneFamous: "alguien famoso"
global:
changeLocale: "Cambiar el idioma" | 35270 | window.vtex = window.vtex || {}
window.vtex.i18n = vtex.i18n || {}
window.vtex.i18n["es"] =
testing: "Probando, uno, dos, tres!"
someOneFamous: "<NAME>"
global:
changeLocale: "Cambiar el idioma" | true | window.vtex = window.vtex || {}
window.vtex.i18n = vtex.i18n || {}
window.vtex.i18n["es"] =
testing: "Probando, uno, dos, tres!"
someOneFamous: "PI:NAME:<NAME>END_PI"
global:
changeLocale: "Cambiar el idioma" |
[
{
"context": "CI_chart').data('results')\n xkey: 'percentile_rank'\n ykeys: ['percentile_rank','ann_rank','smm_",
"end": 455,
"score": 0.5420827865600586,
"start": 451,
"tag": "KEY",
"value": "rank"
},
{
"context": " xkey: 'percentile_rank'\n ykeys: ['percentile_rank',... | app/assets/javascripts/mhci_result/mhci_result.coffee | SeokJongYu/Kepre | 0 | # Place all the behaviors and hooks related to the matching controller here.
# All this logic will automatically be available in application.js.
# You can use CoffeeScript in this file: http://coffeescript.org/
$(document).on 'turbolinks:load', ->
return unless page.controller() == 'mhci_result' && page.action() == 'plot'
jQuery ->
Morris.Line
element: 'MHCI_chart'
data: $('#MHCI_chart').data('results')
xkey: 'percentile_rank'
ykeys: ['percentile_rank','ann_rank','smm_rank']
labels: ['percentile_rank','ann_rank','smm_rank']
parseTime: false
xLabelMargin: 10
hoverCallback: (index, options) ->
row = options.data[index]
row.peptide
| 115524 | # Place all the behaviors and hooks related to the matching controller here.
# All this logic will automatically be available in application.js.
# You can use CoffeeScript in this file: http://coffeescript.org/
$(document).on 'turbolinks:load', ->
return unless page.controller() == 'mhci_result' && page.action() == 'plot'
jQuery ->
Morris.Line
element: 'MHCI_chart'
data: $('#MHCI_chart').data('results')
xkey: 'percentile_<KEY>'
ykeys: ['percentile_<KEY>','ann_<KEY>','sm<KEY>']
labels: ['percentile_rank','ann_rank','smm_rank']
parseTime: false
xLabelMargin: 10
hoverCallback: (index, options) ->
row = options.data[index]
row.peptide
| true | # Place all the behaviors and hooks related to the matching controller here.
# All this logic will automatically be available in application.js.
# You can use CoffeeScript in this file: http://coffeescript.org/
$(document).on 'turbolinks:load', ->
return unless page.controller() == 'mhci_result' && page.action() == 'plot'
jQuery ->
Morris.Line
element: 'MHCI_chart'
data: $('#MHCI_chart').data('results')
xkey: 'percentile_PI:KEY:<KEY>END_PI'
ykeys: ['percentile_PI:KEY:<KEY>END_PI','ann_PI:KEY:<KEY>END_PI','smPI:KEY:<KEY>END_PI']
labels: ['percentile_rank','ann_rank','smm_rank']
parseTime: false
xLabelMargin: 10
hoverCallback: (index, options) ->
row = options.data[index]
row.peptide
|
[
{
"context": "erms of the MIT license.\nCopyright 2012 - 2016 (c) Markus Kohlhase <mail@markus-kohlhase.de>\n###\n\nRouter = require \"",
"end": 109,
"score": 0.9998955726623535,
"start": 94,
"tag": "NAME",
"value": "Markus Kohlhase"
},
{
"context": "cense.\nCopyright 2012 - 2016 (c)... | src/Application.coffee | flosse/node-xmpp-joap | 0 | ###
This program is distributed under the terms of the MIT license.
Copyright 2012 - 2016 (c) Markus Kohlhase <mail@markus-kohlhase.de>
###
Router = require "./Router"
async = require "async"
TYPES = [
'add'
'read'
'edit'
'delete'
'describe'
'search'
'rpc'
]
class Response
constructor: (@req, @app) ->
end: (data=@data) -> @app.router.sendResponse @req, data
error: (err) -> @app.router.sendError err, @req
class Application
constructor: (@xmpp, opt={}) ->
unless @xmpp?.connection?.jid?
throw new Error "invalid XMPP Component"
{ errorOnTooBusy } = opt
@router = new Router @xmpp, { errorOnTooBusy }
@plugins = { use:[] }
@router.on "action", (req) =>
res = new Response req, @
async.applyEachSeries @plugins.use, req, res, (err) =>
return console.error err if err
if (fns = @plugins[req.type])?
async.applyEachSeries fns, req, res, (err) ->
return console.error err if err
use: (fn, type) ->
return unless typeof fn is 'function'
if type then return unless type in TYPES
else type = 'use'
@plugins[type] ?= []
@plugins[type].push fn
add: (fn) -> @use fn, 'add'
module.exports = Application
| 107865 | ###
This program is distributed under the terms of the MIT license.
Copyright 2012 - 2016 (c) <NAME> <<EMAIL>>
###
Router = require "./Router"
async = require "async"
TYPES = [
'add'
'read'
'edit'
'delete'
'describe'
'search'
'rpc'
]
class Response
constructor: (@req, @app) ->
end: (data=@data) -> @app.router.sendResponse @req, data
error: (err) -> @app.router.sendError err, @req
class Application
constructor: (@xmpp, opt={}) ->
unless @xmpp?.connection?.jid?
throw new Error "invalid XMPP Component"
{ errorOnTooBusy } = opt
@router = new Router @xmpp, { errorOnTooBusy }
@plugins = { use:[] }
@router.on "action", (req) =>
res = new Response req, @
async.applyEachSeries @plugins.use, req, res, (err) =>
return console.error err if err
if (fns = @plugins[req.type])?
async.applyEachSeries fns, req, res, (err) ->
return console.error err if err
use: (fn, type) ->
return unless typeof fn is 'function'
if type then return unless type in TYPES
else type = 'use'
@plugins[type] ?= []
@plugins[type].push fn
add: (fn) -> @use fn, 'add'
module.exports = Application
| true | ###
This program is distributed under the terms of the MIT license.
Copyright 2012 - 2016 (c) PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
###
Router = require "./Router"
async = require "async"
TYPES = [
'add'
'read'
'edit'
'delete'
'describe'
'search'
'rpc'
]
class Response
constructor: (@req, @app) ->
end: (data=@data) -> @app.router.sendResponse @req, data
error: (err) -> @app.router.sendError err, @req
class Application
constructor: (@xmpp, opt={}) ->
unless @xmpp?.connection?.jid?
throw new Error "invalid XMPP Component"
{ errorOnTooBusy } = opt
@router = new Router @xmpp, { errorOnTooBusy }
@plugins = { use:[] }
@router.on "action", (req) =>
res = new Response req, @
async.applyEachSeries @plugins.use, req, res, (err) =>
return console.error err if err
if (fns = @plugins[req.type])?
async.applyEachSeries fns, req, res, (err) ->
return console.error err if err
use: (fn, type) ->
return unless typeof fn is 'function'
if type then return unless type in TYPES
else type = 'use'
@plugins[type] ?= []
@plugins[type].push fn
add: (fn) -> @use fn, 'add'
module.exports = Application
|
[
{
"context": ".superPass = Meteor.settings?.ldap?.superPass || \"123456\"\n LDAP_DEFAULTS.admins = Meteor.settings?.ldap?.",
"end": 363,
"score": 0.9987867474555969,
"start": 357,
"tag": "PASSWORD",
"value": "123456"
},
{
"context": "ggedInUser or !Roles.userIsInRole(loggedInUser, [... | server/ldap.coffee | ICTU/appstore | 0 | Meteor.startup ->
LDAP_DEFAULTS.url = Meteor.settings?.ldap?.serverAddr || "ldap://ldap"
LDAP_DEFAULTS.port = Meteor.settings?.ldap?.serverPort || "389"
LDAP_DEFAULTS.base = Meteor.settings?.ldap?.baseDn || "dc=ictu,dc=nl"
LDAP_DEFAULTS.superDn = Meteor.settings?.ldap?.superDn || ""
LDAP_DEFAULTS.superPass = Meteor.settings?.ldap?.superPass || "123456"
LDAP_DEFAULTS.admins = Meteor.settings?.ldap?.admins || ['']
LDAP_DEFAULTS.searchResultsProfileMap = [
{
resultKey: "uid"
profileProperty: "username"
}
{
resultKey: "dn"
profileProperty: "dn"
}
{
resultKey: "mail"
profileProperty: "email"
}
]
roles = ['admin', 'editor', 'viewer']
for role in roles
if Meteor.roles.find(name: role).count == 0
Roles.createRole role
Meteor.methods
getRolesForUser: (targetUser) ->
loggedInUser = Meteor.user
if !loggedInUser or !Roles.userIsInRole(loggedInUser, ['admin'], Roles.GLOBAL_GROUP)
throw new Meteor.error 403, 'Access denied'
Roles.getRolesForUser targetUser
updateRoles: (userId, roles) ->
loggedInUser = Meteor.user
if !loggedInUser or !Roles.userIsInRole(loggedInUser, ['admin'], Roles.GLOBAL_GROUP)
throw new Meteor.Error 403, 'Access denied'
Roles.setUserRoles userId, roles, Roles.GLOBAL_GROUP
addRole: (userId, role) ->
loggedInUser = Meteor.user
if !loggedInUser or !Roles.userIsInRole(loggedInUser, ['admin'], Roles.GLOBAL_GROUP)
throw new Meteor.Error 403, 'Access denied'
Roles.addUsersToRoles userId, [role], Roles.GLOBAL_GROUP
| 198881 | Meteor.startup ->
LDAP_DEFAULTS.url = Meteor.settings?.ldap?.serverAddr || "ldap://ldap"
LDAP_DEFAULTS.port = Meteor.settings?.ldap?.serverPort || "389"
LDAP_DEFAULTS.base = Meteor.settings?.ldap?.baseDn || "dc=ictu,dc=nl"
LDAP_DEFAULTS.superDn = Meteor.settings?.ldap?.superDn || ""
LDAP_DEFAULTS.superPass = Meteor.settings?.ldap?.superPass || "<PASSWORD>"
LDAP_DEFAULTS.admins = Meteor.settings?.ldap?.admins || ['']
LDAP_DEFAULTS.searchResultsProfileMap = [
{
resultKey: "uid"
profileProperty: "username"
}
{
resultKey: "dn"
profileProperty: "dn"
}
{
resultKey: "mail"
profileProperty: "email"
}
]
roles = ['admin', 'editor', 'viewer']
for role in roles
if Meteor.roles.find(name: role).count == 0
Roles.createRole role
Meteor.methods
getRolesForUser: (targetUser) ->
loggedInUser = Meteor.user
if !loggedInUser or !Roles.userIsInRole(loggedInUser, ['admin'], Roles.GLOBAL_GROUP)
throw new Meteor.error 403, 'Access denied'
Roles.getRolesForUser targetUser
updateRoles: (userId, roles) ->
loggedInUser = Meteor.user
if !loggedInUser or !Roles.userIsInRole(loggedInUser, ['admin'], Roles.GLOBAL_GROUP)
throw new Meteor.Error 403, 'Access denied'
Roles.setUserRoles userId, roles, Roles.GLOBAL_GROUP
addRole: (userId, role) ->
loggedInUser = Meteor.user
if !loggedInUser or !Roles.userIsInRole(loggedInUser, ['admin'], Roles.GLOBAL_GROUP)
throw new Meteor.Error 403, 'Access denied'
Roles.addUsersToRoles userId, [role], Roles.GLOBAL_GROUP
| true | Meteor.startup ->
LDAP_DEFAULTS.url = Meteor.settings?.ldap?.serverAddr || "ldap://ldap"
LDAP_DEFAULTS.port = Meteor.settings?.ldap?.serverPort || "389"
LDAP_DEFAULTS.base = Meteor.settings?.ldap?.baseDn || "dc=ictu,dc=nl"
LDAP_DEFAULTS.superDn = Meteor.settings?.ldap?.superDn || ""
LDAP_DEFAULTS.superPass = Meteor.settings?.ldap?.superPass || "PI:PASSWORD:<PASSWORD>END_PI"
LDAP_DEFAULTS.admins = Meteor.settings?.ldap?.admins || ['']
LDAP_DEFAULTS.searchResultsProfileMap = [
{
resultKey: "uid"
profileProperty: "username"
}
{
resultKey: "dn"
profileProperty: "dn"
}
{
resultKey: "mail"
profileProperty: "email"
}
]
roles = ['admin', 'editor', 'viewer']
for role in roles
if Meteor.roles.find(name: role).count == 0
Roles.createRole role
Meteor.methods
getRolesForUser: (targetUser) ->
loggedInUser = Meteor.user
if !loggedInUser or !Roles.userIsInRole(loggedInUser, ['admin'], Roles.GLOBAL_GROUP)
throw new Meteor.error 403, 'Access denied'
Roles.getRolesForUser targetUser
updateRoles: (userId, roles) ->
loggedInUser = Meteor.user
if !loggedInUser or !Roles.userIsInRole(loggedInUser, ['admin'], Roles.GLOBAL_GROUP)
throw new Meteor.Error 403, 'Access denied'
Roles.setUserRoles userId, roles, Roles.GLOBAL_GROUP
addRole: (userId, role) ->
loggedInUser = Meteor.user
if !loggedInUser or !Roles.userIsInRole(loggedInUser, ['admin'], Roles.GLOBAL_GROUP)
throw new Meteor.Error 403, 'Access denied'
Roles.addUsersToRoles userId, [role], Roles.GLOBAL_GROUP
|
[
{
"context": "9.2/bin/ruby' }\n ]\n\nLittleQuacker =\n name: 'LittleQuacker'\n duck:\n quacks: yes\n swims: no\n\nclass To",
"end": 5341,
"score": 0.9429185390472412,
"start": 5328,
"tag": "NAME",
"value": "LittleQuacker"
},
{
"context": "= @entity[key]\n\n beforeEac... | windows/backend/node_modules/livereload-core/node_modules/newreactive/test/reactive_test.iced | Acidburn0zzz/LiveReload | 723 | { ok, equal, deepEqual } = require 'assert'
_ = require 'underscore'
R = require "../#{process.env.JSLIB or 'lib'}/reactive"
throws = (smt, func) ->
require('assert').throws func, smt
Object.defineProperty R.Universe::, 'kindNames', get: -> _.pluck(@kinds, 'kind').sort()
catchAndReturnError = (func) ->
try func(); return null
catch err then return err
Advise =
before: (object, key, decorator) ->
original = object[key]
object[key] = (args...) ->
decorator.call(object, args...)
original.call(object, args...)
helpers =
usesUniverse: ->
beforeEach -> @universe = new R.Universe()
afterEach -> @universe.dispose(); delete @universe
return this
usesRootEntity: ->
beforeEach ->
@universe.define(EmptyEntity)
@root = @universe.create('EmptyEntity')
@performAndWait = (done, name, func) =>
@universe.performAndWait(func, done)
return this
defines: (entityClasses...) ->
beforeEach ->
@universe.define(entityClasses)
return this
creates: (fieldName, kind, attributes={}) ->
beforeEach ->
this[fieldName] = @universe.create(kind, attributes)
return this
performAndWait: (name, func) ->
beforeEach (done) ->
@performAndWait done, name, func.bind(this)
createsInJob: (fieldName, kind, attributes={}) ->
helpers.performAndWait "initialize", ->
this[fieldName] = @universe.create(kind, attributes)
return this
usesLog: ->
beforeEach ->
@log = []
@log.equal = (expected) =>
equal JSON.stringify(@log, null, 2), JSON.stringify(expected, null, 2)
@log.splice(0, @log.length)
@log.pusher = (line) =>
return =>
@log.push(line); undefined
@log.wrap = (line, func) =>
log = @log
return (args...) ->
log.push line
return func.apply(this, args)
@log.inspectTaskEvents = (task, events) ->
if 'finalized' in events
task.on 'finalized', @pusher("#{task}.finalized")
if 'completed' in events
task.on 'completed', @pusher("#{task}.completed")
return this
afterEach ->
delete @log
return this
inspectReads: ->
beforeEach ->
Advise.before @universe, 'reading', (entity, topic) => @log.push("reading #{entity._r_id} #{topic}") if @log
return this
inspectBroadcasts: ->
beforeEach ->
Advise.before @universe, 'broadcast', (entity, topic) => @log.push("change #{entity._r_id} #{topic}") if @log
return this
inspectSubscriptions: ->
beforeEach ->
Advise.before @universe, 'subscribe', (dependent, entity, topicSet) =>
if @log
topics = (if topicSet then Object.keys(topicSet).join(',') else 'null')
@log.push("sub #{dependent._r_id} #{entity._r_id} #{topics}")
return this
class EmptyEntity
class SimpleEntity_WithAutomaticalBlock
schema:
someValue: { type: 'int' }
automatically_collect_something: ->
(@list or= []).push @someValue
class BaseEntity
schema:
someValue: { type: 'int', default: 42 }
initialize: ->
@baseInitialized = yes
boringMethod: ->
24
automatically_copy_someValue: ->
@someValueCopy = @someValue
class DerivedEntity
schema:
$extends: 'BaseEntity'
anotherValue: { type: 'int', default: 33 }
initialize: ->
@derivedInitialized = yes
veryBoringMethod: ->
12
automatically_copy_anotherValue: ->
@anotherValueCopy = @anotherValue
class IncludingEntity
schema:
$includes: BaseEntity
class MyApp_SomeMethods
something: (x) -> x + 1
class MyApp_OtherMethods
another: (x) -> x * 2
class MyApp_EventCount
schema:
eventCount: { type: 'int' }
class MyApp_EventCount_WithDefault
schema:
eventCount: { type: 'int', default: 42 }
class MyApp_DoubleEventCount
schema:
doubleEventCount: { type: 'int' }
compute_doubleEventCount: ->
@eventCount * 2
class MyApp_Status1
schema:
status: { type: 'string' }
compute_status: ->
"eventCount is #{@eventCount}"
class MyApp_Status2
schema:
status: { type: 'string' }
compute_status: ->
"doubleEventCount ends with #{@doubleEventCount % 10}"
class MyApp_SuperStatus
schema:
superStatus: { type: 'string' }
compute_superStatus: ->
"(#{@status})"
class MyApp_PathCollection
schema:
pluginPaths: { collection: 'list', type: 'string' }
class MyApp_RubyCollection1
schema:
rubies: { collection: 'list', type: 'LRRuby' }
class LRRuby_Basics
schema:
path: { type: String }
class LRRuby_Version
schema:
version: { type: String }
compute_version: (callback) ->
setTimeout =>
if @path is '/usr/bin/ruby'
callback(null, "1.8.7")
else
callback(null, "1.9.3")
, 5
class MyApp_SystemRubies
schema:
systemRubies: { collection: 'list', type: 'LRRuby' }
compute_systemRubies: (callback) ->
process.nextTick ->
callback null, [
{ path: '/usr/bin/ruby' }
{ path: '/usr/local/bin/ruby' }
{ path: '~/.rvm/rubies/rvm-ruby-1.9.2/bin/ruby' }
]
LittleQuacker =
name: 'LittleQuacker'
duck:
quacks: yes
swims: no
class Tom_InSouthboundDuckling
schema:
southernFriedDuck: { type: 'LittleQuacker?' }
hunger: ->
100 - (@southernFriedDuck?.calories or 0)
class Jerry_MutualRef
schema:
enemy: { type: 'Tom?' }
class Tom_MutualRef
schema:
enemy: { type: 'Jerry?' }
describe 'R', ->
it "exports VERSION", ->
ok R.VERSION.match /^\d+\.\d+\.\d+$/
describe 'R.Universe', ->
helpers.usesUniverse()
describe ".uniqueId(className, [detail]) return value", ->
it "is a string", ->
equal typeof(@universe.uniqueId('Foo')), 'string'
it "is composed of the prefix followed by some digits", ->
ok @universe.uniqueId('Foo').match /^Foo\d+$/
it "is different on each call", ->
ok @universe.uniqueId('Foo') != @universe.uniqueId('Foo')
describe "(when the <detail> argument is specified)", ->
it "has an alphanumeric representation of <detail> appended to the id", ->
id = @universe.uniqueId('Foo', 'Bar Boz')
ok id.match(/^Foo\d+_Bar_Boz$/), "id '#{id}' does not match"
describe '#define(klass)', ->
describe "given a regular class", ->
helpers.defines(EmptyEntity)
it "defines a kind matching the given class name", ->
deepEqual @universe.kindNames, ['EmptyEntity']
it "adds the given class to the list of the kind's mixins", ->
deepEqual @universe.kinds.EmptyEntity.klasses, [EmptyEntity]
describe "given a class with a two-part name like Foo_Bar", ->
beforeEach -> @universe.define(MyApp_SomeMethods)
it "uses only the first part as the entity kind", ->
deepEqual @universe.kindNames, ['MyApp']
describe "given multiple classes for the same entity", ->
beforeEach -> @universe.define(MyApp_SomeMethods)
beforeEach -> @universe.define(MyApp_OtherMethods)
it "merges them into a single entity kind", ->
deepEqual @universe.kindNames, ['MyApp']
describe '#create(kind, [attributes])', ->
describe "given a valid kind", ->
helpers.defines(MyApp_SomeMethods).creates('entity', 'MyApp')
it "returns a new entity of that kind", ->
ok @entity.isReactiveEntity
equal @entity.kind, 'MyApp'
describe "given an unknown kind", ->
beforeEach -> @err = catchAndReturnError => @universe.create('NonExistent')
it "throws an error", ->
equal @err.message, "R.create: unknown entity kind 'NonExistent'"
describe 'R.Entity', ->
helpers.usesUniverse().usesRootEntity()
describe ".constructor.name", ->
helpers.defines(MyApp_SomeMethods).creates('entity', 'MyApp')
it "matches entity kind", ->
equal @entity.constructor.name, 'MyApp'
describe "composed of two classes", ->
helpers.defines(MyApp_SomeMethods, MyApp_OtherMethods).creates('entity', 'MyApp')
it "has the methods defined in both classes", ->
equal @entity.something(10), 11
equal @entity.another(10), 20
describe "with a magical automatically_ block", ->
helpers.defines(SimpleEntity_WithAutomaticalBlock).createsInJob('entity', 'SimpleEntity', someValue: 42)
it "runs the block after entity creation", ->
deepEqual @entity.list, [42]
describe "after the block's dependencies are updated", ->
helpers.performAndWait "update someValue", ->
@entity.someValue = 33
it "reruns the block after entity creation", ->
deepEqual @entity.list, [42, 33]
describe "that extends another entity", ->
helpers.defines(BaseEntity, DerivedEntity).createsInJob('entity', 'DerivedEntity')
it "has its own initializers invoked", ->
ok @entity.derivedInitialized
it "inherits the base entity's initializers", ->
ok @entity.baseInitialized
it "has the derived entity's attributes", ->
equal @entity.anotherValue, 33
it "inherits the base entity's attributes", ->
equal @entity.someValue, 42
it "has the derived entity's methods", ->
equal @entity.veryBoringMethod(), 12
it "inherits the base entity's methods", ->
equal @entity.boringMethod(), 24
it "runs the derived entity's automatic blocks", ->
equal @entity.anotherValueCopy, 33
it "inherits the base entity's automatic blocks", ->
equal @entity.someValueCopy, 42
describe "that includes another entity", ->
helpers.defines(IncludingEntity).createsInJob('entity', 'IncludingEntity')
it "obtains the included entity's initializers", ->
ok @entity.baseInitialized
it "obtains the included entity's attributes", ->
equal @entity.someValue, 42
it "obtains the included entity's methods", ->
equal @entity.boringMethod(), 24
it "obtains the included entity's automatic blocks", ->
equal @entity.someValueCopy, 42
describe "that references a non-existent entity", ->
it "can be defined", ->
@universe.define(Tom_MutualRef)
it "cannot be instantiated", ->
@universe.define(Tom_MutualRef)
throws /Unknown type/, =>
tom = @universe.create('Tom')
describe "with a cyclic reference to another entity", ->
it "can be defined and instantiated", ->
@universe.define(Tom_MutualRef, Jerry_MutualRef)
tom = @universe.create('Tom')
jerry = @universe.create('Jerry')
# Specs for R.Entity attributes: plain attributes, schema enforcement,
# defaults, typed coercion, duck types, and (chains of) computed attributes.
# NOTE(review): exact log contents/order are asserted via @log.equal, which
# also clears the log after each comparison (see helpers.usesLog).
describe 'R.Entity attribute', ->
  helpers.usesUniverse().usesLog()
  describe "(a boring regular one)", ->
    helpers.defines(MyApp_EventCount).creates('entity', 'MyApp', eventCount: 10)
    it "can be initialized by passing a value into constructor", ->
      equal @entity.eventCount, 10
    describe "when read via .get(attr)", ->
      helpers.inspectReads()
      beforeEach -> @value = @entity.get('eventCount')
      it "returns the current value", ->
        equal @value, 10
      it "records the dependency", ->
        @log.equal ["reading MyApp1 eventCount"]
    it "can be read directly", ->
      equal @entity.eventCount, 10
    it "can be set via .set()", ->
      @entity.set('eventCount', 20)
      equal @entity.eventCount, 20
    describe "when set directly", ->
      helpers.inspectBroadcasts()
      beforeEach ->
        @entity.eventCount = 20
      it "updates the value", ->
        @entity.eventCount = 20
        equal @entity.eventCount, 20
      it "broadcasts a change event", ->
        @log.equal ["change MyApp1 eventCount"]
    describe 'when set to the same value', ->
      helpers.inspectBroadcasts()
      beforeEach ->
        @entity.eventCount = 10
      it "does not broadcast a change event", ->
        @log.equal []
  describe "not defined by the schema", ->
    helpers.defines(MyApp_EventCount).creates('entity', 'MyApp', eventCount: 10)
    it "throws an error when reading via #get", ->
      equal((catchAndReturnError => @entity.get('missingAttr'))?.message, "Unknown attribute 'missingAttr'")
    it "throws an error when writing via #set", ->
      equal((catchAndReturnError => @entity.set('missingAttr', 20))?.message, "Unknown attribute 'missingAttr'")
  describe "with a default value", ->
    helpers.usesRootEntity().defines(MyApp_EventCount_WithDefault).creates('entity', 'MyApp')
    it "is initially set to the provided default value", ->
      equal @entity.eventCount, 42
  describe "with a type", ->
    helpers.usesRootEntity().defines(MyApp_EventCount).creates('entity', 'MyApp')
    it "is initially set to the type-specific default value", ->
      equal @entity.eventCount, 0
    describe "when assigned a value of a different compatible type", ->
      it "coerces the value into the correct type", ->
        @entity.eventCount = "11"
        equal @entity.eventCount, 11
        equal (typeof @entity.eventCount), 'number'
    describe "when assigned a value of an incompatible type", ->
      it "throws an error", ->
        throws /expected an integer/, =>
          @entity.eventCount = []
        throws /expected an integer/, =>
          @entity.eventCount = null
  describe "with a duck type", ->
    helpers.defines(LittleQuacker, Tom_InSouthboundDuckling).creates('tom', 'Tom')
    describe "when provided with the the right duck", ->
      it "eats it promptly", ->
        @tom.southernFriedDuck = { quacks: yes, swims: no, calories: 100 }
        equal @tom.hunger(), 0
    describe "when provided with an ignited bomb", ->
      it "chokes and fails to eat", ->
        throws /expected a LittleQuacker-like object/, =>
          # 1 kg TNT generates exactly 1 Mcal energy; assuming a 250g duckling and a weight match here
          @tom.southernFriedDuck = { boomIn: 3, calories: 250000 }
        equal @tom.hunger(), 100
  describe "with a compute function", ->
    helpers.usesRootEntity().defines(MyApp_EventCount, MyApp_Status1)
    helpers.createsInJob('entity', 'MyApp', eventCount: 10)
    it "is initially set to the default value", ->
      equal @universe.create('MyApp', eventCount: 10).status, ""
    it "is eventually set to the computed value", ->
      equal @entity.status, "eventCount is 10"
    describe "when the source value is updated", ->
      helpers.performAndWait "set eventCount", ->
        @entity.eventCount = 20
      it "the computed values is also eventually updated", ->
        equal @entity.status, "eventCount is 20"
  describe "with an async compute function", ->
    helpers.usesRootEntity().defines(LRRuby_Basics, LRRuby_Version)
    helpers.createsInJob('entity', 'LRRuby', path: '/usr/bin/ruby')
    it "is initially set to the default value", ->
      equal @universe.create('LRRuby', path: '/usr/bin/ruby').version, ""
    it "is eventually set to the computed value", ->
      equal @entity.version, "1.8.7"
    describe "when the source value is updated", ->
      helpers.performAndWait "update ruby.path", ->
        @entity.path = '/usr/local/bin/ruby'
      it "the computed values is also eventually updated", ->
        equal @entity.version, "1.9.3"
  describe "with a chain of dependent computed attributes", ->
    helpers.usesRootEntity().defines(MyApp_EventCount, MyApp_DoubleEventCount, MyApp_SuperStatus, MyApp_Status2)
    helpers.createsInJob('entity', 'MyApp', eventCount: 14)
    it "all computed attributes are eventually set to the final values", ->
      equal @entity.doubleEventCount, 28
      equal @entity.status, "doubleEventCount ends with 8"
      equal @entity.superStatus, "(doubleEventCount ends with 8)"
    describe "when the source attribute is updated", ->
      helpers.inspectBroadcasts()
      helpers.performAndWait "set eventCount", ->
        @entity.eventCount = 16
      it "the computed attributes are all eventually updated", ->
        equal @entity.doubleEventCount, 32
        equal @entity.status, "doubleEventCount ends with 2"
        equal @entity.superStatus, "(doubleEventCount ends with 2)"
      it "change events are broadcasted for the source and computed attributes", ->
        @log.equal ["change MyApp1 eventCount", "change MyApp1 doubleEventCount", "change MyApp1 status", "change MyApp1 superStatus"]
    describe "when the source attribute is set to the same value", ->
      helpers.inspectBroadcasts()
      helpers.performAndWait "set eventCount", ->
        @entity.eventCount = 14
      it "no change events are broadcasted", ->
        @log.equal []
    describe "when the source attribute is updated, but intermediate attribute value stays the same", ->
      helpers.inspectBroadcasts()
      helpers.performAndWait "set eventCount", ->
        @entity.eventCount = 9
      it "no change events are broadcasted for unmodified values", ->
        @log.equal ["change MyApp1 eventCount", "change MyApp1 doubleEventCount"]
# Specs for R.Task: ONESHOT vs AUTOREPEAT scheduling, completed/finalized event
# ordering with subtasks, dependency accumulation, and failure reporting.
describe "R.Task", ->
  helpers.usesUniverse().usesRootEntity().usesLog()
  describe "with ONESHOT type, sync function and no subtasks", ->
    beforeEach ->
      @task1 = new R.Task(@root, new R.TaskDef(@universe, 'smt', R.TaskDef.ONESHOT, @log.pusher("RTask1_smt.run")))
      @log.inspectTaskEvents(@task1, ['completed', 'finalized'])
    it "reports completion and finalization", (done) ->
      await @task1.schedule().waitFinalized(defer())
      @log.equal [
        'RTask1_smt.run'
        'RTask1_smt.completed'
        'RTask1_smt.finalized'
      ]
      done()
  describe "with ONESHOT type, sync function and a subtask", ->
    beforeEach ->
      func1 = =>
        @log.push "RTask1_parent.start"
        @task2.schedule()
        @log.push "RTask1_parent.end"
      @task1 = new R.Task(@root, new R.TaskDef(@universe, 'parent', R.TaskDef.ONESHOT, func1, multiuse: no, reactive: no))
      @task2 = new R.Task(@root, new R.TaskDef(@universe, 'child', R.TaskDef.ONESHOT, @log.pusher("RTask2_child.run"), multiuse: no, reactive: no))
      @log.inspectTaskEvents(@task1, ['completed', 'finalized'])
        .inspectTaskEvents(@task2, ['completed', 'finalized'])
    it "reports parent task completion, then subtask completion/finalization, then parent task finalization", (done) ->
      await @task1.schedule().waitFinalized(defer())
      @log.equal [
        'RTask1_parent.start'
        'RTask1_parent.end'
        'RTask1_parent.completed'
        'RTask2_child.run'
        'RTask2_child.completed'
        'RTask2_child.finalized'
        'RTask1_parent.finalized'
      ]
      done()
  describe "with AUTOREPEAT type and a sync function that loves to change its dependencies", ->
    helpers.inspectSubscriptions().defines(MyApp_EventCount, MyApp_DoubleEventCount, MyApp_Status1).creates('entity', 'MyApp', eventCount: 10)
    # reads @entity[key] for every key in @keys, so the task's dependency set
    # is exactly the attributes currently listed in @keys
    func = ->
      for key in @keys
        dummy = @entity[key]
    beforeEach ->
      @keys = ['eventCount', 'status']
      @task1 = new R.Task(@root, new R.TaskDef(@universe, 'smt', R.TaskDef.AUTOREPEAT, func.bind(@)))
    it "subscribes to the initial set topics", (done) ->
      await @task1.on 'finalized', defer()
      @log.equal [
        'sub RTask1_compute_doubleEventCount MyApp1 eventCount'
        'sub RTask2_compute_status MyApp1 eventCount'
        'sub RTask3_smt MyApp1 eventCount,status'
      ]
      done()
    describe "when a dependency is removed", ->
      helpers.performAndWait "update eventCount", ->
        @keys = ['eventCount']
        @entity.eventCount = 11
      it "resubscribes to the new set of topics", ->
        @log.equal [
          'sub RTask1_compute_doubleEventCount MyApp1 eventCount'
          'sub RTask2_compute_status MyApp1 eventCount'
          'sub RTask3_smt MyApp1 eventCount,status'
          'sub RTask1_compute_doubleEventCount MyApp1 eventCount'
          'sub RTask2_compute_status MyApp1 eventCount'
          'sub RTask3_smt MyApp1 eventCount'
        ]
    describe "when a dependency is added", ->
      helpers.performAndWait "update eventCount", ->
        @keys = ['eventCount', 'doubleEventCount']
        @entity.eventCount = 12
      it "resubscribes to the new set of topics", ->
        @log.equal [
          'sub RTask1_compute_doubleEventCount MyApp1 eventCount'
          'sub RTask2_compute_status MyApp1 eventCount'
          'sub RTask3_smt MyApp1 eventCount,status'
          'sub RTask1_compute_doubleEventCount MyApp1 eventCount'
          'sub RTask2_compute_status MyApp1 eventCount'
          'sub RTask3_smt MyApp1 eventCount,doubleEventCount'
        ]
  describe "with AUTOREPEAT type, sync function and no subtasks", ->
    helpers.defines(MyApp_EventCount).creates('entity', 'MyApp', eventCount: 10)
    func = ->
      @log.push "RTask1_smt.run eventCount=#{@entity.eventCount}"
    beforeEach ->
      @task1 = new R.Task(@root, new R.TaskDef(@universe, 'smt', R.TaskDef.AUTOREPEAT, func.bind(@)))
      @log.inspectTaskEvents(@task1, ['completed', 'finalized'])
    it "reports completion and finalization", (done) ->
      await @task1.on 'finalized', defer()
      @log.equal [
        'RTask1_smt.run eventCount=10'
        'RTask1_smt.completed'
        'RTask1_smt.finalized'
      ]
      done()
    it "accumulates dependencies when reading an entity", ->
      await @task1.on 'finalized', defer()
      deps = @task1.completedInvocation._topicsByDepId
      deepEqual Object.keys(deps).sort(), ['MyApp1']
      deepEqual Object.keys(deps.MyApp1).sort(), ['eventCount']
    it "runs again when dependencies are updated", (done) ->
      await @task1.on 'finalized', defer()
      await @performAndWait defer(), "update eventCount", =>
        @entity.eventCount = 20
      @log.equal [
        'RTask1_smt.run eventCount=10'
        'RTask1_smt.completed'
        'RTask1_smt.finalized'
        'RTask1_smt.run eventCount=20'
        'RTask1_smt.completed'
        'RTask1_smt.finalized'
      ]
      done()
  describe "with ONESHOT type and a failing sync function", ->
    beforeEach ->
      @task1 = new R.Task(@root, new R.TaskDef(@universe, 'smt', R.TaskDef.ONESHOT, (-> throw new Error 'ETEST')))
      @log.inspectTaskEvents(@task1, ['completed', 'finalized'])
    it "reports a failed completion", (done) ->
      await @task1.schedule().waitCompleted(defer())
      equal "#{@task1.lastError}", "Error: ETEST"
      done()
# Specs for R.Collection: list collections, query subscription lifecycle, and
# computed source collections resolved asynchronously.
describe "R.Collection", ->
  helpers.usesUniverse().usesRootEntity().usesLog()
    .defines(LRRuby_Basics)
  describe 'basic functionality', ->
    helpers.defines(MyApp_RubyCollection1).creates('entity', 'MyApp')
    it "works fine for a proof of concept", (done) ->
      equal typeof(@entity.rubies), 'object'
      ok @entity.rubies instanceof R.ListCollection
      equal @entity.rubies.all.length, 0
      ruby = @universe.create('LRRuby')
      @entity.rubies.push(ruby)
      equal @entity.rubies.all.length, 1
      task = new R.Task @root, new R.TaskDef @universe, "Add Ruby", R.TaskDef.AUTOREPEAT, =>
        @entity.rubies.push(ruby)
        equal @entity.rubies.all.length, 2
      await task.schedule().waitFinalized defer()
      # a task that reads the collection registers exactly one query...
      keys = Object.keys(@entity.rubies._queries)
      equal keys.length, 1, "Wrong number of subscribed queries (#{keys.length} instead of 1), actual subscribed IDs: #{JSON.stringify keys}"
      task.dispose()
      # ...and disposing the task must unregister it
      keys = Object.keys(@entity.rubies._queries)
      equal keys.length, 0, "Wrong number of subscribed queries (#{keys.length} instead of 0), actual subscribed IDs: #{JSON.stringify keys}"
      done()
  describe "computed source collections", ->
    helpers.defines(MyApp_SystemRubies).createsInJob('entity', 'MyApp')
    it "work", (done) ->
      equal @entity.systemRubies.all.length, 3
      equal @entity.systemRubies.all[0].constructor.name, 'LRRuby'
      equal @entity.systemRubies.all[1].constructor.name, 'LRRuby'
      equal @entity.systemRubies.all[2].constructor.name, 'LRRuby'
      equal @entity.systemRubies.all[0].path, '/usr/bin/ruby'
      equal @entity.systemRubies.all[1].path, '/usr/local/bin/ruby'
      done()
| 119172 | { ok, equal, deepEqual } = require 'assert'
_ = require 'underscore'
R = require "../#{process.env.JSLIB or 'lib'}/reactive"
# Assertion helper with (matcher, fn) argument order — the reverse of
# assert.throws(fn, matcher) — which reads more naturally in the specs below.
throws = (expectedError, block) ->
  require('assert').throws block, expectedError
# Test-only convenience: universe.kindNames yields the sorted kind names of all defined entity kinds.
Object.defineProperty R.Universe::, 'kindNames', get: -> _.pluck(@kinds, 'kind').sort()
# Invokes the given function and returns the error it threw,
# or null when it finished without throwing.
catchAndReturnError = (func) ->
  try
    func()
    null
  catch err
    err
# Minimal AOP helper: Advise.before(obj, 'method', fn) swaps obj.method for a
# wrapper that first invokes fn (with the same arguments, `this` bound to obj)
# and then delegates to the original method, returning its result.
Advise =
  before: (object, key, decorator) ->
    wrapped = object[key]
    object[key] = (args...) ->
      decorator.apply(object, args)
      wrapped.apply(object, args)
# Shared mocha setup helpers. Each helper registers beforeEach/afterEach hooks
# and returns `this` so setups can be chained, e.g.
# helpers.usesUniverse().usesRootEntity().usesLog().
helpers =
  usesUniverse: ->
    beforeEach -> @universe = new R.Universe()
    afterEach -> @universe.dispose(); delete @universe
    return this
  usesRootEntity: ->
    beforeEach ->
      @universe.define(EmptyEntity)
      @root = @universe.create('EmptyEntity')
      # per-test helper; note the `name` argument is not forwarded to
      # @universe.performAndWait here
      @performAndWait = (done, name, func) =>
        @universe.performAndWait(func, done)
    return this
  defines: (entityClasses...) ->
    beforeEach ->
      @universe.define(entityClasses)
    return this
  creates: (fieldName, kind, attributes={}) ->
    beforeEach ->
      this[fieldName] = @universe.create(kind, attributes)
    return this
  performAndWait: (name, func) ->
    beforeEach (done) ->
      @performAndWait done, name, func.bind(this)
  createsInJob: (fieldName, kind, attributes={}) ->
    helpers.performAndWait "initialize", ->
      this[fieldName] = @universe.create(kind, attributes)
    return this
  usesLog: ->
    beforeEach ->
      @log = []
      # @log.equal compares against the expectation and then CLEARS the log,
      # so consecutive assertions only see entries recorded in between
      @log.equal = (expected) =>
        equal JSON.stringify(@log, null, 2), JSON.stringify(expected, null, 2)
        @log.splice(0, @log.length)
      # returns a no-arg callback that appends `line` when invoked
      @log.pusher = (line) =>
        return =>
          @log.push(line); undefined
      # wraps `func` so each call logs `line` first, preserving this/args/result
      @log.wrap = (line, func) =>
        log = @log
        return (args...) ->
          log.push line
          return func.apply(this, args)
      # subscribes pusher callbacks to the requested task lifecycle events
      @log.inspectTaskEvents = (task, events) ->
        if 'finalized' in events
          task.on 'finalized', @pusher("#{task}.finalized")
        if 'completed' in events
          task.on 'completed', @pusher("#{task}.completed")
        return this
    afterEach ->
      delete @log
    return this
  inspectReads: ->
    beforeEach ->
      Advise.before @universe, 'reading', (entity, topic) => @log.push("reading #{entity._r_id} #{topic}") if @log
    return this
  inspectBroadcasts: ->
    beforeEach ->
      Advise.before @universe, 'broadcast', (entity, topic) => @log.push("change #{entity._r_id} #{topic}") if @log
    return this
  inspectSubscriptions: ->
    beforeEach ->
      Advise.before @universe, 'subscribe', (dependent, entity, topicSet) =>
        if @log
          topics = (if topicSet then Object.keys(topicSet).join(',') else 'null')
          @log.push("sub #{dependent._r_id} #{entity._r_id} #{topics}")
    return this
# --- Fixture entity classes used by the specs above -------------------------
class EmptyEntity
# Entity with an automatically_* block; the block reruns whenever its
# dependency (@someValue) changes.
class SimpleEntity_WithAutomaticalBlock
  schema:
    someValue: { type: 'int' }
  automatically_collect_something: ->
    (@list or= []).push @someValue
# Base/derived/including trio exercising $extends and $includes inheritance.
class BaseEntity
  schema:
    someValue: { type: 'int', default: 42 }
  initialize: ->
    @baseInitialized = yes
  boringMethod: ->
    24
  automatically_copy_someValue: ->
    @someValueCopy = @someValue
class DerivedEntity
  schema:
    $extends: 'BaseEntity'
    anotherValue: { type: 'int', default: 33 }
  initialize: ->
    @derivedInitialized = yes
  veryBoringMethod: ->
    12
  automatically_copy_anotherValue: ->
    @anotherValueCopy = @anotherValue
class IncludingEntity
  schema:
    $includes: BaseEntity
# Two method-only classes merged into the single 'MyApp' kind (name prefix).
class MyApp_SomeMethods
  something: (x) -> x + 1
class MyApp_OtherMethods
  another: (x) -> x * 2
class MyApp_EventCount
  schema:
    eventCount: { type: 'int' }
class MyApp_EventCount_WithDefault
  schema:
    eventCount: { type: 'int', default: 42 }
class MyApp_DoubleEventCount
  schema:
    doubleEventCount: { type: 'int' }
  compute_doubleEventCount: ->
    @eventCount * 2
class MyApp_Status1
  schema:
    status: { type: 'string' }
  compute_status: ->
    "eventCount is #{@eventCount}"
class MyApp_Status2
  schema:
    status: { type: 'string' }
  compute_status: ->
    "doubleEventCount ends with #{@doubleEventCount % 10}"
class MyApp_SuperStatus
  schema:
    superStatus: { type: 'string' }
  compute_superStatus: ->
    "(#{@status})"
class MyApp_PathCollection
  schema:
    pluginPaths: { collection: 'list', type: 'string' }
class MyApp_RubyCollection1
  schema:
    rubies: { collection: 'list', type: 'LRRuby' }
class LRRuby_Basics
  schema:
    path: { type: String }
# Async computed attribute: resolves via callback after a short timeout.
class LRRuby_Version
  schema:
    version: { type: String }
  compute_version: (callback) ->
    setTimeout =>
      if @path is '/usr/bin/ruby'
        callback(null, "1.8.7")
      else
        callback(null, "1.9.3")
    , 5
# Async computed collection: the callback supplies plain attribute hashes
# that become LRRuby entities.
class MyApp_SystemRubies
  schema:
    systemRubies: { collection: 'list', type: 'LRRuby' }
  compute_systemRubies: (callback) ->
    process.nextTick ->
      callback null, [
        { path: '/usr/bin/ruby' }
        { path: '/usr/local/bin/ruby' }
        { path: '~/.rvm/rubies/rvm-ruby-1.9.2/bin/ruby' }
      ]
# Duck-type definition consumed by Tom_InSouthboundDuckling's
# 'LittleQuacker?' attribute. The `name` had been replaced by an '<NAME>'
# placeholder; it must be 'LittleQuacker' so the type-mismatch error reads
# "expected a LittleQuacker-like object" (asserted by the ignited-bomb spec).
LittleQuacker =
  name: 'LittleQuacker'
  duck:
    quacks: yes
    swims: no
# Duck-typed optional attribute; hunger() is offset by the duck's calories.
class Tom_InSouthboundDuckling
  schema:
    southernFriedDuck: { type: 'LittleQuacker?' }
  hunger: ->
    100 - (@southernFriedDuck?.calories or 0)
# Mutually-referencing optional entity types (reference cycle Tom <-> Jerry).
class Jerry_MutualRef
  schema:
    enemy: { type: 'Tom?' }
class Tom_MutualRef
  schema:
    enemy: { type: 'Jerry?' }
# Top-level sanity check: the library exports a semver-style VERSION string.
describe 'R', ->
  it "exports VERSION", ->
    ok R.VERSION.match /^\d+\.\d+\.\d+$/
# Specs for R.Universe: unique id generation, kind definition and creation.
describe 'R.Universe', ->
  helpers.usesUniverse()
  describe ".uniqueId(className, [detail]) return value", ->
    it "is a string", ->
      equal typeof(@universe.uniqueId('Foo')), 'string'
    it "is composed of the prefix followed by some digits", ->
      ok @universe.uniqueId('Foo').match /^Foo\d+$/
    it "is different on each call", ->
      ok @universe.uniqueId('Foo') != @universe.uniqueId('Foo')
    describe "(when the <detail> argument is specified)", ->
      it "has an alphanumeric representation of <detail> appended to the id", ->
        id = @universe.uniqueId('Foo', 'Bar Boz')
        ok id.match(/^Foo\d+_Bar_Boz$/), "id '#{id}' does not match"
  describe '#define(klass)', ->
    describe "given a regular class", ->
      helpers.defines(EmptyEntity)
      it "defines a kind matching the given class name", ->
        deepEqual @universe.kindNames, ['EmptyEntity']
      it "adds the given class to the list of the kind's mixins", ->
        deepEqual @universe.kinds.EmptyEntity.klasses, [EmptyEntity]
    describe "given a class with a two-part name like Foo_Bar", ->
      beforeEach -> @universe.define(MyApp_SomeMethods)
      it "uses only the first part as the entity kind", ->
        deepEqual @universe.kindNames, ['MyApp']
    describe "given multiple classes for the same entity", ->
      beforeEach -> @universe.define(MyApp_SomeMethods)
      beforeEach -> @universe.define(MyApp_OtherMethods)
      it "merges them into a single entity kind", ->
        deepEqual @universe.kindNames, ['MyApp']
  describe '#create(kind, [attributes])', ->
    describe "given a valid kind", ->
      helpers.defines(MyApp_SomeMethods).creates('entity', 'MyApp')
      it "returns a new entity of that kind", ->
        ok @entity.isReactiveEntity
        equal @entity.kind, 'MyApp'
    describe "given an unknown kind", ->
      beforeEach -> @err = catchAndReturnError => @universe.create('NonExistent')
      it "throws an error", ->
        equal @err.message, "R.create: unknown entity kind 'NonExistent'"
# Specs for R.Entity composition: class merging, automatically_* blocks,
# $extends/$includes inheritance and cross-entity references.
describe 'R.Entity', ->
  helpers.usesUniverse().usesRootEntity()
  describe ".constructor.name", ->
    helpers.defines(MyApp_SomeMethods).creates('entity', 'MyApp')
    it "matches entity kind", ->
      equal @entity.constructor.name, 'MyApp'
  describe "composed of two classes", ->
    helpers.defines(MyApp_SomeMethods, MyApp_OtherMethods).creates('entity', 'MyApp')
    it "has the methods defined in both classes", ->
      equal @entity.something(10), 11
      equal @entity.another(10), 20
  describe "with a magical automatically_ block", ->
    helpers.defines(SimpleEntity_WithAutomaticalBlock).createsInJob('entity', 'SimpleEntity', someValue: 42)
    it "runs the block after entity creation", ->
      deepEqual @entity.list, [42]
    describe "after the block's dependencies are updated", ->
      helpers.performAndWait "update someValue", ->
        @entity.someValue = 33
      it "reruns the block after entity creation", ->
        deepEqual @entity.list, [42, 33]
  describe "that extends another entity", ->
    helpers.defines(BaseEntity, DerivedEntity).createsInJob('entity', 'DerivedEntity')
    it "has its own initializers invoked", ->
      ok @entity.derivedInitialized
    it "inherits the base entity's initializers", ->
      ok @entity.baseInitialized
    it "has the derived entity's attributes", ->
      equal @entity.anotherValue, 33
    it "inherits the base entity's attributes", ->
      equal @entity.someValue, 42
    it "has the derived entity's methods", ->
      equal @entity.veryBoringMethod(), 12
    it "inherits the base entity's methods", ->
      equal @entity.boringMethod(), 24
    it "runs the derived entity's automatic blocks", ->
      equal @entity.anotherValueCopy, 33
    it "inherits the base entity's automatic blocks", ->
      equal @entity.someValueCopy, 42
  describe "that includes another entity", ->
    helpers.defines(IncludingEntity).createsInJob('entity', 'IncludingEntity')
    it "obtains the included entity's initializers", ->
      ok @entity.baseInitialized
    it "obtains the included entity's attributes", ->
      equal @entity.someValue, 42
    it "obtains the included entity's methods", ->
      equal @entity.boringMethod(), 24
    it "obtains the included entity's automatic blocks", ->
      equal @entity.someValueCopy, 42
  describe "that references a non-existent entity", ->
    it "can be defined", ->
      @universe.define(Tom_MutualRef)
    it "cannot be instantiated", ->
      @universe.define(Tom_MutualRef)
      throws /Unknown type/, =>
        tom = @universe.create('Tom')
  describe "with a cyclic reference to another entity", ->
    it "can be defined and instantiated", ->
      @universe.define(Tom_MutualRef, Jerry_MutualRef)
      tom = @universe.create('Tom')
      jerry = @universe.create('Jerry')
# Specs for R.Entity attributes: plain attributes, schema enforcement,
# defaults, typed coercion, duck types, and (chains of) computed attributes.
describe 'R.Entity attribute', ->
  helpers.usesUniverse().usesLog()
  describe "(a boring regular one)", ->
    helpers.defines(MyApp_EventCount).creates('entity', 'MyApp', eventCount: 10)
    it "can be initialized by passing a value into constructor", ->
      equal @entity.eventCount, 10
    describe "when read via .get(attr)", ->
      helpers.inspectReads()
      beforeEach -> @value = @entity.get('eventCount')
      it "returns the current value", ->
        equal @value, 10
      it "records the dependency", ->
        @log.equal ["reading MyApp1 eventCount"]
    it "can be read directly", ->
      equal @entity.eventCount, 10
    it "can be set via .set()", ->
      @entity.set('eventCount', 20)
      equal @entity.eventCount, 20
    describe "when set directly", ->
      helpers.inspectBroadcasts()
      beforeEach ->
        @entity.eventCount = 20
      it "updates the value", ->
        @entity.eventCount = 20
        equal @entity.eventCount, 20
      it "broadcasts a change event", ->
        @log.equal ["change MyApp1 eventCount"]
    describe 'when set to the same value', ->
      helpers.inspectBroadcasts()
      beforeEach ->
        @entity.eventCount = 10
      it "does not broadcast a change event", ->
        @log.equal []
  describe "not defined by the schema", ->
    helpers.defines(MyApp_EventCount).creates('entity', 'MyApp', eventCount: 10)
    it "throws an error when reading via #get", ->
      equal((catchAndReturnError => @entity.get('missingAttr'))?.message, "Unknown attribute 'missingAttr'")
    it "throws an error when writing via #set", ->
      equal((catchAndReturnError => @entity.set('missingAttr', 20))?.message, "Unknown attribute 'missingAttr'")
  describe "with a default value", ->
    helpers.usesRootEntity().defines(MyApp_EventCount_WithDefault).creates('entity', 'MyApp')
    it "is initially set to the provided default value", ->
      equal @entity.eventCount, 42
  describe "with a type", ->
    helpers.usesRootEntity().defines(MyApp_EventCount).creates('entity', 'MyApp')
    it "is initially set to the type-specific default value", ->
      equal @entity.eventCount, 0
    describe "when assigned a value of a different compatible type", ->
      it "coerces the value into the correct type", ->
        @entity.eventCount = "11"
        equal @entity.eventCount, 11
        equal (typeof @entity.eventCount), 'number'
    describe "when assigned a value of an incompatible type", ->
      it "throws an error", ->
        throws /expected an integer/, =>
          @entity.eventCount = []
        throws /expected an integer/, =>
          @entity.eventCount = null
  describe "with a duck type", ->
    helpers.defines(LittleQuacker, Tom_InSouthboundDuckling).creates('tom', 'Tom')
    describe "when provided with the the right duck", ->
      it "eats it promptly", ->
        @tom.southernFriedDuck = { quacks: yes, swims: no, calories: 100 }
        equal @tom.hunger(), 0
    describe "when provided with an ignited bomb", ->
      it "chokes and fails to eat", ->
        throws /expected a LittleQuacker-like object/, =>
          # 1 kg TNT generates exactly 1 Mcal energy; assuming a 250g duckling and a weight match here
          @tom.southernFriedDuck = { boomIn: 3, calories: 250000 }
        equal @tom.hunger(), 100
  describe "with a compute function", ->
    helpers.usesRootEntity().defines(MyApp_EventCount, MyApp_Status1)
    helpers.createsInJob('entity', 'MyApp', eventCount: 10)
    it "is initially set to the default value", ->
      equal @universe.create('MyApp', eventCount: 10).status, ""
    it "is eventually set to the computed value", ->
      equal @entity.status, "eventCount is 10"
    describe "when the source value is updated", ->
      helpers.performAndWait "set eventCount", ->
        @entity.eventCount = 20
      it "the computed values is also eventually updated", ->
        equal @entity.status, "eventCount is 20"
  describe "with an async compute function", ->
    helpers.usesRootEntity().defines(LRRuby_Basics, LRRuby_Version)
    helpers.createsInJob('entity', 'LRRuby', path: '/usr/bin/ruby')
    it "is initially set to the default value", ->
      equal @universe.create('LRRuby', path: '/usr/bin/ruby').version, ""
    it "is eventually set to the computed value", ->
      equal @entity.version, "1.8.7"
    describe "when the source value is updated", ->
      helpers.performAndWait "update ruby.path", ->
        @entity.path = '/usr/local/bin/ruby'
      it "the computed values is also eventually updated", ->
        equal @entity.version, "1.9.3"
  describe "with a chain of dependent computed attributes", ->
    helpers.usesRootEntity().defines(MyApp_EventCount, MyApp_DoubleEventCount, MyApp_SuperStatus, MyApp_Status2)
    helpers.createsInJob('entity', 'MyApp', eventCount: 14)
    it "all computed attributes are eventually set to the final values", ->
      equal @entity.doubleEventCount, 28
      equal @entity.status, "doubleEventCount ends with 8"
      equal @entity.superStatus, "(doubleEventCount ends with 8)"
    describe "when the source attribute is updated", ->
      helpers.inspectBroadcasts()
      helpers.performAndWait "set eventCount", ->
        @entity.eventCount = 16
      it "the computed attributes are all eventually updated", ->
        equal @entity.doubleEventCount, 32
        equal @entity.status, "doubleEventCount ends with 2"
        equal @entity.superStatus, "(doubleEventCount ends with 2)"
      it "change events are broadcasted for the source and computed attributes", ->
        @log.equal ["change MyApp1 eventCount", "change MyApp1 doubleEventCount", "change MyApp1 status", "change MyApp1 superStatus"]
    describe "when the source attribute is set to the same value", ->
      helpers.inspectBroadcasts()
      helpers.performAndWait "set eventCount", ->
        @entity.eventCount = 14
      it "no change events are broadcasted", ->
        @log.equal []
    describe "when the source attribute is updated, but intermediate attribute value stays the same", ->
      helpers.inspectBroadcasts()
      helpers.performAndWait "set eventCount", ->
        @entity.eventCount = 9
      it "no change events are broadcasted for unmodified values", ->
        @log.equal ["change MyApp1 eventCount", "change MyApp1 doubleEventCount"]
describe "R.Task", ->
helpers.usesUniverse().usesRootEntity().usesLog()
describe "with ONESHOT type, sync function and no subtasks", ->
beforeEach ->
@task1 = new R.Task(@root, new R.TaskDef(@universe, 'smt', R.TaskDef.ONESHOT, @log.pusher("RTask1_smt.run")))
@log.inspectTaskEvents(@task1, ['completed', 'finalized'])
it "reports completion and finalization", (done) ->
await @task1.schedule().waitFinalized(defer())
@log.equal [
'RTask1_smt.run'
'RTask1_smt.completed'
'RTask1_smt.finalized'
]
done()
describe "with ONESHOT type, sync function and a subtask", ->
beforeEach ->
func1 = =>
@log.push "RTask1_parent.start"
@task2.schedule()
@log.push "RTask1_parent.end"
@task1 = new R.Task(@root, new R.TaskDef(@universe, 'parent', R.TaskDef.ONESHOT, func1, multiuse: no, reactive: no))
@task2 = new R.Task(@root, new R.TaskDef(@universe, 'child', R.TaskDef.ONESHOT, @log.pusher("RTask2_child.run"), multiuse: no, reactive: no))
@log.inspectTaskEvents(@task1, ['completed', 'finalized'])
.inspectTaskEvents(@task2, ['completed', 'finalized'])
it "reports parent task completion, then subtask completion/finalization, then parent task finalization", (done) ->
await @task1.schedule().waitFinalized(defer())
@log.equal [
'RTask1_parent.start'
'RTask1_parent.end'
'RTask1_parent.completed'
'RTask2_child.run'
'RTask2_child.completed'
'RTask2_child.finalized'
'RTask1_parent.finalized'
]
done()
describe "with AUTOREPEAT type and a sync function that loves to change its dependencies", ->
helpers.inspectSubscriptions().defines(MyApp_EventCount, MyApp_DoubleEventCount, MyApp_Status1).creates('entity', 'MyApp', eventCount: 10)
func = ->
for key in @keys
dummy = @entity[key]
beforeEach ->
@keys = ['<KEY>status']
@task1 = new R.Task(@root, new R.TaskDef(@universe, 'smt', R.TaskDef.AUTOREPEAT, func.bind(@)))
it "subscribes to the initial set topics", (done) ->
await @task1.on 'finalized', defer()
@log.equal [
'sub RTask1_compute_doubleEventCount MyApp1 eventCount'
'sub RTask2_compute_status MyApp1 eventCount'
'sub RTask3_smt MyApp1 eventCount,status'
]
done()
describe "when a dependency is removed", ->
helpers.performAndWait "update eventCount", ->
@keys = ['<KEY>']
@entity.eventCount = 11
it "resubscribes to the new set of topics", ->
@log.equal [
'sub RTask1_compute_doubleEventCount MyApp1 eventCount'
'sub RTask2_compute_status MyApp1 eventCount'
'sub RTask3_smt MyApp1 eventCount,status'
'sub RTask1_compute_doubleEventCount MyApp1 eventCount'
'sub RTask2_compute_status MyApp1 eventCount'
'sub RTask3_smt MyApp1 eventCount'
]
describe "when a dependency is added", ->
helpers.performAndWait "update eventCount", ->
@keys = ['<KEY>', '<KEY>']
@entity.eventCount = 12
it "resubscribes to the new set of topics", ->
@log.equal [
'sub RTask1_compute_doubleEventCount MyApp1 eventCount'
'sub RTask2_compute_status MyApp1 eventCount'
'sub RTask3_smt MyApp1 eventCount,status'
'sub RTask1_compute_doubleEventCount MyApp1 eventCount'
'sub RTask2_compute_status MyApp1 eventCount'
'sub RTask3_smt MyApp1 eventCount,doubleEventCount'
]
describe "with AUTOREPEAT type, sync function and no subtasks", ->
helpers.defines(MyApp_EventCount).creates('entity', 'MyApp', eventCount: 10)
func = ->
@log.push "RTask1_smt.run eventCount=#{@entity.eventCount}"
beforeEach ->
@task1 = new R.Task(@root, new R.TaskDef(@universe, 'smt', R.TaskDef.AUTOREPEAT, func.bind(@)))
@log.inspectTaskEvents(@task1, ['completed', 'finalized'])
it "reports completion and finalization", (done) ->
await @task1.on 'finalized', defer()
@log.equal [
'RTask1_smt.run eventCount=10'
'RTask1_smt.completed'
'RTask1_smt.finalized'
]
done()
it "accumulates dependencies when reading an entity", ->
await @task1.on 'finalized', defer()
deps = @task1.completedInvocation._topicsByDepId
deepEqual Object.keys(deps).sort(), ['MyApp1']
deepEqual Object.keys(deps.MyApp1).sort(), ['eventCount']
it "runs again when dependencies are updated", (done) ->
await @task1.on 'finalized', defer()
await @performAndWait defer(), "update eventCount", =>
@entity.eventCount = 20
@log.equal [
'RTask1_smt.run eventCount=10'
'RTask1_smt.completed'
'RTask1_smt.finalized'
'RTask1_smt.run eventCount=20'
'RTask1_smt.completed'
'RTask1_smt.finalized'
]
done()
describe "with ONESHOT type and a failing sync function", ->
beforeEach ->
@task1 = new R.Task(@root, new R.TaskDef(@universe, 'smt', R.TaskDef.ONESHOT, (-> throw new Error 'ETEST')))
@log.inspectTaskEvents(@task1, ['completed', 'finalized'])
it "reports a failed completion", (done) ->
await @task1.schedule().waitCompleted(defer())
equal "#{@task1.lastError}", "Error: ETEST"
done()
describe "R.Collection", ->
helpers.usesUniverse().usesRootEntity().usesLog()
.defines(LRRuby_Basics)
describe 'basic functionality', ->
helpers.defines(MyApp_RubyCollection1).creates('entity', 'MyApp')
it "works fine for a proof of concept", (done) ->
equal typeof(@entity.rubies), 'object'
ok @entity.rubies instanceof R.ListCollection
equal @entity.rubies.all.length, 0
ruby = @universe.create('LRRuby')
@entity.rubies.push(ruby)
equal @entity.rubies.all.length, 1
task = new R.Task @root, new R.TaskDef @universe, "Add Ruby", R.TaskDef.AUTOREPEAT, =>
@entity.rubies.push(ruby)
equal @entity.rubies.all.length, 2
await task.schedule().waitFinalized defer()
keys = Object.keys(@entity.rubies._queries)
equal keys.length, 1, "Wrong number of subscribed queries (#{keys.length} instead of 1), actual subscribed IDs: #{JSON.stringify keys}"
task.dispose()
keys = Object.keys(@entity.rubies._queries)
equal keys.length, 0, "Wrong number of subscribed queries (#{keys.length} instead of 0), actual subscribed IDs: #{JSON.stringify keys}"
done()
describe "computed source collections", ->
helpers.defines(MyApp_SystemRubies).createsInJob('entity', 'MyApp')
it "work", (done) ->
equal @entity.systemRubies.all.length, 3
equal @entity.systemRubies.all[0].constructor.name, 'LRRuby'
equal @entity.systemRubies.all[1].constructor.name, 'LRRuby'
equal @entity.systemRubies.all[2].constructor.name, 'LRRuby'
equal @entity.systemRubies.all[0].path, '/usr/bin/ruby'
equal @entity.systemRubies.all[1].path, '/usr/local/bin/ruby'
done()
| true | { ok, equal, deepEqual } = require 'assert'
_ = require 'underscore'
R = require "../#{process.env.JSLIB or 'lib'}/reactive"
# Assertion helper with (matcher, fn) argument order — the reverse of
# assert.throws(fn, matcher) — which reads more naturally in the specs below.
throws = (expectedError, block) ->
  require('assert').throws block, expectedError
# Test-only convenience: universe.kindNames yields the sorted kind names of all defined entity kinds.
Object.defineProperty R.Universe::, 'kindNames', get: -> _.pluck(@kinds, 'kind').sort()
# Invokes the given function and returns the error it threw,
# or null when it finished without throwing.
catchAndReturnError = (func) ->
  try
    func()
    null
  catch err
    err
# Minimal AOP helper: Advise.before(obj, 'method', fn) swaps obj.method for a
# wrapper that first invokes fn (with the same arguments, `this` bound to obj)
# and then delegates to the original method, returning its result.
Advise =
  before: (object, key, decorator) ->
    wrapped = object[key]
    object[key] = (args...) ->
      decorator.apply(object, args)
      wrapped.apply(object, args)
# Shared mocha setup helpers. Each helper registers beforeEach/afterEach hooks
# and returns `this` so setups can be chained, e.g.
# helpers.usesUniverse().usesRootEntity().usesLog().
helpers =
  usesUniverse: ->
    beforeEach -> @universe = new R.Universe()
    afterEach -> @universe.dispose(); delete @universe
    return this
  usesRootEntity: ->
    beforeEach ->
      @universe.define(EmptyEntity)
      @root = @universe.create('EmptyEntity')
      # per-test helper; note the `name` argument is not forwarded to
      # @universe.performAndWait
      @performAndWait = (done, name, func) =>
        @universe.performAndWait(func, done)
    return this
  defines: (entityClasses...) ->
    beforeEach ->
      @universe.define(entityClasses)
    return this
  creates: (fieldName, kind, attributes={}) ->
    beforeEach ->
      this[fieldName] = @universe.create(kind, attributes)
    return this
  performAndWait: (name, func) ->
    beforeEach (done) ->
      @performAndWait done, name, func.bind(this)
  createsInJob: (fieldName, kind, attributes={}) ->
    helpers.performAndWait "initialize", ->
      this[fieldName] = @universe.create(kind, attributes)
    return this
  usesLog: ->
    beforeEach ->
      @log = []
      # @log.equal compares against the expectation and then CLEARS the log,
      # so consecutive assertions only see entries recorded in between
      @log.equal = (expected) =>
        equal JSON.stringify(@log, null, 2), JSON.stringify(expected, null, 2)
        @log.splice(0, @log.length)
      # returns a no-arg callback that appends `line` when invoked
      @log.pusher = (line) =>
        return =>
          @log.push(line); undefined
      # wraps `func` so each call logs `line` first, preserving this/args/result
      @log.wrap = (line, func) =>
        log = @log
        return (args...) ->
          log.push line
          return func.apply(this, args)
      # subscribes pusher callbacks to the requested task lifecycle events
      @log.inspectTaskEvents = (task, events) ->
        if 'finalized' in events
          task.on 'finalized', @pusher("#{task}.finalized")
        if 'completed' in events
          task.on 'completed', @pusher("#{task}.completed")
        return this
    afterEach ->
      delete @log
    return this
  inspectReads: ->
    beforeEach ->
      Advise.before @universe, 'reading', (entity, topic) => @log.push("reading #{entity._r_id} #{topic}") if @log
    return this
  inspectBroadcasts: ->
    beforeEach ->
      Advise.before @universe, 'broadcast', (entity, topic) => @log.push("change #{entity._r_id} #{topic}") if @log
    return this
  inspectSubscriptions: ->
    beforeEach ->
      Advise.before @universe, 'subscribe', (dependent, entity, topicSet) =>
        if @log
          topics = (if topicSet then Object.keys(topicSet).join(',') else 'null')
          @log.push("sub #{dependent._r_id} #{entity._r_id} #{topics}")
    return this
# --- Entity fixtures for the composition/inheritance specs below ---

# Entity with no schema; used as a root/placeholder kind.
class EmptyEntity

# Carries an automatically_ block that appends someValue to @list every time
# the block's dependencies change.
class SimpleEntity_WithAutomaticalBlock
  schema:
    someValue: { type: 'int' }
  automatically_collect_something: ->
    (@list or= []).push @someValue

# Base class exercised via both $extends and $includes.
class BaseEntity
  schema:
    someValue: { type: 'int', default: 42 }
  initialize: ->
    @baseInitialized = yes
  boringMethod: ->
    24
  automatically_copy_someValue: ->
    @someValueCopy = @someValue

# Inherits BaseEntity through the schema-level $extends directive.
class DerivedEntity
  schema:
    $extends: 'BaseEntity'
    anotherValue: { type: 'int', default: 33 }
  initialize: ->
    @derivedInitialized = yes
  veryBoringMethod: ->
    12
  automatically_copy_anotherValue: ->
    @anotherValueCopy = @anotherValue

# Mixes BaseEntity in through the schema-level $includes directive.
class IncludingEntity
  schema:
    $includes: BaseEntity
# --- MyApp fixtures: multiple classes merged into a single 'MyApp' kind,
# plus attribute/compute variations used by the attribute specs ---
class MyApp_SomeMethods
  something: (x) -> x + 1

class MyApp_OtherMethods
  another: (x) -> x * 2

class MyApp_EventCount
  schema:
    eventCount: { type: 'int' }

class MyApp_EventCount_WithDefault
  schema:
    eventCount: { type: 'int', default: 42 }

# Synchronous compute function depending on eventCount.
class MyApp_DoubleEventCount
  schema:
    doubleEventCount: { type: 'int' }
  compute_doubleEventCount: ->
    @eventCount * 2

class MyApp_Status1
  schema:
    status: { type: 'string' }
  compute_status: ->
    "eventCount is #{@eventCount}"

# Variant whose status depends on doubleEventCount (so it can stay unchanged
# when doubleEventCount changes in the last digit only).
class MyApp_Status2
  schema:
    status: { type: 'string' }
  compute_status: ->
    "doubleEventCount ends with #{@doubleEventCount % 10}"

# Second-level compute: depends on the computed `status` attribute.
class MyApp_SuperStatus
  schema:
    superStatus: { type: 'string' }
  compute_superStatus: ->
    "(#{@status})"
# --- Collection fixtures ---
class MyApp_PathCollection
  schema:
    pluginPaths: { collection: 'list', type: 'string' }

class MyApp_RubyCollection1
  schema:
    rubies: { collection: 'list', type: 'LRRuby' }

class LRRuby_Basics
  schema:
    path: { type: String }

# Async compute: version is derived from path via a callback after a 5ms delay.
class LRRuby_Version
  schema:
    version: { type: String }
  compute_version: (callback) ->
    setTimeout =>
      if @path is '/usr/bin/ruby'
        callback(null, "1.8.7")
      else
        callback(null, "1.9.3")
    , 5

# Async computed source collection that yields three plain-object rubies;
# the collection machinery is expected to instantiate LRRuby entities from them.
class MyApp_SystemRubies
  schema:
    systemRubies: { collection: 'list', type: 'LRRuby' }
  compute_systemRubies: (callback) ->
    process.nextTick ->
      callback null, [
        { path: '/usr/bin/ruby' }
        { path: '/usr/local/bin/ruby' }
        { path: '~/.rvm/rubies/rvm-ruby-1.9.2/bin/ruby' }
      ]
# Duck-type declaration used by the 'LittleQuacker?' attribute specs: a value
# is accepted when it structurally matches the `duck` shape below.
# NOTE(review): the name value is an anonymization placeholder from the
# dataset; kept verbatim.
LittleQuacker =
  name: 'PI:NAME:<NAME>END_PI'
  duck:
    quacks: yes
    swims: no

class Tom_InSouthboundDuckling
  schema:
    southernFriedDuck: { type: 'LittleQuacker?' }
  hunger: ->
    100 - (@southernFriedDuck?.calories or 0)

# Mutually-referencing kinds for the dangling/cyclic reference specs.
class Jerry_MutualRef
  schema:
    enemy: { type: 'Tom?' }

class Tom_MutualRef
  schema:
    enemy: { type: 'Jerry?' }
# Smoke test for the package export: VERSION must be semver-shaped.
describe 'R', ->
  it "exports VERSION", ->
    ok R.VERSION.match /^\d+\.\d+\.\d+$/
# Specs for R.Universe: unique id generation, kind definition, entity creation.
describe 'R.Universe', ->
  helpers.usesUniverse()
  describe ".uniqueId(className, [detail]) return value", ->
    it "is a string", ->
      equal typeof(@universe.uniqueId('Foo')), 'string'
    it "is composed of the prefix followed by some digits", ->
      ok @universe.uniqueId('Foo').match /^Foo\d+$/
    it "is different on each call", ->
      ok @universe.uniqueId('Foo') != @universe.uniqueId('Foo')
    describe "(when the <detail> argument is specified)", ->
      it "has an alphanumeric representation of <detail> appended to the id", ->
        id = @universe.uniqueId('Foo', 'Bar Boz')
        ok id.match(/^Foo\d+_Bar_Boz$/), "id '#{id}' does not match"
  describe '#define(klass)', ->
    describe "given a regular class", ->
      helpers.defines(EmptyEntity)
      it "defines a kind matching the given class name", ->
        deepEqual @universe.kindNames, ['EmptyEntity']
      it "adds the given class to the list of the kind's mixins", ->
        deepEqual @universe.kinds.EmptyEntity.klasses, [EmptyEntity]
    describe "given a class with a two-part name like Foo_Bar", ->
      beforeEach -> @universe.define(MyApp_SomeMethods)
      it "uses only the first part as the entity kind", ->
        deepEqual @universe.kindNames, ['MyApp']
    describe "given multiple classes for the same entity", ->
      beforeEach -> @universe.define(MyApp_SomeMethods)
      beforeEach -> @universe.define(MyApp_OtherMethods)
      it "merges them into a single entity kind", ->
        deepEqual @universe.kindNames, ['MyApp']
  describe '#create(kind, [attributes])', ->
    describe "given a valid kind", ->
      helpers.defines(MyApp_SomeMethods).creates('entity', 'MyApp')
      it "returns a new entity of that kind", ->
        ok @entity.isReactiveEntity
        equal @entity.kind, 'MyApp'
    describe "given an unknown kind", ->
      beforeEach -> @err = catchAndReturnError => @universe.create('NonExistent')
      it "throws an error", ->
        equal @err.message, "R.create: unknown entity kind 'NonExistent'"
# Specs for entity composition: mixin merging, automatically_ blocks,
# $extends/$includes inheritance, and dangling/cyclic kind references.
describe 'R.Entity', ->
  helpers.usesUniverse().usesRootEntity()
  describe ".constructor.name", ->
    helpers.defines(MyApp_SomeMethods).creates('entity', 'MyApp')
    it "matches entity kind", ->
      equal @entity.constructor.name, 'MyApp'
  describe "composed of two classes", ->
    helpers.defines(MyApp_SomeMethods, MyApp_OtherMethods).creates('entity', 'MyApp')
    it "has the methods defined in both classes", ->
      equal @entity.something(10), 11
      equal @entity.another(10), 20
  describe "with a magical automatically_ block", ->
    helpers.defines(SimpleEntity_WithAutomaticalBlock).createsInJob('entity', 'SimpleEntity', someValue: 42)
    it "runs the block after entity creation", ->
      deepEqual @entity.list, [42]
    describe "after the block's dependencies are updated", ->
      helpers.performAndWait "update someValue", ->
        @entity.someValue = 33
      it "reruns the block after entity creation", ->
        deepEqual @entity.list, [42, 33]
  describe "that extends another entity", ->
    helpers.defines(BaseEntity, DerivedEntity).createsInJob('entity', 'DerivedEntity')
    it "has its own initializers invoked", ->
      ok @entity.derivedInitialized
    it "inherits the base entity's initializers", ->
      ok @entity.baseInitialized
    it "has the derived entity's attributes", ->
      equal @entity.anotherValue, 33
    it "inherits the base entity's attributes", ->
      equal @entity.someValue, 42
    it "has the derived entity's methods", ->
      equal @entity.veryBoringMethod(), 12
    it "inherits the base entity's methods", ->
      equal @entity.boringMethod(), 24
    it "runs the derived entity's automatic blocks", ->
      equal @entity.anotherValueCopy, 33
    it "inherits the base entity's automatic blocks", ->
      equal @entity.someValueCopy, 42
  describe "that includes another entity", ->
    helpers.defines(IncludingEntity).createsInJob('entity', 'IncludingEntity')
    it "obtains the included entity's initializers", ->
      ok @entity.baseInitialized
    it "obtains the included entity's attributes", ->
      equal @entity.someValue, 42
    it "obtains the included entity's methods", ->
      equal @entity.boringMethod(), 24
    it "obtains the included entity's automatic blocks", ->
      equal @entity.someValueCopy, 42
  describe "that references a non-existent entity", ->
    it "can be defined", ->
      @universe.define(Tom_MutualRef)
    it "cannot be instantiated", ->
      @universe.define(Tom_MutualRef)
      throws /Unknown type/, =>
        tom = @universe.create('Tom')
  describe "with a cyclic reference to another entity", ->
    it "can be defined and instantiated", ->
      @universe.define(Tom_MutualRef, Jerry_MutualRef)
      tom = @universe.create('Tom')
      jerry = @universe.create('Jerry')
# Specs for attribute behavior: get/set, change broadcasting, defaults,
# type coercion, duck typing, and sync/async compute chains.
describe 'R.Entity attribute', ->
  helpers.usesUniverse().usesLog()
  describe "(a boring regular one)", ->
    helpers.defines(MyApp_EventCount).creates('entity', 'MyApp', eventCount: 10)
    it "can be initialized by passing a value into constructor", ->
      equal @entity.eventCount, 10
    describe "when read via .get(attr)", ->
      helpers.inspectReads()
      beforeEach -> @value = @entity.get('eventCount')
      it "returns the current value", ->
        equal @value, 10
      it "records the dependency", ->
        @log.equal ["reading MyApp1 eventCount"]
    it "can be read directly", ->
      equal @entity.eventCount, 10
    it "can be set via .set()", ->
      @entity.set('eventCount', 20)
      equal @entity.eventCount, 20
    describe "when set directly", ->
      helpers.inspectBroadcasts()
      beforeEach ->
        @entity.eventCount = 20
      it "updates the value", ->
        @entity.eventCount = 20
        equal @entity.eventCount, 20
      it "broadcasts a change event", ->
        @log.equal ["change MyApp1 eventCount"]
    describe 'when set to the same value', ->
      helpers.inspectBroadcasts()
      beforeEach ->
        @entity.eventCount = 10
      it "does not broadcast a change event", ->
        @log.equal []
  describe "not defined by the schema", ->
    helpers.defines(MyApp_EventCount).creates('entity', 'MyApp', eventCount: 10)
    it "throws an error when reading via #get", ->
      equal((catchAndReturnError => @entity.get('missingAttr'))?.message, "Unknown attribute 'missingAttr'")
    it "throws an error when writing via #set", ->
      equal((catchAndReturnError => @entity.set('missingAttr', 20))?.message, "Unknown attribute 'missingAttr'")
  describe "with a default value", ->
    helpers.usesRootEntity().defines(MyApp_EventCount_WithDefault).creates('entity', 'MyApp')
    it "is initially set to the provided default value", ->
      equal @entity.eventCount, 42
  describe "with a type", ->
    helpers.usesRootEntity().defines(MyApp_EventCount).creates('entity', 'MyApp')
    it "is initially set to the type-specific default value", ->
      equal @entity.eventCount, 0
    describe "when assigned a value of a different compatible type", ->
      it "coerces the value into the correct type", ->
        @entity.eventCount = "11"
        equal @entity.eventCount, 11
        equal (typeof @entity.eventCount), 'number'
    describe "when assigned a value of an incompatible type", ->
      it "throws an error", ->
        throws /expected an integer/, =>
          @entity.eventCount = []
        throws /expected an integer/, =>
          @entity.eventCount = null
  describe "with a duck type", ->
    helpers.defines(LittleQuacker, Tom_InSouthboundDuckling).creates('tom', 'Tom')
    describe "when provided with the the right duck", ->
      it "eats it promptly", ->
        @tom.southernFriedDuck = { quacks: yes, swims: no, calories: 100 }
        equal @tom.hunger(), 0
    describe "when provided with an ignited bomb", ->
      it "chokes and fails to eat", ->
        throws /expected a LittleQuacker-like object/, =>
          # 1 kg TNT generates exactly 1 Mcal energy; assuming a 250g duckling and a weight match here
          @tom.southernFriedDuck = { boomIn: 3, calories: 250000 }
        equal @tom.hunger(), 100
  describe "with a compute function", ->
    helpers.usesRootEntity().defines(MyApp_EventCount, MyApp_Status1)
    helpers.createsInJob('entity', 'MyApp', eventCount: 10)
    it "is initially set to the default value", ->
      equal @universe.create('MyApp', eventCount: 10).status, ""
    it "is eventually set to the computed value", ->
      equal @entity.status, "eventCount is 10"
    describe "when the source value is updated", ->
      helpers.performAndWait "set eventCount", ->
        @entity.eventCount = 20
      it "the computed values is also eventually updated", ->
        equal @entity.status, "eventCount is 20"
  describe "with an async compute function", ->
    helpers.usesRootEntity().defines(LRRuby_Basics, LRRuby_Version)
    helpers.createsInJob('entity', 'LRRuby', path: '/usr/bin/ruby')
    it "is initially set to the default value", ->
      equal @universe.create('LRRuby', path: '/usr/bin/ruby').version, ""
    it "is eventually set to the computed value", ->
      equal @entity.version, "1.8.7"
    describe "when the source value is updated", ->
      helpers.performAndWait "update ruby.path", ->
        @entity.path = '/usr/local/bin/ruby'
      it "the computed values is also eventually updated", ->
        equal @entity.version, "1.9.3"
  describe "with a chain of dependent computed attributes", ->
    helpers.usesRootEntity().defines(MyApp_EventCount, MyApp_DoubleEventCount, MyApp_SuperStatus, MyApp_Status2)
    helpers.createsInJob('entity', 'MyApp', eventCount: 14)
    it "all computed attributes are eventually set to the final values", ->
      equal @entity.doubleEventCount, 28
      equal @entity.status, "doubleEventCount ends with 8"
      equal @entity.superStatus, "(doubleEventCount ends with 8)"
    describe "when the source attribute is updated", ->
      helpers.inspectBroadcasts()
      helpers.performAndWait "set eventCount", ->
        @entity.eventCount = 16
      it "the computed attributes are all eventually updated", ->
        equal @entity.doubleEventCount, 32
        equal @entity.status, "doubleEventCount ends with 2"
        equal @entity.superStatus, "(doubleEventCount ends with 2)"
      it "change events are broadcasted for the source and computed attributes", ->
        @log.equal ["change MyApp1 eventCount", "change MyApp1 doubleEventCount", "change MyApp1 status", "change MyApp1 superStatus"]
    describe "when the source attribute is set to the same value", ->
      helpers.inspectBroadcasts()
      helpers.performAndWait "set eventCount", ->
        @entity.eventCount = 14
      it "no change events are broadcasted", ->
        @log.equal []
    describe "when the source attribute is updated, but intermediate attribute value stays the same", ->
      helpers.inspectBroadcasts()
      helpers.performAndWait "set eventCount", ->
        # 9*2=18 and 14*2=28 both end in 8, so status stays unchanged
        @entity.eventCount = 9
      it "no change events are broadcasted for unmodified values", ->
        @log.equal ["change MyApp1 eventCount", "change MyApp1 doubleEventCount"]
# Specs for R.Task lifecycle (ONESHOT vs AUTOREPEAT), subtask ordering,
# dependency tracking/resubscription and failure reporting.
# Uses IcedCoffeeScript await/defer for async flow.
describe "R.Task", ->
  helpers.usesUniverse().usesRootEntity().usesLog()
  describe "with ONESHOT type, sync function and no subtasks", ->
    beforeEach ->
      @task1 = new R.Task(@root, new R.TaskDef(@universe, 'smt', R.TaskDef.ONESHOT, @log.pusher("RTask1_smt.run")))
      @log.inspectTaskEvents(@task1, ['completed', 'finalized'])
    it "reports completion and finalization", (done) ->
      await @task1.schedule().waitFinalized(defer())
      @log.equal [
        'RTask1_smt.run'
        'RTask1_smt.completed'
        'RTask1_smt.finalized'
      ]
      done()
  describe "with ONESHOT type, sync function and a subtask", ->
    beforeEach ->
      func1 = =>
        @log.push "RTask1_parent.start"
        @task2.schedule()
        @log.push "RTask1_parent.end"
      @task1 = new R.Task(@root, new R.TaskDef(@universe, 'parent', R.TaskDef.ONESHOT, func1, multiuse: no, reactive: no))
      @task2 = new R.Task(@root, new R.TaskDef(@universe, 'child', R.TaskDef.ONESHOT, @log.pusher("RTask2_child.run"), multiuse: no, reactive: no))
      @log.inspectTaskEvents(@task1, ['completed', 'finalized'])
        .inspectTaskEvents(@task2, ['completed', 'finalized'])
    it "reports parent task completion, then subtask completion/finalization, then parent task finalization", (done) ->
      await @task1.schedule().waitFinalized(defer())
      @log.equal [
        'RTask1_parent.start'
        'RTask1_parent.end'
        'RTask1_parent.completed'
        'RTask2_child.run'
        'RTask2_child.completed'
        'RTask2_child.finalized'
        'RTask1_parent.finalized'
      ]
      done()
  describe "with AUTOREPEAT type and a sync function that loves to change its dependencies", ->
    helpers.inspectSubscriptions().defines(MyApp_EventCount, MyApp_DoubleEventCount, MyApp_Status1).creates('entity', 'MyApp', eventCount: 10)
    # reads every attribute named in @keys so the task's dependency set
    # follows whatever @keys currently contains
    func = ->
      for key in @keys
        dummy = @entity[key]
    beforeEach ->
      # NOTE(review): the key list below is an anonymization placeholder from
      # the dataset (originally attribute names); kept verbatim.
      @keys = ['PI:KEY:<KEY>END_PIstatus']
      @task1 = new R.Task(@root, new R.TaskDef(@universe, 'smt', R.TaskDef.AUTOREPEAT, func.bind(@)))
    it "subscribes to the initial set topics", (done) ->
      await @task1.on 'finalized', defer()
      @log.equal [
        'sub RTask1_compute_doubleEventCount MyApp1 eventCount'
        'sub RTask2_compute_status MyApp1 eventCount'
        'sub RTask3_smt MyApp1 eventCount,status'
      ]
      done()
    describe "when a dependency is removed", ->
      helpers.performAndWait "update eventCount", ->
        @keys = ['PI:KEY:<KEY>END_PI']
        @entity.eventCount = 11
      it "resubscribes to the new set of topics", ->
        @log.equal [
          'sub RTask1_compute_doubleEventCount MyApp1 eventCount'
          'sub RTask2_compute_status MyApp1 eventCount'
          'sub RTask3_smt MyApp1 eventCount,status'
          'sub RTask1_compute_doubleEventCount MyApp1 eventCount'
          'sub RTask2_compute_status MyApp1 eventCount'
          'sub RTask3_smt MyApp1 eventCount'
        ]
    describe "when a dependency is added", ->
      helpers.performAndWait "update eventCount", ->
        @keys = ['PI:KEY:<KEY>END_PI', 'PI:KEY:<KEY>END_PI']
        @entity.eventCount = 12
      it "resubscribes to the new set of topics", ->
        @log.equal [
          'sub RTask1_compute_doubleEventCount MyApp1 eventCount'
          'sub RTask2_compute_status MyApp1 eventCount'
          'sub RTask3_smt MyApp1 eventCount,status'
          'sub RTask1_compute_doubleEventCount MyApp1 eventCount'
          'sub RTask2_compute_status MyApp1 eventCount'
          'sub RTask3_smt MyApp1 eventCount,doubleEventCount'
        ]
  describe "with AUTOREPEAT type, sync function and no subtasks", ->
    helpers.defines(MyApp_EventCount).creates('entity', 'MyApp', eventCount: 10)
    func = ->
      @log.push "RTask1_smt.run eventCount=#{@entity.eventCount}"
    beforeEach ->
      @task1 = new R.Task(@root, new R.TaskDef(@universe, 'smt', R.TaskDef.AUTOREPEAT, func.bind(@)))
      @log.inspectTaskEvents(@task1, ['completed', 'finalized'])
    it "reports completion and finalization", (done) ->
      await @task1.on 'finalized', defer()
      @log.equal [
        'RTask1_smt.run eventCount=10'
        'RTask1_smt.completed'
        'RTask1_smt.finalized'
      ]
      done()
    it "accumulates dependencies when reading an entity", ->
      await @task1.on 'finalized', defer()
      deps = @task1.completedInvocation._topicsByDepId
      deepEqual Object.keys(deps).sort(), ['MyApp1']
      deepEqual Object.keys(deps.MyApp1).sort(), ['eventCount']
    it "runs again when dependencies are updated", (done) ->
      await @task1.on 'finalized', defer()
      await @performAndWait defer(), "update eventCount", =>
        @entity.eventCount = 20
      @log.equal [
        'RTask1_smt.run eventCount=10'
        'RTask1_smt.completed'
        'RTask1_smt.finalized'
        'RTask1_smt.run eventCount=20'
        'RTask1_smt.completed'
        'RTask1_smt.finalized'
      ]
      done()
  describe "with ONESHOT type and a failing sync function", ->
    beforeEach ->
      @task1 = new R.Task(@root, new R.TaskDef(@universe, 'smt', R.TaskDef.ONESHOT, (-> throw new Error 'ETEST')))
      @log.inspectTaskEvents(@task1, ['completed', 'finalized'])
    it "reports a failed completion", (done) ->
      await @task1.schedule().waitCompleted(defer())
      equal "#{@task1.lastError}", "Error: ETEST"
      done()
# Specs for list collections: direct push, query subscription lifecycle,
# and async computed source collections.
describe "R.Collection", ->
  helpers.usesUniverse().usesRootEntity().usesLog()
    .defines(LRRuby_Basics)
  describe 'basic functionality', ->
    helpers.defines(MyApp_RubyCollection1).creates('entity', 'MyApp')
    it "works fine for a proof of concept", (done) ->
      equal typeof(@entity.rubies), 'object'
      ok @entity.rubies instanceof R.ListCollection
      equal @entity.rubies.all.length, 0
      ruby = @universe.create('LRRuby')
      @entity.rubies.push(ruby)
      equal @entity.rubies.all.length, 1
      # AUTOREPEAT task reads/pushes the collection, registering a query
      # subscription on it; the subscription must disappear on dispose().
      task = new R.Task @root, new R.TaskDef @universe, "Add Ruby", R.TaskDef.AUTOREPEAT, =>
        @entity.rubies.push(ruby)
        equal @entity.rubies.all.length, 2
      await task.schedule().waitFinalized defer()
      keys = Object.keys(@entity.rubies._queries)
      equal keys.length, 1, "Wrong number of subscribed queries (#{keys.length} instead of 1), actual subscribed IDs: #{JSON.stringify keys}"
      task.dispose()
      keys = Object.keys(@entity.rubies._queries)
      equal keys.length, 0, "Wrong number of subscribed queries (#{keys.length} instead of 0), actual subscribed IDs: #{JSON.stringify keys}"
      done()
  describe "computed source collections", ->
    helpers.defines(MyApp_SystemRubies).createsInJob('entity', 'MyApp')
    it "work", (done) ->
      equal @entity.systemRubies.all.length, 3
      equal @entity.systemRubies.all[0].constructor.name, 'LRRuby'
      equal @entity.systemRubies.all[1].constructor.name, 'LRRuby'
      equal @entity.systemRubies.all[2].constructor.name, 'LRRuby'
      equal @entity.systemRubies.all[0].path, '/usr/bin/ruby'
      equal @entity.systemRubies.all[1].path, '/usr/local/bin/ruby'
      done()
|
[
{
"context": "# Used some code from https://github.com/atom/markdown-preview/blob/9ff76ad3f6407a0fb68163a538c",
"end": 45,
"score": 0.9953123331069946,
"start": 41,
"tag": "USERNAME",
"value": "atom"
},
{
"context": "e-break-on-single-newline': =>\n keyPath = 'ever-notedown.br... | lib/ever-notedown.coffee | josephsieh/ever-notedown | 170 | # Used some code from https://github.com/atom/markdown-preview/blob/9ff76ad3f6407a0fb68163a538c6d460280a1718/lib/main.coffee
#
# Reproduced license info:
# Copyright (c) 2014 GitHub Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
path = require 'path'
{CompositeDisposable, Disposable} = require 'atom'
{File, Directory} = require 'atom'
{$} = require 'atom-space-pen-views'
TextEditor = null
CSON = null
chartsHelper = null
evernoteHelper = null # delay require './evernote-helper'
storage = null # delay require './storage-manager'
noteHelper = null # require './note-helper'
mathjaxHelper = null
utils = null
fenceNameForScope = null #require './extension-helper'
fs = null #require 'fs-plus'
git = null # requrie 'git-utils'
cheerio = null # require 'cheerio'
clipboard = null
_ = null # require 'underscore-plus'
# used some code from atom/markdown-preview/lib/main.coffee
url = null #require 'url'
NoteManagerView = null # Defer until used
EVNDPreviewView = null # Defer until used
EVNDView = null # Defer until used
NoteInfoView = null
ConfirmDialog = null
InfoDialog = null
SearchNoteView = null
SearchResultListView = null
renderer = null # Defer until used
# True when `object` is a NoteInfoView instance; the class module is
# loaded lazily on first use to keep package activation fast.
isNoteInfoView = (object) ->
  NoteInfoView = require './info-dialog' unless NoteInfoView?
  object instanceof NoteInfoView
# True when `object` is a NoteManagerView instance; module loaded lazily.
isNoteManagerView = (object) ->
  NoteManagerView = require './note-manager' unless NoteManagerView?
  object instanceof NoteManagerView
# True when `object` is an EVNDView instance; module loaded lazily.
isEVNDView = (object) ->
  EVNDView = require './ever-notedown-view' unless EVNDView?
  object instanceof EVNDView
# Builds a preview view from (possibly deserialized) `state`, loading the
# view class on demand.
createEVNDPreviewView = (state) ->
  EVNDPreviewView = require './ever-notedown-preview-view' unless EVNDPreviewView?
  new EVNDPreviewView(state)
# True when `object` is an EVNDPreviewView instance; module loaded lazily.
isEVNDPreviewView = (object) ->
  EVNDPreviewView = require './ever-notedown-preview-view' unless EVNDPreviewView?
  object instanceof EVNDPreviewView
# Register a deserializer so preview panes are restored across Atom window
# reloads; only plain serialized objects are accepted.
atom.deserializers.add
  name: 'EVNDPreviewView'
  deserialize: (state) ->
    createEVNDPreviewView(state) if state.constructor is Object
# Default notes repository location, inside Atom's config directory.
defaultGitPath = path.join atom.getConfigDirPath(), 'evnd/'

# Maps the "theme" config enum to bundled preview stylesheet paths.
themeDict =
  "Default": "assets/themes/default/style.css"
  "Default 2": "assets/themes/default2/style.css"
  "Default 3": "assets/themes/default3/style.css"
  "Atom": "assets/themes/atom/style.css"
  "Custom 1": "assets/themes/custom1/style.css"
  "Custom 2": "assets/themes/custom2/style.css"

# Maps the "syntaxTheme" config enum to code-highlighting stylesheets.
syntaxThemeDict =
  "Default": "assets/themes-syntax/default/style.css"
  "Default 2": "assets/themes-syntax/default2/style.css"
  "One Light": "assets/themes-syntax/one-light/style.css"
  "One Dark": "assets/themes-syntax/one-dark/style.css"
  "Solarized Light": "assets/themes-syntax/solarized-light/style.css"
  "Solarized Dark": "assets/themes-syntax/solarized-dark/style.css"
  "Github": "assets/themes-syntax/github/style.css"
  "Chester": "assets/themes-syntax/chester/style.css"
  "Tomorrow": "assets/themes-syntax/tomorrow/style.css"
  "IDLE": "assets/themes-syntax/IDLE/style.css"
  "Seti Syntax": "assets/themes-syntax/seti-syntax/style.css"
  "Cobalt": "assets/themes-syntax/cobalt/style.css"
  "Monokai": "assets/themes-syntax/monokai/style.css"
  "Serpia": "assets/themes-syntax/serpia/style.css"
  "Custom 1": "assets/themes-syntax/custom1/style.css"
  "Custom 2": "assets/themes-syntax/custom2/style.css"

# Maps the "noteTemplate" config enum to new-note markdown templates
# ("None" means start from an empty buffer).
noteTemplateDict =
  "Default": "assets/templates/default.markdown"
  "Lecture Notes": "assets/templates/lecture_notes.markdown"
  "None": ""
  "Custom 1": "assets/templates/custom1.markdown"
  "Custom 2": "assets/templates/custom2.markdown"

# Grammar scopes this package can assign to markdown buffers.
evndGrammarList = [
  'source.gfm'
  'source.litcoffee'
  'text.markdown.evnd.mathjax.source.litcoffee.inline.html'
  'text.markdown.evnd.mathjax.source.gfm.inline.html'
  'text.markdown.evnd.source.gfm.inline.html'
]
# Global variables?
window.evnd =
evndView: null
editor: null
searchNoteView: null
searchResultListView: null
noteManagerView: null
cssTheme: ""
cssCode: ""
template: ""
noteIndex: null
storageManager: null
enHelper: null
init: null
chartsLibsLoaded: null
gitPath: null
gitPathSymlink: null
svgCollections: null
newNoteDisposables: null
module.exports =
config:
showPath:
type: 'boolean'
default: true
order: 1
gitPath:
type: 'string'
default: defaultGitPath
description: 'Default location to store your ever-notedown notes, GIT-backed'
order: 2
gitPathSymlink:
type: 'boolean'
default: true
description: 'Check this if the specified gitPath is a symbolic link'
order: 3
openNoteInEvernoteAuto:
title: 'Open Note in Evernote'
type: 'boolean'
default: false
description: "Automatically open note in Evernote client after note creation or modification"
order: 4
pulledContentInSplitPane:
type: 'boolean'
default: false
description: "After loading note content from the Evernote client database, put the loaded content in a separate pane as a new file? (default: false, will overwrite old note content)."
order: 5
sortBy:
type: 'string'
default: 'Title'
enum: ['default', 'Title', 'Notebook', 'Creation Time', 'Modification Time']
description: 'Default sorting is the order in which the notes are displayed in the drop-down note browser'
order: 6
convertHexNCR2String:
title: 'Convert Hex NCR to String'
type: 'boolean'
default: true
description: 'When importing (or pulling) from Evernote, convert hex NCR represented Unicode characters to UTF8 string'
order: 7
defaultFormat:
type: 'string'
default: 'Markdown'
enum: ['Text', 'Markdown', 'HTML']
description: '(Please choose only "Markdown" for now...)'
order: 7
codeSnippet:
type: 'boolean'
default: true
description: 'Render selected content as a fenced code block'
order: 8
toc:
title: 'TOC'
type: 'boolean'
default: true
description: 'Enable Table of Contents generation ([TOC])'
order: 9
checkbox:
type: 'boolean'
default: true
description: 'Render ([ ], [x]) as checkboxes everywhere'
order: 10
footnote:
type: 'boolean'
default: true
description: 'Parse footnotes in MMD style...([^text] for reference, [^text]: for definition)'
order: 11
mathjax:
type: 'boolean'
default: true
description: 'Enable MathJax processing'
order: 12
mathjaxOutput:
type: 'string'
default: 'SVG'
enum: ['SVG'] #['SVG', 'HTML/CSS']
order: 13
mathjaxCustomMacros:
type: 'string'
default: "Physical Sciences"
enum: [
"None",
"Default",
"Physical Sciences",
"Math",
"Custom 1",
"Custom 2"
]
order: 14
description: 'Use custom defined macros (~/.atom/packages/ever-notdown/assets/mathjax/macros/custom.json) for MathJax rendering. (After making changes, please use "View -> Reload" for the change to take effect.)'
breakOnSingleNewline:
type: 'boolean'
default: false
description: 'Markdown rendering option'
order: 15
smartyPants:
type: 'boolean'
default: false
description: 'Use "smart" typograhic punctuation for things like quotes and dashes.'
order: 16
noteTemplate:
type: 'string'
default: 'Default'
description: 'Template for creating new note'
enum: [
"Default",
"Lecture Notes",
"Custom 1",
"Custom 2",
"None"
]
order: 17
theme:
type: 'string'
default: "Default"
enum: [
"Default",
"Default 2",
"Default 3",
"Atom",
"Custom 1",
"Custom 2"
]
order: 18
syntaxTheme:
type: 'string'
default: "Default"
enum: [
"Default",
"Default 2",
"One Light",
"One Dark",
"Solarized Light",
"Solarized Dark",
"Github",
"Chester",
"Tomorrow",
"IDLE",
"Seti Syntax",
"Cobalt",
"Monokai",
"Serpia",
"Custom 1",
"Custom 2"
]
order: 19
liveUpdate:
type: 'boolean'
default: true
description: 'For Markdown Preview'
order: 20
openPreviewInSplitPane:
type: 'boolean'
default: true
order: 21
syncScroll:
type: 'boolean'
default: true
description: 'Sync scrolling between the editor and the preview pane'
order: 22
grammars:
type: 'array'
default: [
'source.gfm'
'source.litcoffee'
'text.html.basic'
'text.plain'
'text.plain.null-grammar'
'text.markdown.evnd.source.gfm.inline.html'
'text.markdown.evnd.mathjax.source.gfm.inline.html'
'text.markdown.evnd.mathjax.source.litcoffee.inline.html'
]
order: 23
evndGrammar:
title: 'Extended grammar for syntax highlighting markdown files in editor'
type: 'string'
order: 24
enum: [
'Extended source.litcoffee'
'Extended source.gfm'
]
default: 'Extended source.gfm'
description: 'Support extra syntax highlighting, eg: inline HTML, MathJax equations, etc.'
subscriptions: null
# TODO: This CSS matter... should we just go for "getMarkdownPreviewCSS"?
activate: (state) ->
return unless process.platform is 'darwin' # OSX Only!
window.evnd.init = true
window.evnd.chartsLibsLoaded = false
#console.log atom.config.get('ever-notedown.gitPath')
@loadJSON (newNoteIndex) =>
window.evnd.noteIndex = newNoteIndex
mathjax = atom.config.get('ever-notedown.mathjax')
if mathjax
mathjaxHelper = require('./mathjax-helper')
mathjaxHelper.loadMathJax()
# Events subscribed to in atom's system can be easily
# cleaned up with a CompositeDisposable
@subscriptions = new CompositeDisposable
# Register commands
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:toggle': =>
@toggle()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:toggle-note-list': =>
@createNoteManagerView(state).toggle()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:selection-to-evernote', =>
@sel2Evernote()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:file-to-evernote', =>
@file2Evernote()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:pull-current-note-from-evernote', =>
@pullFromEvernote()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:import-note-from-evernote', =>
@showImportNotePanel()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:new-note', =>
@openNewNote()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:open-config', =>
@openConfig()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:open-help-document', =>
@openHelpDoc()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:markdown-quick-ref', =>
@openMarkdownQuickRef()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:mathjax-quick-ref', =>
@openMathJaxQuickRef()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:notes-for-developers', =>
@openDevNotes()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:toggle-preview': =>
@togglePreview()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:copy-rendered-html': =>
@copyHtml()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:save-rendered-html': =>
@saveHtml()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:toggle-break-on-single-newline': =>
keyPath = 'ever-notedown.breakOnSingleNewline'
atom.config.set(keyPath, not atom.config.get(keyPath))
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:view-current-note-template': =>
@openNewNote()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-note-template-custom1': =>
@editCustomTemplate('Custom 1')
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-note-template-custom2': =>
@editCustomTemplate('Custom 2')
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:view-current-theme-css': =>
@viewThemeCSS()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-theme-custom1': =>
@editCustomThemeCSS('Custom 1')
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-theme-custom2': =>
@editCustomThemeCSS('Custom 2')
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:view-current-syntax-theme-css': =>
@viewSyntaxThemeCSS()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-syntax-theme-custom1': =>
@editCustomSyntaxThemeCSS('Custom 1')
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-syntax-theme-custom2': =>
@editCustomSyntaxThemeCSS('Custom 2')
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:view-mathjax-macros': =>
@viewMathJaxMacros()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-macros-custom1': =>
@editCustomMacros('Custom 1')
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-macros-custom2': =>
@editCustomMacros('Custom 2')
@subscriptions.add atom.commands.add 'atom-text-editor', 'drop': (event) =>
#console.log 'Dropping item!'
@onDrop(event)
@subscriptions.add atom.commands.add 'atom-text-editor',
'core:paste': (event) =>
#console.log "Pasting stuff!"
event.stopPropagation()
@pasteImage()
@subscriptions.add atom.commands.add 'atom-text-editor',
'ever-notedown:bold-text': =>
@boldText()
@subscriptions.add atom.commands.add 'atom-text-editor',
'ever-notedown:emphasis-text': =>
@emphasisText()
@subscriptions.add atom.commands.add 'atom-text-editor',
'ever-notedown:underline-text': =>
@underlineText()
@subscriptions.add atom.commands.add 'atom-text-editor',
'ever-notedown:highlight-text': =>
@highlightText()
@subscriptions.add atom.commands.add 'atom-text-editor',
'ever-notedown:strikethrough-text': =>
@strikeThroughText()
@subscriptions.add atom.commands.add 'atom-text-editor',
'ever-notedown:blockquote': =>
@blockquote()
@subscriptions.add atom.workspace.observePaneItems (item) =>
if isEVNDPreviewView(item)
item.disposables.add item.onDidClickButtonEvernote (editor, previewView) =>
@file2Evernote(editor, previewView)
item.disposables.add item.onDidClickButtonPull (filePath, previewView) =>
@pullFromEvernote(null, filePath, previewView)
item.disposables.add item.onDidClickButtonNewNote =>
@openNewNote()
item.disposables.add item.onDidClickButtonHome =>
@toggle()
item.disposables.add item.onDidClickButtonEye (filePath, previewView) =>
@openNoteInEvernote(null, filePath, previewView)
item.disposables.add item.onDidClickButtonInfo (note) =>
@confirmedNoteItem({note: note})
item.disposables.add item.onDidClickButtonHTML (note) =>
@getNoteHTML({note: note})
item.disposables.add item.onDidClickButtonENML (note) =>
@getNoteENML({note: note})
item.disposables.add item.onDidClickButtonFolder (notePath) =>
@openFinder(notePath)
@subscriptions.add item.disposables
previewFile = @previewFile.bind(this)
@subscriptions.add atom.commands.add '.tree-view .file .name[data-name$=\\.markdown]',
'ever-notedown:preview-file', previewFile
@subscriptions.add atom.commands.add '.tree-view .file .name[data-name$=\\.md]',
'ever-notedown:preview-file', previewFile
@subscriptions.add atom.commands.add '.tree-view .file .name[data-name$=\\.mdown]',
'ever-notedown:preview-file', previewFile
@subscriptions.add atom.commands.add '.tree-view .file .name[data-name$=\\.mkd]',
'ever-notedown:preview-file', previewFile
@subscriptions.add atom.commands.add '.tree-view .file .name[data-name$=\\.mkdown]',
'ever-notedown:preview-file', previewFile
@subscriptions.add atom.commands.add '.tree-view .file .name[data-name$=\\.ron]',
'ever-notedown:preview-file', previewFile
@subscriptions.add atom.commands.add '.tree-view .file .name[data-name$=\\.txt]',
'ever-notedown:preview-file', previewFile
atom.workspace.addOpener (uriToOpen) ->
try
{protocol, host, pathname} = url.parse(uriToOpen)
catch error
return
return unless protocol is 'ever-notedown-preview:'
try
pathname = decodeURI(pathname) if pathname
catch error
return
if host is 'editor'
createEVNDPreviewView(editorId: pathname.substring(1))
else
createEVNDPreviewView(filePath: pathname)
deactivate: ->
# TODO: manage storage?
#if atom.config.get('ever-notedown.mathjax') then @removeMathJaxGrammar()
@removeEVNDGrammar()
window.evnd.noteIndex?.update()
window.evnd.noteManagerView?.destroy?()
window.evnd.searchResultListView?.destroy?()
window.evnd.evndView?.destroy()
@subscriptions.dispose()
for k, v of window.evnd
if k in ["cssTheme", "cssCode", "template"]
window.evnd[k] = ""
else
window.evnd[k] = null
  # Atom serialization hook: persists the note-manager view's state (if
  # the view exists) so it can be restored on the next launch.
  serialize: ->
    noteManagerViewState: window.evnd.noteManagerView?.serialize()
  # Toggle the main EVND panel. On first use, lazily constructs the
  # EVNDView, wires every one of its buttons to the corresponding
  # package action, and caches the view on `window.evnd.evndView`.
  toggle: ->
    if window.evnd.init then @loadModule()
    unless window.evnd.evndView?
      EVNDView ?= require './ever-notedown-view'
      newEVNDView = new EVNDView(@)
      # Wire each panel button to its handler; all listener disposables
      # are collected on the view and added to @subscriptions below.
      newEVNDView.disposables.add newEVNDView.onDidClickButtonImportNote =>
        @showImportNotePanel()
      newEVNDView.disposables.add newEVNDView.onDidClickButtonNewNote =>
        newEVNDView.hide()
        @openNewNote()
      newEVNDView.disposables.add newEVNDView.onDidClickButtonDeleteNote =>
        @deleteNote()
      newEVNDView.disposables.add newEVNDView.onDidClickButtonNoteList =>
        @createNoteManagerView(@).toggle()
      newEVNDView.disposables.add newEVNDView.onDidClickButtonOpenConfig =>
        @openConfig()
      newEVNDView.disposables.add newEVNDView.onDidClickButtonOpenHelp =>
        @openHelpDoc()
      newEVNDView.disposables.add newEVNDView.onDidClickButtonOpenNote (note) =>
        newEVNDView.hide()
        @openNote(note)
      newEVNDView.disposables.add newEVNDView.onDidClickButtonOpenFinder (notePath) =>
        @openFinder(notePath)
      newEVNDView.disposables.add newEVNDView.onDidClickButtonOpenInfo (note) =>
        @confirmedNoteItem({note: note})
      # NOTE(review): onDidClickButtonDeleteNote is subscribed twice —
      # once above without arguments and once here with a note; presumably
      # the view emits both forms — confirm against ever-notedown-view.
      newEVNDView.disposables.add newEVNDView.onDidClickButtonDeleteNote (note) =>
        @deleteNote {note:note}, (deleted) =>
          if deleted then newEVNDView.refresh()
      newEVNDView.disposables.add newEVNDView.onDidClickButtonExportNote () =>
        @saveHtml()
      @subscriptions.add newEVNDView.disposables
      window.evnd.evndView = newEVNDView
    window.evnd.evndView.toggle(@)
#
# Based on the official Atom Markdown Preview package
# Updated Nov 15, 2015
# TODO: move these functions to `ever-notedown-preview-view.coffee`
#
getTextEditorStyles: ->
textEditorStyles = document.createElement("atom-styles")
textEditorStyles.initialize(atom.styles)
textEditorStyles.setAttribute "context", "atom-text-editor"
document.body.appendChild textEditorStyles
# Extract style elements content
Array.prototype.slice.apply(textEditorStyles.childNodes).map (styleElement) ->
styleElement.innerText
  # TODO: remove the particular {overflow-y: scroll;}?
  # Build (and memoize on @markdownPreviewCSS) the CSS used for exported
  # previews: all `.ever-notedown-preview` rules plus the live
  # text-editor styles, with <atom-text-editor> selectors rewritten for
  # static HTML, shadow-DOM :host selectors neutralized, and packaged
  # assets inlined as base64 data URIs.
  getMarkdownPreviewCSS: ->
    return @markdownPreviewCSS if @markdownPreviewCSS
    markdowPreviewRules = []
    ruleRegExp = /\.ever-notedown-preview/
    cssUrlRefExp = /url\(atom:\/\/ever-notedown\/assets\/(.*)\)/
    for stylesheet in document.styleSheets
      if stylesheet.rules?
        for rule in stylesheet.rules
          # We only need `.ever-notedown-preview` css
          markdowPreviewRules.push(rule.cssText) if rule.selectorText?.match(ruleRegExp)?
    @markdownPreviewCSS = markdowPreviewRules
      .concat(@getTextEditorStyles())
      .join('\n')
      .replace(/([^\.])atom-text-editor/g, '$1pre.editor-colors') # <atom-text-editor> are now <pre>
      .replace(/:host/g, '.host') # Remove shadow-dom :host selector causing problem on FF
      .replace cssUrlRefExp, (match, assetsName, offset, string) -> # base64 encode assets
        assetPath = path.join __dirname, '../assets', assetsName
        originalData = fs.readFileSync assetPath, 'binary'
        base64Data = new Buffer(originalData, 'binary').toString('base64')
        "url('data:image/jpeg;base64,#{base64Data}')"
    @markdownPreviewCSS
editCustomSyntaxThemeCSS: (syntaxThemeName) ->
return unless syntaxThemeName.indexOf('Custom') > -1 and syntaxThemeDict[syntaxThemeName]?
if window.evnd.init then @loadModule()
evndPkgPath = atom.packages.resolvePackagePath('ever-notedown')
syntaxThemeFilePath = path.join evndPkgPath, syntaxThemeDict[syntaxThemeName]
atom.workspace.open(syntaxThemeFilePath)
viewSyntaxThemeCSS: ->
@loadCSS() unless window.evnd.cssCode?
syntaxThemeCSS = window.evnd.cssCode
atom.workspace.open('').then (editor) =>
editor.setText(syntaxThemeCSS)
cssGrammar = atom.grammars.grammarForScopeName('source.css')
if cssGrammar then editor.setGrammar(cssGrammar)
editCustomThemeCSS: (themeName) ->
return unless themeName?.indexOf('Custom') > -1 and themeDict[themeName]?
if window.evnd.init then @loadModule()
evndPkgPath = atom.packages.resolvePackagePath('ever-notedown')
themeFilePath = path.join evndPkgPath, themeDict[themeName]
atom.workspace.open(themeFilePath)
viewThemeCSS: ->
@loadCSS() unless window.evnd.cssTheme?
themeCSS = window.evnd.cssTheme
atom.workspace.open('').then (editor) =>
editor.setText(themeCSS)
cssGrammar = atom.grammars.grammarForScopeName('source.css')
if cssGrammar then editor.setGrammar(cssGrammar)
  # Load the configured theme and syntax-theme CSS files into
  # `window.evnd.cssTheme` / `window.evnd.cssCode`, write their
  # concatenation to styles/theme.css inside the package, register the
  # sheet with Atom's style manager via @reloadTheme, and return the
  # combined CSS. Returns undefined when either name can't be resolved.
  #
  # themeName / syntaxThemeName - optional overrides; default to the
  #   'ever-notedown.theme' / 'ever-notedown.syntaxTheme' config values.
  loadCSS: (themeName, syntaxThemeName) ->
    # Load defined CSS themes
    themeName ?= atom.config.get('ever-notedown.theme')
    themeFileName = themeDict[themeName]
    syntaxThemeName ?= atom.config.get('ever-notedown.syntaxTheme')
    syntaxThemeFileName = syntaxThemeDict[syntaxThemeName]
    return unless themeFileName? and syntaxThemeFileName?
    evndPkgPath = atom.packages.resolvePackagePath('ever-notedown')
    themeFilePath = path.join evndPkgPath, themeFileName
    window.evnd.cssTheme = fs.readFileSync(themeFilePath, 'utf8')
    syntaxThemeFilePath = path.join evndPkgPath, syntaxThemeFileName
    window.evnd.cssCode = fs.readFileSync(syntaxThemeFilePath, 'utf8')
    themePath = path.join evndPkgPath, "styles/theme.css"
    themeCSS = window.evnd.cssTheme + window.evnd.cssCode
    fs.writeFileSync(themePath, themeCSS, 'utf8')
    @reloadTheme(themeCSS, {sourcePath: themePath})
    return themeCSS
reloadTheme: (source, params) ->
return unless source
#console.log "Reloading css style sheet... #{params.sourcePath}"
sourcePath = params?.sourcePath
sourcePath ?= path.join atom.packages.resolvePackagePath('ever-notedown'), 'styles/theme.css'
priority = params?.priority
styleElements = atom.styles.getStyleElements()
for styleElement in styleElements
if styleElement.sourcePath is sourcePath
priority ?= styleElement.priority ? 0
atom.styles.removeStyleElement(styleElement)
#break
params.priority = priority
atom.styles.addStyleSheet(source, params)
@markdownPreviewCSS = null
removeTheme: (sourcePath) ->
return unless sourcePath
#console.log "Removing css style sheet... #{sourcePath}"
styleElements = atom.styles.getStyleElements()
for styleElement in styleElements
if styleElement.sourcePath is sourcePath
atom.styles.removeStyleElement(styleElement)
break
viewTemplate: ->
if window.evnd.init then @loadModule()
template = window.evnd.template ? @loadTemplate()
atom.workspace.open('').then (editor) =>
editor.setText(template)
editCustomTemplate: (templateName) ->
return unless templateName?.indexOf('Custom') > -1 and
noteTemplateDict[templateName]?
if window.evnd.init then @loadModule()
evndPkgPath = atom.packages.resolvePackagePath('ever-notedown')
templateFilePath = path.join evndPkgPath, noteTemplateDict[templateName]
atom.workspace.open templateFilePath, {searchAllPanes: true}
loadTemplate: (templateName) ->
evndPkgPath = atom.packages.resolvePackagePath('ever-notedown')
templateName ?= atom.config.get('ever-notedown.noteTemplate')
if templateName is "None"
window.evnd.template = ""
else
templateFilePath = path.join evndPkgPath, noteTemplateDict[templateName]
window.evnd.template = fs.readFileSync(templateFilePath, 'utf8')
return window.evnd.template
viewMathJaxMacros: ->
if window.evnd.init then @loadModule()
unless atom.config.get('ever-notedown.mathjax')
window.alert "MathJax is not enabled currently!"
return
mathjaxHelper ?= require './mathjax-helper'
console.log mathjaxHelper
macros = mathjaxHelper.loadCustomMacros()
console.log macros
atom.workspace.open('').then (editor) =>
editor.setText(mathjaxHelper.macrosToCSONString(macros))
grammar = atom.grammars.grammarForScopeName('source.coffee')
if grammar? then editor.setGrammar(grammar)
editCustomMacros: (macroName) ->
if window.evnd.init then @loadModule()
mathjaxHelper ?= require './mathjax-helper'
return unless macroName?.indexOf('Custom') > -1 and
mathjaxHelper.macroPaths[macroName]?
atom.workspace.open(mathjaxHelper.macroPaths[macroName])
getGitDir: (gitPath, gitPathSymlink) ->
gitPath ?= atom.config.get('ever-notedown.gitPath')
gitPathSymlink ?= atom.config.get('ever-notedown.gitPathSymlink')
if window.evnd.storageManager?.gitDir? and
window.evnd.storageManager.gitPath is gitPath and
window.evnd.storageManager.gitPathSymlink is gitPathSymlink
return window.evnd.storageManager?.gitDir
gitDir = new Directory(gitPath, gitPathSymlink)
return gitDir
getRealGitPath: ->
gitDir = @getGitDir()
return gitDir.getRealPathSync()
loadGitRepo: (gitPath, gitPathSymlink, callback) ->
gitPath ?= atom.config.get('ever-notedown.gitPath')
gitPathSymlink ?= atom.config.get('ever-notedown.gitPathSymlink')
#console.log "Git Path: " + gitPath
storage ?= require './storage-manager'
gitDir = @getGitDir(gitPath, gitPathSymlink)
loadGitRepoNormal = =>
if window.evnd.storageManager?.gitPath is gitPath and
window.evnd.storageManager?.gitPathSymlink is gitPathSymlink and
window.evnd.storageManager?.gitDir?.existsSync()
if window.evnd.storageManager.gitRepo is null
window.evnd.storageManager.initRepo () =>
callback(window.evnd.storageManager)
else
callback(window.evnd.storageManager)
else
storageOptions =
gitPath: gitPath
gitPathSymlink: gitPathSymlink
gitRepo: null
gitDir: gitDir
window.evnd.storageManager ?= new storage.StorageManager(storageOptions)
callback(window.evnd.storageManager)
if not gitDir.existsSync()
dmsg = "The current GIT directory #{gitPath} "
if gitPathSymlink then dmsg += "(symolic link) "
dmsg += "for EVND doesn't exist!"
atom.confirm
message: dmsg
buttons:
"mkdir": =>
@initGitDir gitDir, () =>
loadGitRepoNormal()
"Open Settings": =>
@openConfig()
callback(null)
return
else
loadGitRepoNormal()
  # Ensure the EVND git directory exists, prompting the user for
  # confirmation before creating it with fs-plus. `callback` fires in
  # every branch (already exists, created, or cancelled).
  initGitDir: (gitDir, callback) ->
    gitDir ?= @getGitDir()
    if gitDir?.existsSync()
      if callback? then callback()
      return
    atom.confirm
      message: "Will create directory at #{gitDir.getRealPathSync()}"
      buttons:
        "Confirm": =>
          fs ?= require 'fs-plus'
          fs.makeTreeSync(gitDir.getRealPathSync())
          if callback? then callback()
        "Cancel": =>
          if callback? then callback()
    return
initJSONFile: (jsonFile, callback) ->
@initGitDir null, () =>
jsonFile.write("{}")
if callback? then callback()
  # Read index.json from the git directory — creating it with "{}" via
  # @initJSONFile when missing — and hand a NoteIndex built from its
  # contents to `callback`.
  loadJSON: (callback) ->
    path ?= require 'path'
    jsonPath = path.join(@getRealGitPath(), "index.json")
    jsonFile = new File(jsonPath)
    loadJSONNormal = =>
      jsonFile.read().then (jsonString) =>
        jobj = JSON.parse(jsonString)
        noteHelper ?= require './note-helper'
        options =
          jsonOBJ: jobj
          absfilename: jsonPath
          file: jsonFile
        callback(new noteHelper.NoteIndex(options))
    if jsonFile.existsSync()
      loadJSONNormal()
    else
      @initJSONFile jsonFile, () =>
        loadJSONNormal()
openConfig: ->
options =
searchAllPanes: true
if atom.config.get('ever-notedown.openPreviewInSplitPane')
options.split = "left"
atom.workspace.open 'atom://config/packages/ever-notedown/', options
openHelpDoc: ->
if window.evnd.init then @loadModule()
pathToHelpDoc = path.join __dirname, '../docs/help.md'
options =
searchAllPanes: true
if atom.config.get('ever-notedown.openPreviewInSplitPane')
options.split = "left"
atom.workspace.open(pathToHelpDoc, options).then (editor) =>
@addPreviewForEditor(editor)
openMarkdownQuickRef: ->
if window.evnd.init then @loadModule()
window.alert "Sorry, this function has not yet been implemented... :-/"
openMathJaxQuickRef: ->
if window.evnd.init then @loadModule()
window.alert "Sorry, this function has not yet been implemented... :-/"
openDevNotes: ->
if window.evnd.init then @loadModule()
pathToDevNotes = path.join __dirname, '../docs/dev_notes.md'
options =
searchAllPanes: true
if atom.config.get('ever-notedown.openPreviewInSplitPane')
options.split = "left"
atom.workspace.open(pathToDevNotes, options).then (editor) =>
@addPreviewForEditor(editor)
#
# toggle the search panel (similar to find-and-replace)
#
  # Show the "import note from Evernote" search panel, lazily creating
  # it (and wiring its search event to @searchNotes) on first use.
  showImportNotePanel: ->
    if window.evnd.init then @loadModule()
    if window.evnd?.searchNoteView?.panel?
      window.evnd.searchNoteView.show()
    else
      SearchNoteView ?= require './search-note-view'
      window.evnd.searchNoteView = new SearchNoteView()
      window.evnd.searchNoteView.show()
      @subscriptions.add window.evnd.searchNoteView.onDidSearchWithString ({queryString, noteLink}={}) =>
        @searchNotes({queryString:queryString, noteLink:noteLink})
#
# Open note list (Scroll List view)
#
  # Lazily create (and cache on window.evnd) the scroll-list view of
  # notes, wiring note confirmation to @confirmedNoteItem. Returns the
  # cached view. The `state` argument is currently unused here.
  createNoteManagerView: (state) ->
    if window.evnd.init then @loadModule()
    unless window.evnd.noteManagerView?
      NoteManagerView ?= require './note-manager-view'
      window.evnd.noteManagerView = new NoteManagerView()
      @subscriptions.add window.evnd.noteManagerView.onDidConfirmNote (noteID) =>
        @confirmedNoteItem({noteID: noteID})
    window.evnd.noteManagerView
#
# Import from Evernote?
#
  # Search the Evernote client for notes matching `queryString` and/or a
  # note link. A https://www.evernote.com/shard/... share URL is first
  # rewritten into the evernote:///view/... in-app link form. Results
  # are shown in a SearchResultListView; confirming an entry triggers
  # @importFromEvernote with its link.
  searchNotes: ({queryString, noteLink}={}) ->
    if window.evnd.init then @loadModule()
    reg0 = /^https\:\/\/www\.evernote\.com\/shard\/([^\s\/]+)\/[^\s\/]+\/([^\s\/]+)\/([^\s\/]+)\/$/i
    if noteLink? and reg0.test(noteLink) #noteLink.slice(0, 8) is 'https://'
      matched = reg0.exec(noteLink)
      # Groups: [1]=shard id, [2]=user id, [3]=note GUID (used twice)
      noteLink = "evernote:///view/#{matched[2]}/#{matched[1]}/#{matched[3]}/#{matched[3]}/"
    evernoteHelper ?= require './evernote-helper'
    window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
    window.evnd.enHelper.searchNotes {queryString:queryString, noteLink: noteLink}, (result) =>
      if (not result) or (not result.found) or (result? and _.size(result) < 2)
        window.alert("No results found!")
        return
      else
        SearchResultListView ?= require './search-result-list-view'
        window.evnd.searchResultListView = new SearchResultListView(result)
        window.evnd.searchResultListView.show()
        @subscriptions.add window.evnd.searchResultListView.onDidConfirmSearchResult (noteLink) =>
          @importFromEvernote({noteLink: noteLink})
handleToEvernoteError: (error, noteOptions) ->
message = "#{error.message} when trying to send note to Evernote"
detail = "Note options:\n"
for k, v of noteOptions
continue if k in ["rawHTML", "text", "css"]
detail += " #{k}: #{JSON.stringify(v)}\n"
stack = "#{error.stack}\n"
atom.notifications.addError(message, {stack: stack, detail: detail, dismissable: true})
# TODO: Handles "code snippet"
# TODO: use selection.getScreenRange() (for code annotating?)
#
  # Send the current editor selection to Evernote as a NEW note.
  # The selection is parsed for EVND metadata (title/tags/date/notebook);
  # Markdown-ish files are rendered to HTML asynchronously, code files
  # (when the codeSnippet option is on) are wrapped in a fenced block
  # with source-file/clip-time footer, and anything else is sent as
  # plain text. Delegates the actual create to @toEvernote.
  sel2Evernote: ->
    if window.evnd.init then @loadModule()
    editor = atom.workspace.getActiveTextEditor()
    return unless editor?
    curFilePath = editor.getPath()
    lastSelection = editor.getLastSelection()
    selectionText = lastSelection.getText().toString() #editor.getSelectedText()
    bufferRowRange = lastSelection.getBufferRowRange()
    # GitHub-style line anchor, e.g. "#L3-7" (buffer rows are 0-based)
    rowRange = "#L#{(bufferRowRange[0]+1).toString()}-#{(bufferRowRange[1]+1).toString()}"
    if selectionText.trim().length is 0
      window.alert "Nothing selected!"
      return
    # Convert selected text (Markdown) to HTML
    # TODO: if current file is code file (selected text is code snippet), render
    # TODO: renderer, async???
    textContent = selectionText ? "Nothing here"
    parsedInput = utils.parseMetaData(textContent)
    newTitle = parsedInput.title
    newTextContent = parsedInput.content
    tags = parsedInput.tags
    date = parsedInput.date
    notebookName = parsedInput.notebook
    metaText = parsedInput.metaText
    # Decide the note format: Markdown render, fenced code snippet, or plain text
    if utils.isMarkdown(curFilePath) or
       editor.getGrammar()?.scopeName in evndGrammarList or
       (utils.isText(curFilePath) and
       atom.config.get('ever-notedown.defaultFormat') is 'Markdown')
      renderOptions = {mathjax: atom.config.get('ever-notedown.mathjax')}
    else if atom.config.get('ever-notedown.codeSnippet')
      if path.basename(curFilePath)?
        newTitle = "Code snippet: #{path.basename(curFilePath)}#{rowRange}"
      scopeName = editor.getGrammar()?.scopeName
      fenceName = if scopeName? then fenceNameForScope(scopeName) else ""
      newTextContent = "```#{fenceName}\n#{newTextContent}\n```\n"
      newTextContent += "\n<br><br>**Source file**: #{curFilePath}  \n"
      newTextContent += "<br>**Clipped Time**: #{utils.getCurrentTimeString()}  \n"
      textContent = metaText + "\n\n" + newTextContent
      renderOptions = {mathjax: false}
    else
      newHtmlContent = null
      noteFormat = "Text"
      tmpCss = null
    # Plain-text path is synchronous; Markdown path goes through the
    # async renderer callback below.
    if noteFormat is "Text"
      options =
        title: newTitle
        update: false
        text: textContent # This will include MetaData section...
        tags: tags
        notebook: {name: notebookName}
        metaDate: date
        rawHTML: newHtmlContent
        css: tmpCss
        format: noteFormat
        filePath: curFilePath
        renderOptions: renderOptions ? null
      try
        @toEvernote options, null, (curNote) =>
          @openNote(curNote)
      catch error
        @handleToEvernoteError(error, options)
    else
      renderer ?= require './renderer'
      renderer.toHTML newTextContent, renderOptions.mathjax, editor.getPath(),
        parsedInput, editor.getGrammar(), (error, html) =>
          if error
            console.error('Converting Markdown to HTML failed', error)
            return # TODO: notify user
          else
            tmpCss = if (window.evnd.cssTheme? and window.evnd.cssCode?) then (window.evnd.cssTheme + window.evnd.cssCode) else @loadCSS()
            #tmpCss = @getMarkdownPreviewCSS()
            noteFormat = "Markdown"
            newHtmlContent = html
            options =
              title: newTitle
              update: false
              moved: true
              text: textContent # This will include MetaData section...
              tags: tags
              notebook: {name: notebookName}
              metaDate: date
              rawHTML: newHtmlContent
              css: tmpCss
              format: noteFormat
              filePath: curFilePath
              renderOptions: renderOptions ? null
            try
              @toEvernote options, null, (curNote) =>
                @openNote(curNote)
              catch error
                @handleToEvernoteError(error, options)
  # Send the whole current file (or the file behind `previewView`) to
  # Evernote. Resolves the target editor/file, decides create-vs-update
  # by whether the file already lives inside the EVND git repo, takes
  # rendered HTML from the preview pane for Markdown (HTML files pass
  # through, others go as plain text), then delegates to @toEvernote.
  #
  # editor      - optional TextEditor; defaults to the active one.
  # previewView - optional EVND preview pane tied to the editor.
  file2Evernote: (editor, previewView) ->
    if window.evnd.init then @loadModule()
    if previewView?
      testView = previewView
      editor ?= previewView.editor
    else
      testView ?= atom.workspace.getActivePane().getActiveItem()
      editor ?= atom.workspace.getActiveTextEditor()
    return unless editor? or isEVNDPreviewView(testView)
    # update note in Evernote if current file is already in the EVND git repo
    if editor?
      curFilePath = editor.getPath()
    else
      # Active item is a preview view: recover the editor/file from it
      editorId = parseInt(testView.editorId)
      editor = testView.editor
      curFilePath = testView.filePath
      if editor?
        curFilePath = editor.getPath()
      else if curFilePath?
        editor = atom.workspace.openSync(curFilePath, {searchAllPanes: true})
      return unless curFilePath? and editor?
    # Unsaved buffers are routed through @saveNewNote first
    unless curFilePath?
      if editor?
        dMsg = "EVND will now try to save it as a new note... please try again later."
        atom.notifications.addWarning("File is not yet saved!", {detail: dMsg, dismissable: true})
        utils.timeOut(1000)
        @saveNewNote(editor)
      else
        window.alert "File not saved! Cannot send to Evernote... please save first."
      return
    #if curFilePath.indexOf(atom.config.get('ever-notedown.gitPath')) > -1
    # Files already under the EVND repo update their note in place;
    # anything else becomes a new note "moved" into the repo.
    gitPath0 = @getRealGitPath()
    gitPath1 = atom.config.get('ever-notedown.gitPath')
    if curFilePath.indexOf(gitPath0) > -1 or
       curFilePath.indexOf(gitPath1) > -1
      update = true
      moved = false
      #console.log("Will update this note...")
    else
      update = false
      moved = true
      #console.log("Will create a new note...")
    textContent = editor.getText()
    parsedInput = utils.parseMetaData(textContent)
    newTextContent = parsedInput.content
    newTitle = parsedInput.title
    tags = parsedInput.tags
    date = parsedInput.date
    notebookName = parsedInput.notebook
    # TODO: Fix Async!!!
    if utils.isMarkdown(curFilePath) or
       editor?.getGrammar()?.scopeName in evndGrammarList or
       (utils.isText(curFilePath) and
       atom.config.get('ever-notedown.defaultFormat') is 'Markdown')
      # Markdown: the rendered HTML is scraped from the preview pane, so
      # one must exist and be done loading before we can proceed.
      previewView ?= @getPreviewViewForEditor(editor)
      unless previewView?
        @addPreviewForEditor(editor)
        # TODO: notifiy user
        dMsg = "Please check the rendered result in preview pane first!\n"
        dMsg += "Please close this message, and wait until "
        dMsg += "the preview finishes loading before trying again."
        #window.alert(dMsg)
        atom.notifications.addWarning('Content not rendered!', {detail: dMsg, dismissable: true})
        return
      if previewView.loading then utils.timeOut(500)
      html = previewView[0].innerHTML
      # TODO: Need to properly handle CSS selection
      tmpCss = if (window.evnd.cssTheme? and window.evnd.cssCode?) then (window.evnd.cssTheme + window.evnd.cssCode) else window.evnd.loadCSS()
      #tmpCss = @getMarkdownPreviewCSS()
      newHtmlContent = html
      noteFormat = "Markdown"
      # Send resulting HTML to Evernote Application (create a new note or update)
    else if utils.isHTML(curFilePath) or
       editor?.getGrammar()?.scopeName in ['text.html.basic'] or
       (utils.isText(curFilePath) and
       atom.config.get('ever-notedown.defaultFormat') is 'HTML')
      newHtmlContent = newTextContent
      noteFormat = "HTML"
    else # no need to convert
      newHtmlContent = null
      noteFormat = "Text"
    options =
      title: newTitle
      text: textContent # This will include MetaData section...
      tags: tags
      notebook: {name: notebookName}
      metaDate: date
      rawHTML: newHtmlContent
      css: tmpCss
      format: noteFormat
      update: update
      filePath: curFilePath
      renderOptions: {mathjax: atom.config.get('ever-notedown.mathjax')}
    options.moved = moved
    if not moved
      options.path = path.dirname(curFilePath)
      options.fnStem = path.basename(curFilePath, path.extname(curFilePath))
    # Send content to Evernote Application (create a new note or update)
    try
      @toEvernote options, previewView, (curNote) =>
        if options.moved then @openNote(curNote)
    catch error
      @handleToEvernoteError(error, options)
    # TODO: Open the written file (in the default GIT repo)
    # TODO: Async?
    if options.moved
      for editor in atom.workspace.getTextEditors() when editor.getPath() is curFilePath
        @removePreviewForEditor(editor)
    else
      @addPreviewForEditor(editor)
toEvernote: (options, previewView, callback) ->
evernoteHelper ?= require './evernote-helper'
window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
# Send resulting HTML to Evernote Application (create a new note)
# Note: This function contains an async call (osa)
# In the callback function of osa, a global variable should be updated
# TODO: tags, other implicit info encoding, etc.
options.update ?= false
noteHelper ?= require './note-helper'
if options.update
curNote = noteHelper.findNote(window.evnd.noteIndex, {title: options.title, fnStem: path.basename(options.filePath, path.extname(options.filePath)), dir: path.basename(path.dirname(options.filePath))})
if curNote is null
options.update = false
#console.log("Note not found in current note index")
switch options.format
when "Markdown" then curNote = new noteHelper.MarkdownNote(options)
when "Text" then curNote = new noteHelper.TextNote(options)
else curNote = new noteHelper.HTMLNote(options)
else
#console.log("Note found in current note index")
curNote.update window.evnd.storageManager, options
else
switch options.format
when "Markdown" then curNote = new noteHelper.MarkdownNote(options)
when "Text" then curNote = new noteHelper.TextNote(options)
else curNote = new noteHelper.HTMLNote(options)
#console.log("Current Note entity title: " + curNote.title)
window.evnd.noteIndex.addnote(curNote)
# TODO: Async call in storage manager
window.evnd.storageManager.addNote curNote, false, null, () =>
#console.log("Sending to evernote..." + utils.getCurrentTimeString())
unless previewView?
openNoteOptions =
searchAllPanes: true
addPreview: true
@openNote curNote, openNoteOptions, (editor) =>
previewView = @getPreviewViewForEditor(editor)
updateNoteNormal = () =>
ensyncs = previewView?[0].querySelectorAll('#evernote-syncing') ? []
for ensync in ensyncs
ensync?.style.visibility = 'visible'
ensync?.previousSibling.classList.add('faded')
window.evnd.enHelper.updateNote curNote, curNote.addAttachments, true, (updateSuccess) =>
if updateSuccess
window.evnd.enHelper.getENML curNote, curNote.queryString, (enml) =>
curNote.update(window.evnd.storageManager, {enml:enml, dontChangeTime:true})
curNote.lastSyncDate = curNote.enModificationDate
ensyncs = previewView?[0].querySelectorAll('#evernote-syncing') ? []
for ensync in ensyncs
ensync?.style.visibility = 'hidden'
ensync?.previousSibling.classList.remove("faded")
ensync?.parentNode.parentNode.classList.remove("evnd-yellow")
ensync?.parentNode.parentNode.classList.remove("evnd-red")
ensyncs = previewView?[0].querySelectorAll('#pull-syncing') ? []
for ensync in ensyncs
ensync?.parentNode.parentNode.classList.remove("evnd-yellow")
ensync?.parentNode.parentNode.classList.remove("evnd-red")
gitMessage = "Update Evernote note \"#{curNote.title}\" success!\n"
gitMessage += "#{curNote.summary()}"
window.evnd.storageManager.addNote curNote, true, gitMessage
#console.log(gitMessage)
#window.alert(gitMessage.split(/[\n\r]/g)[0])
atom.notifications.addSuccess(gitMessage.split(/[\n\r]/g)[0])
else
#console.log "Update failed!"
window.alert "Update failed!"
ensyncs = previewView?[0].querySelectorAll('#evernote-syncing')
for ensync in ensyncs
ensync?.style.visibility = 'hidden'
ensync?.previousSibling.classList.remove("faded")
createNoteNormal = () =>
ensyncs = previewView?[0].querySelectorAll('#evernote-syncing') ? []
for ensync in ensyncs
ensync?.style.visibility = 'visible'
ensync?.previousSibling.classList.add('faded')
window.evnd.enHelper.createNewNote curNote, (createSuccess) =>
if createSuccess
window.evnd.enHelper.getENML curNote, curNote.queryString, (enml) =>
curNote.update(window.evnd.storageManager, {enml:enml, dontChangeTime:true})
curNote.lastSyncDate = curNote.enModificationDate ? curNote.enCreationDate
ensyncs = previewView?[0].querySelectorAll('#evernote-syncing') ? []
for ensync in ensyncs
ensync?.style.visibility = 'hidden'
ensync?.previousSibling.classList.remove("faded")
ensync?.parentNode.parentNode.classList.remove("evnd-yellow")
ensync?.parentNode.parentNode.classList.remove("evnd-red")
ensyncs = previewView?[0].querySelectorAll('#pull-syncing') ? []
for ensync in ensyncs
ensync?.parentNode.parentNode.classList.remove("evnd-yellow")
ensync?.parentNode.parentNode.classList.remove("evnd-red")
gitMessage = "Create new Evernote note \"#{curNote.title}\" success!\n"
gitMessage += "#{curNote.summary()}"
window.evnd.storageManager.addNote curNote, true, gitMessage
#console.log(gitMessage)
#window.alert(gitMessage.split(/[\n\r]/g)[0])
atom.notifications.addSuccess(gitMessage.split(/[\n\r]/g)[0])
else
window.alert "Something went wrong when trying to create new note..."
ensyncs = previewView?[0].querySelectorAll('#evernote-syncing')
for ensync in ensyncs
ensync?.style.visibility = 'hidden'
ensync?.previousSibling.classList.remove("faded")
saveOnly = () =>
gitMessage = "Locally updated note \"#{curNote.title}\"\n"
gitMessage += "#{curNote.summary()}"
window.evnd.storageManager.addNote curnote. true, gitMessage
#console.log(gitMessage)
window.alert(gitMessage.split(/[\n\r]/g)[0])
if options.update
window.evnd.enHelper.getNoteInfo curNote, null, (enNoteInfo) =>
if enNoteInfo?
#console.log("enNoteInfo: " + JSON.stringify(enNoteInfo, null, 4))
#console.log("curNote.lastSyncDate: " + utils.enDateToTimeString(curNote.lastSyncDate))
#console.log("curNote.modificationTime: " + curNote.modificationTime)
if enNoteInfo.enModificationDate isnt curNote.lastSyncDate
dMsg = "On the Evernote client side, this note was last modified "
dMsg += "on #{utils.enDateToTimeString(enNoteInfo.enModificationDate)}. "
dMsg += "But the last time the local note was in sync with the "
dMsg += "Evernote client was #{utils.enDateToTimeString(curNote.lastSyncDate)}.\n"
dMsg += "The local note was modified on #{curNote.modificationTime}.\n"
dMsg += "If you choose \"Update anyway\", the note content in the "
dMsg += "Evernote database will be overwritten AFTER the note is "
dMsg += "exported (you can find the exported note in the EVND folder).\n"
dMsg += "If you choose \"Save only\", the note content will be "
dMsg += "saved to the local EVND folder (with GIT commit), but "
dMsg += "no info will be sent to the Evernote client."
atom.confirm
message: "Alert: possible conflicts!"
detailedMessage: dMsg
buttons:
"Update anyway": -> updateNoteNormal()
"Save only": -> saveOnly()
"Cancel": -> return #console.log("cancelled update note")
else
lastSyncTime = utils.enDateToTimeString(curNote.lastSyncDate)
tMinStr = utils.timeMin(lastSyncTime, curNote.modificationTime)
#console.log(tMinStr)
if tMinStr isnt curNote.modificationTime
updateNoteNormal()
else
window.alert("Note hasn't changed, nothing to update.")
else # no note info was found
createNoteNormal()
else
createNoteNormal()
if callback? then callback(curNote)
  # Open a fresh, unsaved editor pre-filled with `initText` (or the note
  # template), intercept core:save so the first save goes through
  # saveNewNote, set the default grammar, and optionally attach a preview.
  #   initText - optional String used as the initial buffer content
  #   options  - optional Object for atom.workspace.open; honors an extra
  #              `addPreview` flag (default true), stripped before use
  #   callback - optional Function called with the new editor
  openNewNote: (initText, options, callback) ->
    # TODO: Template?
    if window.evnd.init then @loadModule()
    initText ?= window.evnd.template ? @loadTemplate()
    if options?.addPreview?
      addPreview = options.addPreview
      delete options.addPreview
    else
      addPreview = true
    # Pre-create the timestamped note directory so saveNewNote has a target.
    tmpDirPath = @makeNoteDir()
    fs.makeTreeSync(tmpDirPath) unless fs.isDirectorySync(tmpDirPath)
    options ?= {}
    if (not options.split?) and atom.config.get('ever-notedown.openPreviewInSplitPane')
      options.split = 'left'
    atom.workspace.open('', options).then (editor) =>
      if initText then editor.setText(initText)
      editorElement = atom.views.getView(editor)
      # One-shot core:save override for this path-less buffer; disposed by
      # saveNewNote once the note is written to disk.
      window.evnd.newNoteDisposables[editor.id] = atom.commands.add editorElement,
        'core:save': (event) =>
          event.stopPropagation()
          @saveNewNote(editor, tmpDirPath)
      switch atom.config.get('ever-notedown.defaultFormat')
        when 'Text' then scopeName = 'text.plain'
        when 'Markdown' then scopeName = @getMarkdownScopeName()
        when 'HTML' then scopeName = 'text.html.basic'
      grammar = atom.grammars.grammarForScopeName(scopeName)
      if grammar? then editor.setGrammar(grammar)
      if addPreview
        @addPreviewForEditor editor, null, (previewView) =>
          if callback? then callback(editor)
      else if callback?
        callback(editor)
makeNoteDir: ->
tmpTimeString = utils.getSanitizedTimeString()
tmpIndex = tmpTimeString.indexOf('_')
tmpDirName = if tmpIndex > -1 then tmpTimeString.slice(0, tmpIndex) else tmpTimeString
gitPath = @getRealGitPath()
tmpDirPath = path.join gitPath, tmpDirName
return tmpDirPath
  # Persist a brand-new note: parse the buffer's metadata section, prompt
  # for a save path, write the file, register the note with the storage
  # manager (git commit), and re-attach a preview bound to the new note.
  #   editor  - TextEditor holding the unsaved note
  #   noteDir - optional target directory; defaults to a fresh timestamped
  #             directory from makeNoteDir()
  saveNewNote: (editor, noteDir) ->
    noteDir ?= @makeNoteDir()
    text = editor.getText()
    parsedInput = utils.parseMetaData(text)
    title = parsedInput.title
    textContent = parsedInput.content
    tags = parsedInput.tags
    date = parsedInput.date
    notebookName = parsedInput.notebook
    metaText = parsedInput.metaText
    filePath = path.join noteDir, utils.sanitizeFilename(title.toLowerCase()) + ".markdown"
    # showSaveDialogSync returns a falsy value when the user cancels.
    if noteFilePath = atom.showSaveDialogSync(filePath)
      options =
        title: title
        text: text # This will include MetaData section...
        tags: tags
        notebook: {name: notebookName}
        metaDate: date
        format: "Markdown"
        filePath: noteFilePath
      fs.writeFileSync(noteFilePath, text)
      # The one-shot core:save override from openNewNote is no longer needed.
      window.evnd.newNoteDisposables?[editor.id]?.dispose()
      @removePreviewForEditor(editor)
      editor.getBuffer().setPath(noteFilePath)
      newNote = new noteHelper.MarkdownNote(options)
      editor.save()
      @addPreviewForEditor(editor, newNote)
      gitMessage = "Created new note \"#{title}\" (locally) ...\n"
      gitMessage += "#{newNote.summary()}"
      window.evnd.storageManager.addNote newNote, true, gitMessage
      if atom.config.get('ever-notedown.mathjax')
        @setMathJaxGrammar(editor)
      else
        @setEVNDGrammar(editor)
  # Open an existing note's file in an editor, set the grammar from the
  # note's format, and attach (or detach) a preview pane.
  #   note     - note object providing absPath() and format
  #   options  - optional Object for atom.workspace.open; honors an extra
  #              `addPreview` flag (default true), stripped before use
  #   callback - optional Function called with the editor once ready
  openNote: (note, options, callback) ->
    # TODO: What if the current note isn't of format "Markdown"?
    #console.log "Opening note..."
    absPath = note.absPath()
    if options?.addPreview?
      addPreview = options.addPreview
      delete options.addPreview
    else
      addPreview = true
    options ?= {searchAllPanes: true}
    if (not options.split?) and atom.config.get('ever-notedown.openPreviewInSplitPane')
      options.split = 'left'
    atom.workspace.open(absPath, options).then (editor) =>
      switch note.format
        when 'Text' then scopeName = 'text.plain'
        when 'Markdown' then scopeName = @getMarkdownScopeName()
        when 'HTML' then scopeName = 'text.html.basic'
      grammar = atom.grammars.grammarForScopeName(scopeName)
      if grammar? then editor.setGrammar(grammar)
      #console.log "Note opened, now dealing with preview..."
      if addPreview
        @addPreviewForEditor editor, note, (previewView) =>
          if callback? then callback(editor)
      else
        @removePreviewForEditor(editor)
        #console.log "Note and preview opened, now handling callback..."
        if callback? then callback(editor)
openNoteInEvernote: (noteID, filePath, previewView) ->
if window.evnd.init then @loadModule()
if previewView?.note?
note = previewView.note
else if previewView?.noteID?
note = noteHelper.findNote(window.evnd.noteIndex, {id: previewView.noteID})
else if noteID?
note = noteHelper.findNote(window.evnd.noteIndex, {id: noteID})
else if filePath?
gitPath0 = atom.config.get('ever-notedown.gitPath')
gitPath1 = @getRealGitPath()
if filePath.indexOf(gitPath0) > -1 or
filePath.indexOf(gitPath1) > -1
fnStem = path.basename(filePath, path.extname(filePath))
dir = path.basename(path.dirname(filePath))
note = noteHelper.findNote(window.evnd.noteIndex, {fnStem: fnStem, dir: dir})
else
note = null
else
note = @searchedOpenedNote()
unless note?
window.alert("No opened note found!")
return
window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
window.evnd.enHelper.openNote note, () =>
#console.log "New note opened in Evernote!"
return
  # Reveal `notePath` in the OS file browser via the Evernote helper.
  openFinder: (notePath) ->
    if window.evnd.init then @loadModule()
    window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
    window.evnd.enHelper.openFinder notePath, () =>
      #console.log "Note directory opened in Finder!"
      return
  # Locate the note behind the currently active pane item: either the active
  # text editor's file (when it lives under the EVND git repo) or the file
  # backing the active EVND preview view. Returns the note, or undefined
  # when the active item is not an EVND note.
  searchOpenedNote: () ->
    noteHelper ?= require './note-helper'
    gitPath0 = atom.config.get('ever-notedown.gitPath')
    gitPath1 = @getRealGitPath()
    editor = atom.workspace.getActiveTextEditor()
    if editor? and
       (editor.getPath().indexOf(gitPath0) > -1 or
        editor.getPath().indexOf(gitPath1) > -1)
      filePath = editor.getPath()
      fnStem = path.basename(filePath, path.extname(filePath))
      dir = path.basename(path.dirname(filePath))
      note = noteHelper.findNote(window.evnd?.noteIndex, {fnStem: fnStem, dir: dir})
    else
      # No qualifying editor — fall back to the active preview view, which
      # may carry either an editor or a bare filePath.
      curView = atom.workspace.getActivePaneItem()
      if isEVNDPreviewView(curView)
        if curView.editor?
          curFilePath = curView.editor.getPath()
        else
          curFilePath = curView.filePath
        if curFilePath? and
           (curFilePath.indexOf(gitPath0) > -1 or
            curFilePath.indexOf(gitPath1) > -1)
          fnStem = path.basename(curFilePath, path.extname(curFilePath))
          dir = path.basename(path.dirname(curFilePath))
          note = noteHelper.findNote(window.evnd?.noteIndex, {fnStem: fnStem, dir: dir})
    return note
  # Fetch the raw ENML (Evernote markup) for a note and show it in a new
  # editor with XML grammar. The note is resolved from the argument, a note
  # ID, or the currently opened note, in that order.
  getNoteENML: ({note, noteID}={}) ->
    if window.evnd.init then @loadModule()
    unless note?
      if noteID?
        note = noteHelper.findNote(window.evnd.noteIndex, {id: noteID})
      else
        note = @searchOpenedNote()
    return unless note?
    window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
    window.evnd.enHelper.getENML note, null, (enml) =>
      if enml?
        tmpDir = note.path
        options = {}
        if atom.config.get('ever-notedown.openPreviewInSplitPane')
          options.split = 'left'
        atom.project.setPaths([tmpDir])
        atom.workspace.open('', options).then (editor) =>
          editor.setText(enml)
          grammar = atom.grammars.grammarForScopeName('text.xml')
          if grammar? then editor.setGrammar(grammar)
          return
      else
        window.alert "Something went wrong and getting ENML failed..."
        return
getNoteHTML: ({note, noteID}={}) ->
if window.evnd.init then @loadModule()
unless note?
if noteID?
note = noteHelper.findNote(window.evnd.noteIndex, {id: noteID})
else
note = searchOpenedNote()
return unless note?
window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
window.evnd.enHelper.getHTML note, null, (html) =>
if html?
tmpDir = note.path
options = {}
if atom.config.get('ever-notedown.openPreviewInSplitPane')
options.split = 'left'
atom.project.setPaths([tmpDir])
atom.workspace.open('', options).then (editor) =>
editor.setText(html)
grammar = atom.grammars.grammarForScopeName('text.html.basic')
if grammar? then editor.setGrammar(grammar)
return
else
window.alert "Something went wrong and getting HTML failed..."
return
confirmedNoteItem: ({note, noteID}={}) ->
if window.evnd.init then @loadModule()
unless note?
if noteID?
note = noteHelper.findNote(window.evnd.noteIndex, {id: noteID})
else
note = searchOpenedNote()
return unless note?
window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
window.evnd.enHelper.getNoteInfo note, null, (newNoteInfo) =>
if newNoteInfo?
window.evnd.enHelper.getAttachmentsInfo note, newNoteInfo.queryString, (newAttachmentsInfo) =>
InfoDialog ?= require './info-dialog'
infoDialog = new InfoDialog()
infoDialog.addInfo(note, newNoteInfo, newAttachmentsInfo)
infoDialog.show()
infoDialog.disposables.add infoDialog.onDidClickDelete (noteID) =>
@deleteNote({noteID:noteID})
infoDialog.disposables.add infoDialog.onDidOpenNote (noteID) =>
note = noteHelper.findNote(window.evnd.noteIndex, {id: noteID})
@openNote(note)
infoDialog.disposables.add infoDialog.onDidPullNote (noteID) =>
@pullFromEvernote(noteID)
@subscriptions.add infoDialog.disposables
else
window.alert("Note info retrieve error! (Maybe this note has not been sent to Evernote? Or it might have already been deleted in Evernote.)")
@openNote(note)
  # Remove a note from the note index after user confirmation, closing any
  # pane items showing its file. Files on disk are left untouched.
  #   callback - optional Function(Boolean) — true when deleted, false when
  #              the user cancelled
  deleteNote: ({note, noteID, noteTitle}={}, callback) ->
    if window.evnd.init then @loadModule()
    if not note?
      if noteID?
        note = noteHelper.findNote(window.evnd.noteIndex, {id: noteID})
      else if noteTitle?
        note = noteHelper.findNote(window.evnd.noteIndex, {title: noteTitle})
      else
        note = @searchOpenedNote()
    unless note?
      #console.log "No active note (editor or preview) found!"
      return
    # TODO
    confirmedDeleteNote = (note, callback) ->
      window.evnd.noteIndex?.removeNote(note)
      #console.log "Note #{note.title} deleted..."
      for paneItem in atom.workspace.getPaneItems()
        if paneItem.getPath? and paneItem.getPath() is note.absPath()
          paneItem.destroy()
      if callback? then callback(true)
    atom.confirm
      message: "Confirm: Delete Note \"#{note.title}\"?"
      detailedMessage: "This action will remove note \"#{note.title}\" from note Index, but related files will remain on disk for now."
      buttons:
        "Confirm": => confirmedDeleteNote(note, callback)
        "Cancel": =>
          #console.log "Cancelled deleting note..."
          if callback? then callback(false)
  # Import a note identified by its Evernote note link. If the note is
  # already indexed, just pull its latest content; otherwise build a new
  # local note entity from the Evernote-side metadata, export the note
  # files, commit, and then pull.
  importFromEvernote: ({noteLink} = {}) ->
    if window.evnd.init then @loadModule()
    return unless noteLink?
    note = noteHelper.findNote(window.evnd.noteIndex, {noteLink: noteLink})
    if note?
      @pullFromEvernote(note.id, note.path, null)
    else # Construct a new note entity
      # TODO: note format? Markdown? HTML?
      window.evnd.enHelper.getNoteInfo null, {noteLink: noteLink}, (noteInfo) =>
        enModificationTimeStr = utils.enDateToTimeString(noteInfo.enModificationDate)
        noteInfo.creationTime = enModificationTimeStr
        noteInfo.modificationTime = enModificationTimeStr
        note = new noteHelper.MarkdownNote(noteInfo)
        enDest = path.join(note.path, note.fnStem) + "_evernote"
        window.evnd.enHelper.retrieveNote noteLink, note.queryString, enDest, () =>
          # NOTE(review): presumably gives the export a moment to settle on
          # disk before committing — confirm whether this delay is needed.
          utils.timeOut(200)
          if not ("#{enDest}.html/" in note.enExportedFiles)
            note.enExportedFiles.push("#{enDest}.html/")
          if not ("#{enDest}.enex" in note.enExportedFiles)
            note.enExportedFiles.push("#{enDest}.enex")
          gitMessage = "About to import Evernote note \"#{note.title}\" ...\n"
          gitMessage += "#{note.summary()}"
          window.evnd.storageManager.addNote note, true, gitMessage
          @pullFromEvernote(note.id, note.path, null)
pullFromEvernote: (noteID, filePath, previewView) ->
if window.evnd.init then @loadModule()
if noteID?
note = noteHelper.findNote(window.evnd.noteIndex, {id: noteID})
else if filePath?
gitPath0 = atom.config.get('ever-notedown.gitPath')
gitPath1 = @getRealGitPath()
if filePath.indexOf(gitPath0) or filePath.indexOf(gitPath1)
fnStem = path.basename(filePath, path.extname(filePath))
dir = path.basename(path.dirname(filePath))
note = noteHelper.findNote(window.evnd.noteIndex, {fnStem: fnStem, dir: dir})
else
note = @searchedOpenedNote()
unless note?
window.alert("No opened note found!")
return
pullNoteNormal = (note, options) =>
window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
window.evnd.enHelper.pullNote note, (updated, textContent, html, newNoteInfo) =>
#console.log "Note pulled..."
if not updated
@openNote note, null, () =>
window.alert("Nothing unsync'd! Opening note...")
return
else
openNoteOptions = {addPreview: true}
if options?.newPane or atom.config.get('ever-notedown.pulledContentInSplitPane')
openNoteOptions.addPreview = false
@openNote note, options, () =>
textContent = note.metaTextFromNoteInfo(newNoteInfo) + textContent
for editor in atom.workspace.getTextEditors() when editor.getPath() is note.absPath()
oldText = editor.getText()
if openNoteOptions.addPreview
editor.setText(textContent)
else
openNewNoteOptions = {addPreview:false, split: "right", activatePane: true}
visibleScreenRowRange = editor.getVisibleRowRange()
@openNewNote textContent, openNewNoteOptions, (newEditor) =>
row1 = visibleScreenRowRange[0]
row2 = visibleScreenRowRange[1]
try
newEditor.scrollToScreenPosition [parseInt((row1 + row2)/2), 0], {center: true}
catch e
console.log e
break
if openNoteOptions.addPreview
ConfirmDialog ?= require './confirm-dialog'
confirmDialogOptions =
editorId: editor.id
filePath: editor.getPath()
note: note
oldText: oldText
newText: textContent
newNoteInfo: newNoteInfo
confirmDialog = new ConfirmDialog confirmDialogOptions
confirmDialog.show()
if window.evnd.searchNoteView? then window.evnd.searchNoteView.cancel()
conflictStatus = note.checkConflict()
unless conflictStatus.unsyncdModificationInAtomEVND
if previewView? and previewView.editor?.isModified()
conflictStatus.unsyncdModificationInAtomEVND = true
else
notePath = note.absPath()
for editor in atom.workspace.getTextEditors() when editor.getPath() is notePath
if editor.isModified()
conflictStatus.unsyncdModificationInAtomEVND = true
break
if conflictStatus.unsyncdModificationInAtomEVND
detailedMsg = "You can still go ahead and grab content from Evernote, "
detailedMsg += "whether the new content will be put in a new pane or "
detailedMsg += "oevewrite existing content depends on your settings"
detailedMsg += "(EVND will wait for your confirmation to write new "
detailedMsg += "onto disk).\nYour current setting: "
if atom.config.get('ever-notedown.pulledContentInSplitPane')
detailedMsg += "open grabbed content in a separate pane.\n"
else
detailedMsg += "overwrite existing content.\n"
detailedMsg += "You can also make sure that this time the new content "
detailedMsg += "is put into a separate pane.\n\n"
detailedMsg += "Please choose how to proceed: "
atom.confirm
message: "There are changes that have not been sent to Evernote."
detailedMessage: detailedMsg
buttons:
"Cancel": => return #console.log "Cancelled"
"Go ahead": => pullNoteNormal(note, {searchAllPanes: true})
"Put pulled content in a new pane": =>
pullNoteNormal(note, {newPane: true, searchAllPanes: true})
else
pullNoteNormal(note, {searchAllPanes: true})
  # Toggle the EVND preview: close it when a preview is the active pane
  # item, otherwise add/remove the preview for the active editor (only when
  # the editor's grammar is in the configured whitelist).
  togglePreview: ->
    if window.evnd.init then @loadModule()
    if isEVNDPreviewView(atom.workspace.getActivePaneItem())
      atom.workspace.destroyActivePaneItem()
      return
    editor = atom.workspace.getActiveTextEditor()
    return unless editor?
    grammars = atom.config.get('ever-notedown.grammars') ? []
    unless editor.getGrammar().scopeName in grammars
      scopeName = editor.getGrammar().scopeName
      warningMsg = "Cannot preview this file because grammar '#{scopeName}' isn't supported.\n"
      warningMsg += "\n(Current supported grammars set in EVND settings: #{grammars.toString()})"
      window.alert(warningMsg)
      return
    # removePreviewForEditor returns true when a preview existed (toggle off).
    @addPreviewForEditor(editor) unless @removePreviewForEditor(editor)
getPreviewViewForEditor: (editor) ->
uri = @uriForEditor(editor)
previewPane = atom.workspace.paneForURI(uri)
if previewPane?
evndPreviewView = previewPane.itemForURI(uri)
return evndPreviewView if isEVNDPreviewView(evndPreviewView)
return null
uriForEditor: (editor) ->
"ever-notedown-preview://editor/#{editor?.id}"
  # Destroy the preview item associated with `editor`, if any.
  # Returns true when a preview pane existed, false otherwise.
  removePreviewForEditor: (editor) ->
    uri = @uriForEditor(editor)
    previewPane = atom.workspace.paneForURI(uri)
    if previewPane?
      previewPane.destroyItem(previewPane.itemForURI(uri))
      true
    else
      false
  # Open (or focus) the EVND preview for `editor`, attach the given note —
  # or look it up from the editor's path — and restore focus to the pane
  # that was active before the preview opened.
  #   note     - optional note to bind to the preview
  #   callback - optional Function called with the preview view
  addPreviewForEditor: (editor, note, callback) ->
    uri = @uriForEditor(editor)
    previousActivePane = atom.workspace.getActivePane()
    options =
      searchAllPanes: true
    if atom.config.get('ever-notedown.openPreviewInSplitPane')
      options.split = 'right'
    atom.workspace.open(uri, options).then (evNotedownPreviewView) =>
      if isEVNDPreviewView(evNotedownPreviewView)
        filePath = editor.getPath()
        fnStem = path.basename(filePath, path.extname(filePath))
        dir = path.basename(path.dirname(filePath))
        note ?= noteHelper?.findNote(window.evnd?.noteIndex, {fnStem: fnStem, dir: dir})
        evNotedownPreviewView.note = note
        evNotedownPreviewView.noteID = note?.id
        if note? then evNotedownPreviewView.activateButtons()
        previousActivePane.activate()
        if callback? then callback(evNotedownPreviewView)
boldText: ->
if window.evnd.init then @loadModule()
editor = atom.workspace.getActiveTextEditor()
return unless editor?
selectedText = editor.getSelectedText()
options =
select: true
editor.insertText "**#{selectedText}**", options
  # Wrap the current selection in `_…_` (Markdown emphasis), selected.
  emphasisText: ->
    if window.evnd.init then @loadModule()
    editor = atom.workspace.getActiveTextEditor()
    return unless editor?
    selectedText = editor.getSelectedText()
    options =
      select: true
    editor.insertText "_#{selectedText}_", options
  # Wrap the current selection in `<u>…</u>` (HTML underline), selected.
  underlineText: ->
    if window.evnd.init then @loadModule()
    editor = atom.workspace.getActiveTextEditor()
    return unless editor?
    selectedText = editor.getSelectedText()
    options =
      select: true
    editor.insertText "<u>#{selectedText}</u>", options
  # Wrap the current selection in `<mark>…</mark>` (HTML highlight), selected.
  highlightText: ->
    if window.evnd.init then @loadModule()
    editor = atom.workspace.getActiveTextEditor()
    return unless editor?
    selectedText = editor.getSelectedText()
    options =
      select: true
    editor.insertText "<mark>#{selectedText}</mark>", options
  # Wrap the current selection in `~~…~~` (Markdown strikethrough), selected.
  strikeThroughText: ->
    if window.evnd.init then @loadModule()
    editor = atom.workspace.getActiveTextEditor()
    return unless editor?
    selectedText = editor.getSelectedText()
    options =
      select: true
    editor.insertText "~~#{selectedText}~~", options
blockquote: ->
if window.evnd.init then @loadModule()
editor = atom.workspace.getActiveTextEditor()
return unless editor?
selectedText = editor.getSelectedText()
selectedTextLines = selectedText.toString().split(/[\n\r]/)
for i in [0..selectedTextLines.length-1]
selectedTextLines[i] = "> #{selectedTextLines[i]}"
newText = selectedTextLines.join("\n")
options =
select: true
editor.insertText newText, options
  # Handle pasting image data: write clipboard PNG bytes to a temp file
  # under the EVND repo (or the Atom config dir) and insert a reference;
  # for a pasted file path, rewrite the clipboard into markdown syntax.
  # NOTE(review): `editor? isnt ''` is always true (editor? is a boolean),
  # so the guard effectively only checks editor existence and pane focus.
  # NOTE(review): the inserted/written image stubs below have empty
  # alt/path segments — presumably the saved `newPath` was meant to be
  # interpolated; confirm against the package history.
  pasteImage: () ->
    if window.evnd.init then @loadModule()
    editor = atom.workspace.getActiveTextEditor()
    return unless editor? and editor? isnt '' and atom.workspace.getActivePane().isFocused()
    image = clipboard.readImage()
    if not image.isEmpty()
      # Decode the PNG to raw bytes for a binary write.
      buf = image.toPng()
      imgBin = atob(buf.toString('base64'))
      timeStr = utils.sanitizeTimeString(utils.getCurrentTimeString())
      if window.evnd.storageManager?.gitPath
        newPath = path.join(window.evnd.storageManager.gitPath, 'tmp/', "clipboard_#{timeStr}.png")
      else
        newPath = path.join(atom.getConfigDirPath(), 'evnd/tmp/', "#{timeStr}.png")
      fs.writeFileSync(newPath, imgBin, 'binary')
      editor.insertText("")
    else
      filePath = clipboard.readText().trim()
      if fs.isFileSync(filePath)
        if utils.isImage(filePath)
          clipboard.writeText("")
        else
          clipboard.writeText("!{Alt text}(#{filePath} \"Optional title\")") # Attachment...
      else
        return
  # Handle files dropped onto a Markdown editor: insert an image or
  # attachment reference for each dropped path.
  # NOTE(review): the image branch inserts an empty `` stub and the
  # attachment branch embeds a literal "#(unknown)" title — these look like
  # placeholders for an interpolated path/name; confirm intended text.
  onDrop: (event) ->
    utils ?= require './utils'
    _ ?= require 'underscore-plus'
    path ?= require 'path'
    editor = atom.workspace.getActiveTextEditor()
    return unless editor?
    curPath = editor.getPath()
    return unless utils.isMarkdown(curPath)
    # Swallow the event so Atom doesn't also open the dropped files.
    event.preventDefault()
    event.stopPropagation()
    pathsToDrop = _.pluck(event.dataTransfer.files, 'path')
    # TODO: Pop up warning if there're spaces in filenames!
    if pathsToDrop.length > 0
      for onePath in pathsToDrop
        continue unless onePath?
        filename = path.basename(onePath)
        if utils.isImage(filename)
          attachmentText = " "
        else
          attachmentText = " !{attachment}(#{onePath} \"#(unknown)\") "
        editor.insertText(attachmentText)
    return
  # Open a preview for the file referenced by a DOM element's data-path:
  # reuse an editor-bound preview when the file is already open, otherwise
  # open a standalone file preview by URI.
  previewFile: ({target}) ->
    if window.evnd.init then @loadModule()
    filePath = target.dataset.path
    return unless filePath
    for editor in atom.workspace.getTextEditors() when editor.getPath() is filePath
      @addPreviewForEditor(editor)
      return
    atom.workspace.open "ever-notedown-preview://#{encodeURI(filePath)}",
      searchAllPanes: true
  # Save the rendered HTML of the active editor's preview (creating the
  # preview first if needed), or of the active preview pane item itself.
  saveHtml: ->
    if window.evnd.init then @loadModule()
    editor = atom.workspace.getActiveTextEditor()
    paneItem = atom.workspace.getActivePaneItem()
    return unless editor? or isEVNDPreviewView(paneItem)
    if editor?
      previewView = @getPreviewViewForEditor(editor)
      if previewView?
        previewView?.saveAs()
      else
        @addPreviewForEditor editor, null, (previewView) ->
          #previewView = @getPreviewViewForEditor(editor)
          previewView?.saveAs()
    else if isEVNDPreviewView(paneItem)
      paneItem.saveAs()
  # Copy the rendered HTML of the active editor's preview (creating the
  # preview first if needed), or of the active preview pane item itself.
  copyHtml: ->
    if window.evnd.init then @loadModule()
    editor = atom.workspace.getActiveTextEditor()
    paneItem = atom.workspace.getActivePaneItem()
    return unless editor? or isEVNDPreviewView(paneItem)
    if editor?
      previewView = @getPreviewViewForEditor(editor)
      if previewView?
        previewView?.copyToClipboard()
      else
        @addPreviewForEditor editor, null, (previewView) ->
          #previewView = @getPreviewViewForEditor(editor)
          previewView?.copyToClipboard()
    else if isEVNDPreviewView(paneItem)
      paneItem.copyToClipboard()
getMarkdownScopeName: ->
grammar = @getEVNDGrammar()
scopeName = grammar?.scopeName ? 'source.gfm'
return scopeName
  # Map the configured EVND grammar display name (plus the MathJax flag) to
  # a TextMate scope name; both parameters default to current config values.
  # NOTE(review): returns undefined when the config value matches neither
  # known grammar name — callers appear to tolerate that.
  getEVNDGrammarScopeName: ({evndGrammar, mathjax}={})->
    scopeNameDict =
      litcoffee: 'source.litcoffee'
      litcoffeeMathJax: 'text.markdown.evnd.mathjax.source.litcoffee.inline.html'
      gfm: 'text.markdown.evnd.source.gfm.inline.html'
      gfmMathJax: 'text.markdown.evnd.mathjax.source.gfm.inline.html'
    evndGrammar ?= atom.config.get('ever-notedown.evndGrammar')
    mathjax ?= atom.config.get('ever-notedown.mathjax')
    switch evndGrammar
      when 'Extended source.litcoffee'
        scopeName = if mathjax then scopeNameDict.litcoffeeMathJax else scopeNameDict.litcoffee
      when 'Extended source.gfm'
        scopeName = if mathjax then scopeNameDict.gfmMathJax else scopeNameDict.gfm
    return scopeName
getEVNDGrammar: ({mathjax}={}) ->
scopeName = @getEVNDGrammarScopeName({mathjax: mathjax})
grammar = atom.grammars.grammarForScopeName(scopeName)
if grammar?
return grammar
# grammar doesn't exists?
evndGrammar = atom.config.get('ever-notedown.evndGrammar')
switch evndGrammar
when 'Extended source.litcoffee'
gramamr = atom.grammars.grammarForScopeName('source.litcoffee')
when 'Extended source.gfm'
grammar = atom.grammars.grammarForScopeName('source.gfm')
return gramamr
  # Ensure the helper grammar for inline HTML (evnd.inline.html) is
  # registered; it is a dependency of the extended EVND grammars.
  addInlineHTMLGrammar: ->
    inlineHTMLGrammar = atom.grammars.grammarForScopeName('evnd.inline.html')
    unless inlineHTMLGrammar?
      inlineHTMLGrammarPath = path.join __dirname, 'grammars/', 'evnd-inline-html.cson'
      inlineHTMLGrammar = atom.grammars.readGrammarSync inlineHTMLGrammarPath
      atom.grammars.addGrammar inlineHTMLGrammar
addEVNDGrammar: ->
switch atom.config.get('ever-notedown.evndGrammar')
when 'Extended source.litcoffee' then grammarFileName = null
when 'Extended source.gfm' then grammarFileName = 'evnd.cson'
if grammarFileName?
@addInlineHTMLGrammar()
evndGrammarPath = path.join __dirname, 'grammars/', grammarFileName
evndGrammar = atom.grammars.readGrammarSync evndGrammarPath
atom.grammars.addGrammar(evndGrammar)
else
evndGrammar = atom.grammars.grammarForScopeName('source.gfm')
unless evndGramamr?
return
for editor in atom.workspace.getTextEditors()
editorPath = editor.getPath()
if editor.getGrammar()?.scopeName in evndGrammarList or
(editorPath? and utils.isMarkdown(editorPath))
editor.setGrammar(evndGrammar)
  # Unregister all extended EVND grammars and reset affected editors to the
  # stock 'source.gfm' grammar.
  removeEVNDGrammar: ->
    grammarsToRemove = [
      'text.markdown.evnd.mathjax.source.litcoffee.inline.html'
      'text.markdown.evnd.mathjax.source.gfm.inline.html'
      'text.markdown.evnd.source.gfm.inline.html'
    ]
    for scopeName in grammarsToRemove
      atom.grammars.removeGrammarForScopeName(scopeName)
    defaultGrammar = atom.grammars.grammarForScopeName('source.gfm')
    for editor in atom.workspace.getTextEditors()
      editorPath = editor.getPath()
      if editorPath? and editor.getGrammar()?.scopeName in evndGrammarList
        editor.setGrammar(defaultGrammar)
  # Apply the non-MathJax EVND grammar to `editor` when its current grammar
  # is one of the EVND-managed scopes.
  setEVNDGrammar: (editor) ->
    return unless editor?
    evndGrammar = @getEVNDGrammar({mathjax: false})
    if evndGrammar? and editor?.getGrammar()?.scopeName in evndGrammarList
      editor.setGrammar(evndGrammar)
  # Register the MathJax-enabled EVND grammar (per the configured base
  # grammar) and apply it to all open Markdown/EVND editors.
  addMathJaxGrammar: ->
    switch atom.config.get('ever-notedown.evndGrammar')
      when 'Extended source.litcoffee' then grammarFileName = 'evnd-litcoffee-mathjax.cson'
      when 'Extended source.gfm' then grammarFileName = 'evnd-mathjax.cson'
    if grammarFileName?
      @addInlineHTMLGrammar()
      mathjaxGrammarPath = path.join __dirname, 'grammars/', grammarFileName
      mathjaxGrammar = atom.grammars.readGrammarSync mathjaxGrammarPath
      atom.grammars.addGrammar(mathjaxGrammar)
    else
      # No extended grammar file applies — fall back to plain gfm.
      mathjaxGrammar = atom.grammars.grammarForScopeName('source.gfm')
      unless mathjaxGrammar?
        return
    for editor in atom.workspace.getTextEditors()
      editorPath = editor.getPath()
      if editor.getGrammar()?.scopeName in evndGrammarList or
          (editorPath? and utils.isMarkdown(editorPath))
        editor.setGrammar(mathjaxGrammar)
  # Apply the MathJax EVND grammar to `editor` when its current grammar is
  # one of the EVND-managed scopes.
  setMathJaxGrammar: (editor) ->
    return unless editor?
    mathjaxGrammar = @getEVNDGrammar({mathjax: true})
    if mathjaxGrammar? and editor?.getGrammar()?.scopeName in evndGrammarList
      editor.setGrammar(mathjaxGrammar)
  # Unregister the MathJax grammars and move any editor currently using a
  # mathjax scope back to the plain EVND grammar.
  removeMathJaxGrammar: ->
    grammarsToRemove = [
      'text.markdown.evnd.mathjax.source.litcoffee.inline.html'
      'text.markdown.evnd.mathjax.source.gfm.inline.html'
    ]
    for scopeName in grammarsToRemove
      atom.grammars.removeGrammarForScopeName(scopeName)
    evndGrammar = @getEVNDGrammar({mathjax: false})
    for editor in atom.workspace.getTextEditors()
      editorPath = editor.getPath()
      if editorPath? and editor.getGrammar()?.scopeName?.indexOf('mathjax') > -1
        editor.setGrammar(evndGrammar)
switchEVNDGrammar: (newEVNDGrammar, mathjax) ->
mathjax ?= atom.config.get('ever-notedown.mathjax')
newEVNDGrammarScopeName = @getEVNDGrammarScopeName({evndGrammar: newEVNDGrammar, mathjax: mathjax})
newEVNDGrammar = atom.grammars.grammarForScopeName(newEVNDGrammarScopeName)
if not newEVNDGrammar?
if mathjax then @addMathJaxGrammar() else @addEVNDGrammar()
return
else
for editor in atom.workspace.getTextEditors()
editorPath = editor.getPath()
editor.setGrammar(newEVNDGrammar)
  # Lazily require every heavy dependency, initialize the git repo, note
  # index and grammars, and install all config/workspace observers. Runs the
  # heavy path only once: `window.evnd.init` is cleared at the end and
  # callers guard on it (`if window.evnd.init then @loadModule()`).
  loadModule: ->
    {TextEditor} = require 'atom' unless TextEditor?
    utils ?= require './utils'
    CSON ?= require 'season'
    fs ?= require 'fs-plus'
    path ?= require 'path'
    git ?= require 'git-utils'
    _ ?= require 'underscore-plus'
    evernoteHelper ?= require './evernote-helper'
    storage ?= require './storage-manager'
    noteHelper ?= require './note-helper'
    mathjaxHelper ?= require './mathjax-helper'
    {fenceNameForScope} = require './extension-helper' unless fenceNameForScope?
    cheerio ?= require 'cheerio'
    clipboard ?= require 'clipboard'
    url ?= require 'url'
    SearchResultListView ?= require './search-result-list-view'
    SearchNoteView ?= require './search-note-view'
    NoteManagerView ?= require './note-manager-view' # Defer until used
    EVNDPreviewView ?= require './ever-notedown-preview-view' # Defer until used
    EVNDView ?= require './ever-notedown-view' # Defer until used
    renderer ?= require './renderer' # Defer until used
    if window.evnd.init
      # First-time initialization: re-render existing previews, load CSS /
      # template / grammars, then set up git storage and the note index.
      for paneItem in atom.workspace.getPaneItems() when isEVNDPreviewView(paneItem)
        paneItem.renderMarkdown()
      @loadCSS()
      @loadTemplate()
      if atom.config.get('ever-notedown.mathjax')
        @addMathJaxGrammar()
      else
        @addEVNDGrammar()
      @loadGitRepo null, null, (newStorageManager) =>
        window.evnd.storageManager = newStorageManager
        window.evnd.svgCollections = {}
        window.evnd.newNoteDisposables = {}
        window.evnd.gitPath = window.evnd.storageManager.gitPath
        window.evnd.gitPathSymlink = window.evnd.storageManager.gitPathSymlink
        @loadJSON (newNoteIndex) =>
          window.evnd.noteIndex = newNoteIndex
          if window.evnd.evndView? then window.evnd.evndView.refresh()
          # Re-attach notes to previews that were restored without one.
          for paneItem in atom.workspace.getPaneItems()
            if isEVNDPreviewView(paneItem) and not paneItem.note?
              filePath = paneItem.getPath()
              fnStem = path.basename(filePath, path.extname(filePath))
              dir = path.basename(path.dirname(filePath))
              note = noteHelper.findNote(window.evnd.noteIndex, {fnStem: fnStem, dir: dir})
              if (not paneItem.noteID?) and note?
                paneItem.noteID = note.id
                paneItem.attachNote(note)
      #
      # TODO: Implement this!
      #
      #@subscriptions.add atom.config.observe 'ever-notedown.renderDiagrams', (toRender) =>
      #  if toRender and not window.evnd.chartsLibsLoaded
      #    chartsHelper ?= require './charts-helper'
      #    chartsHelper.loadChartsLibraries()
      # Changing the git repo path requires user confirmation before the
      # storage manager and note index are reloaded.
      @subscriptions.add atom.config.onDidChange 'ever-notedown.gitPath', (event) =>
        newGitPath = event.newValue
        reloadGitRepo = =>
          @loadGitRepo newGitPath, null, (newStorageManager) =>
            if newStorageManager?
              window.evnd.storageManager = newStorageManager
              @loadJSON (newNoteIndex) =>
                window.evnd.noteIndex = newNoteIndex
                if window.evnd.evndView? then window.evnd.evndView.refresh()
                window.evnd.gitPath = newGitPath
        dmsg = "Changing git repo path for EVND to #{newGitPath}"
        if atom.config.get('ever-notedown.gitPathSymlink') then dmsg += " (symbolic link)"
        atom.confirm
          message: dmsg + "?"
          buttons:
            "Confirm": => reloadGitRepo()
            "Cancel": => return
            "Revert": =>
              atom.config.set 'ever-notedown.gitPath', event.oldValue
      @subscriptions.add atom.config.onDidChange 'ever-notedown.gitPathSymlink', (event) =>
        gitPathSymlink = event.newValue
        reloadGitRepo = =>
          @loadGitRepo null, gitPathSymlink, (newStorageManager) =>
            if newStorageManager?
              window.evnd.storageManager = newStorageManager
              @loadJSON (newNoteIndex) =>
                window.evnd.noteIndex = newNoteIndex
                if window.evnd.evndView? then window.evnd.evndView.refresh()
                window.evnd.gitPathSymlink = gitPathSymlink
        dmsg = "Changing git repo path for EVND to #{atom.config.get('ever-notedown.gitPath')}"
        if gitPathSymlink then dmsg += " (symbolic link)"
        atom.confirm
          message: dmsg + "?"
          buttons:
            "Confirm": => reloadGitRepo()
            "Cancel": => return
            "Revert": =>
              atom.config.set 'ever-notedown.gitPathSymlink', event.oldValue
      @subscriptions.add atom.config.observe 'ever-notedown.noteTemplate', (newTemplateName) =>
        @loadTemplate(newTemplateName)
      @subscriptions.add atom.config.onDidChange 'ever-notedown.theme', (event) =>
        newThemeName = event.newValue
        @loadCSS(newThemeName)
      @subscriptions.add atom.config.onDidChange 'ever-notedown.syntaxTheme', (event) =>
        newSyntaxThemeName = event.newValue
        @loadCSS(null, newSyntaxThemeName)
      # TODO: ...
      @subscriptions.add atom.config.observe 'ever-notedown.mathjax', (mathjax) =>
        if mathjax
          mathjaxHelper.loadMathJax()
          @addMathJaxGrammar()
        else
          mathjaxHelper.unloadMathJax()
          @removeMathJaxGrammar()
      @subscriptions.add atom.config.onDidChange 'ever-notedown.evndGrammar', (event) =>
        mathjax = atom.config.get('ever-notedown.mathjax')
        @switchEVNDGrammar(event.newValue, mathjax)
      @subscriptions.add atom.config.observe 'ever-notedown.mathjaxCustomMacros', (customMacros) =>
        mathjaxHelper.reconfigureMathJax() # TODO: this isn't working!
      # Sort order changed — drop cached list views so they rebuild.
      @subscriptions.add atom.config.observe 'ever-notedown.sortBy', (sortBy) =>
        window.evnd.noteManagerView?.destroy()
        window.evnd.noteManagerView = null
        window.evnd.searchResultListView?.destroy()
        window.evnd.searchResultListView = null
      @subscriptions.add atom.workspace.observeTextEditors (editor) =>
        if (editor?.getGrammar()?.scopeName in ['source.gfm', 'source.litcoffee']) or
            utils.isMarkdown(editor?.getPath?())
          if atom.config.get('ever-notedown.mathjax')
            @setMathJaxGrammar(editor)
          else
            @setEVNDGrammar(editor)
      # Keep the preview item visible in its pane whenever its editor is
      # the active item in a different pane.
      @subscriptions.add atom.workspace.observeActivePaneItem (activeItem) =>
        if activeItem is atom.workspace.getActiveTextEditor() and activeItem?.id
          previewView = @getPreviewViewForEditor(activeItem)
          if previewView?
            editorPane = atom.workspace.paneForItem(activeItem)
            previewPane = atom.workspace.paneForItem(previewView)
            if previewPane isnt editorPane and
                previewPane?.getActiveItem() isnt previewView
              previewPane.activateItem(previewView)
      # Mark initialization as done so subsequent calls skip this branch.
      window.evnd.init = false
| 79345 | # Used some code from https://github.com/atom/markdown-preview/blob/9ff76ad3f6407a0fb68163a538c6d460280a1718/lib/main.coffee
#
# Reproduced license info:
# Copyright (c) 2014 GitHub Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
path = require 'path'
{CompositeDisposable, Disposable} = require 'atom'
{File, Directory} = require 'atom'
{$} = require 'atom-space-pen-views'
TextEditor = null
CSON = null
chartsHelper = null
evernoteHelper = null # delay require './evernote-helper'
storage = null # delay require './storage-manager'
noteHelper = null # require './note-helper'
mathjaxHelper = null
utils = null
fenceNameForScope = null #require './extension-helper'
fs = null #require 'fs-plus'
git = null # requrie 'git-utils'
cheerio = null # require 'cheerio'
clipboard = null
_ = null # require 'underscore-plus'
# used some code from atom/markdown-preview/lib/main.coffee
url = null #require 'url'
NoteManagerView = null # Defer until used
EVNDPreviewView = null # Defer until used
EVNDView = null # Defer until used
NoteInfoView = null
ConfirmDialog = null
InfoDialog = null
SearchNoteView = null
SearchResultListView = null
renderer = null # Defer until used
# True when `object` is an instance of NoteInfoView (class loaded on demand).
isNoteInfoView = (object) ->
  NoteInfoView = require './info-dialog' unless NoteInfoView?
  return object instanceof NoteInfoView
# True when `object` is an instance of NoteManagerView (class loaded on demand).
isNoteManagerView = (object) ->
  # Fix: previously required './note-manager', but createNoteManagerView
  # loads the class from './note-manager-view' — use the same module so the
  # instanceof check tests against the correct constructor.
  NoteManagerView ?= require './note-manager-view'
  object instanceof NoteManagerView
# True when `object` is an instance of EVNDView (class loaded on demand).
isEVNDView = (object) ->
  EVNDView = require './ever-notedown-view' unless EVNDView?
  return object instanceof EVNDView
# Construct a preview pane item from (de)serialized `state`.
createEVNDPreviewView = (state) ->
  EVNDPreviewView = require './ever-notedown-preview-view' unless EVNDPreviewView?
  return new EVNDPreviewView(state)
# True when `object` is an instance of EVNDPreviewView (class loaded on demand).
isEVNDPreviewView = (object) ->
  EVNDPreviewView = require './ever-notedown-preview-view' unless EVNDPreviewView?
  return object instanceof EVNDPreviewView
# Register a deserializer so saved EVNDPreviewView pane items can be
# restored across Atom restarts.
atom.deserializers.add
  name: 'EVNDPreviewView'
  deserialize: (state) ->
    # Only plain serialized state objects are accepted; anything else is ignored.
    createEVNDPreviewView(state) if state.constructor is Object

# Fallback note-storage location inside Atom's config directory
# (used as the default for the `gitPath` setting below).
defaultGitPath = path.join atom.getConfigDirPath(), 'evnd/'
# Maps user-facing preview theme names (the `theme` config enum) to the
# package-relative CSS file implementing each theme.
themeDict =
  "Default": "assets/themes/default/style.css"
  "Default 2": "assets/themes/default2/style.css"
  "Default 3": "assets/themes/default3/style.css"
  "Atom": "assets/themes/atom/style.css"
  "Custom 1": "assets/themes/custom1/style.css"
  "Custom 2": "assets/themes/custom2/style.css"

# Maps syntax-highlighting theme names (the `syntaxTheme` config enum) to
# their package-relative CSS files.
syntaxThemeDict =
  "Default": "assets/themes-syntax/default/style.css"
  "Default 2": "assets/themes-syntax/default2/style.css"
  "One Light": "assets/themes-syntax/one-light/style.css"
  "One Dark": "assets/themes-syntax/one-dark/style.css"
  "Solarized Light": "assets/themes-syntax/solarized-light/style.css"
  "Solarized Dark": "assets/themes-syntax/solarized-dark/style.css"
  "Github": "assets/themes-syntax/github/style.css"
  "Chester": "assets/themes-syntax/chester/style.css"
  "Tomorrow": "assets/themes-syntax/tomorrow/style.css"
  "IDLE": "assets/themes-syntax/IDLE/style.css"
  "Seti Syntax": "assets/themes-syntax/seti-syntax/style.css"
  "Cobalt": "assets/themes-syntax/cobalt/style.css"
  "Monokai": "assets/themes-syntax/monokai/style.css"
  "Serpia": "assets/themes-syntax/serpia/style.css"
  "Custom 1": "assets/themes-syntax/custom1/style.css"
  "Custom 2": "assets/themes-syntax/custom2/style.css"

# Maps note-template names (the `noteTemplate` config enum) to template
# markdown files; "None" means an empty template.
noteTemplateDict =
  "Default": "assets/templates/default.markdown"
  "Lecture Notes": "assets/templates/lecture_notes.markdown"
  "None": ""
  "Custom 1": "assets/templates/custom1.markdown"
  "Custom 2": "assets/templates/custom2.markdown"

# Grammar scope names treated as EVND-compatible markdown in the editor
# (see sel2Evernote, which checks the active editor's grammar against this).
evndGrammarList = [
  'source.gfm'
  'source.litcoffee'
  'text.markdown.evnd.mathjax.source.litcoffee.inline.html'
  'text.markdown.evnd.mathjax.source.gfm.inline.html'
  'text.markdown.evnd.source.gfm.inline.html'
]
# Package-wide mutable state, kept on `window` so submodules can reach it
# without circular requires.  deactivate() resets string slots to "" and
# everything else to null.
window.evnd =
  evndView: null              # main EVND panel view
  editor: null
  searchNoteView: null        # "import from Evernote" search panel
  searchResultListView: null  # search-result list view
  noteManagerView: null       # drop-down note browser
  cssTheme: ""                # preview theme CSS text (loadCSS)
  cssCode: ""                 # syntax-highlighting CSS text (loadCSS)
  template: ""                # current note template text (loadTemplate)
  noteIndex: null             # NoteIndex parsed from index.json (loadJSON)
  storageManager: null        # git-backed storage wrapper (loadGitRepo)
  enHelper: null              # EvernoteHelper instance (searchNotes)
  init: null                  # set true in activate(); presumably cleared by loadModule() — TODO confirm
  chartsLibsLoaded: null
  gitPath: null
  gitPathSymlink: null
  svgCollections: null
  newNoteDisposables: null
module.exports =
config:
showPath:
type: 'boolean'
default: true
order: 1
gitPath:
type: 'string'
default: defaultGitPath
description: 'Default location to store your ever-notedown notes, GIT-backed'
order: 2
gitPathSymlink:
type: 'boolean'
default: true
description: 'Check this if the specified gitPath is a symbolic link'
order: 3
openNoteInEvernoteAuto:
title: 'Open Note in Evernote'
type: 'boolean'
default: false
description: "Automatically open note in Evernote client after note creation or modification"
order: 4
pulledContentInSplitPane:
type: 'boolean'
default: false
description: "After loading note content from the Evernote client database, put the loaded content in a separate pane as a new file? (default: false, will overwrite old note content)."
order: 5
sortBy:
type: 'string'
default: 'Title'
enum: ['default', 'Title', 'Notebook', 'Creation Time', 'Modification Time']
description: 'Default sorting is the order in which the notes are displayed in the drop-down note browser'
order: 6
convertHexNCR2String:
title: 'Convert Hex NCR to String'
type: 'boolean'
default: true
description: 'When importing (or pulling) from Evernote, convert hex NCR represented Unicode characters to UTF8 string'
order: 7
defaultFormat:
type: 'string'
default: 'Markdown'
enum: ['Text', 'Markdown', 'HTML']
description: '(Please choose only "Markdown" for now...)'
order: 7
codeSnippet:
type: 'boolean'
default: true
description: 'Render selected content as a fenced code block'
order: 8
toc:
title: 'TOC'
type: 'boolean'
default: true
description: 'Enable Table of Contents generation ([TOC])'
order: 9
checkbox:
type: 'boolean'
default: true
description: 'Render ([ ], [x]) as checkboxes everywhere'
order: 10
footnote:
type: 'boolean'
default: true
description: 'Parse footnotes in MMD style...([^text] for reference, [^text]: for definition)'
order: 11
mathjax:
type: 'boolean'
default: true
description: 'Enable MathJax processing'
order: 12
mathjaxOutput:
type: 'string'
default: 'SVG'
enum: ['SVG'] #['SVG', 'HTML/CSS']
order: 13
mathjaxCustomMacros:
type: 'string'
default: "Physical Sciences"
enum: [
"None",
"Default",
"Physical Sciences",
"Math",
"Custom 1",
"Custom 2"
]
order: 14
description: 'Use custom defined macros (~/.atom/packages/ever-notdown/assets/mathjax/macros/custom.json) for MathJax rendering. (After making changes, please use "View -> Reload" for the change to take effect.)'
breakOnSingleNewline:
type: 'boolean'
default: false
description: 'Markdown rendering option'
order: 15
smartyPants:
type: 'boolean'
default: false
description: 'Use "smart" typograhic punctuation for things like quotes and dashes.'
order: 16
noteTemplate:
type: 'string'
default: 'Default'
description: 'Template for creating new note'
enum: [
"Default",
"Lecture Notes",
"Custom 1",
"Custom 2",
"None"
]
order: 17
theme:
type: 'string'
default: "Default"
enum: [
"Default",
"Default 2",
"Default 3",
"Atom",
"Custom 1",
"Custom 2"
]
order: 18
syntaxTheme:
type: 'string'
default: "Default"
enum: [
"Default",
"Default 2",
"One Light",
"One Dark",
"Solarized Light",
"Solarized Dark",
"Github",
"Chester",
"Tomorrow",
"IDLE",
"Seti Syntax",
"Cobalt",
"Monokai",
"Serpia",
"Custom 1",
"Custom 2"
]
order: 19
liveUpdate:
type: 'boolean'
default: true
description: 'For Markdown Preview'
order: 20
openPreviewInSplitPane:
type: 'boolean'
default: true
order: 21
syncScroll:
type: 'boolean'
default: true
description: 'Sync scrolling between the editor and the preview pane'
order: 22
grammars:
type: 'array'
default: [
'source.gfm'
'source.litcoffee'
'text.html.basic'
'text.plain'
'text.plain.null-grammar'
'text.markdown.evnd.source.gfm.inline.html'
'text.markdown.evnd.mathjax.source.gfm.inline.html'
'text.markdown.evnd.mathjax.source.litcoffee.inline.html'
]
order: 23
evndGrammar:
title: 'Extended grammar for syntax highlighting markdown files in editor'
type: 'string'
order: 24
enum: [
'Extended source.litcoffee'
'Extended source.gfm'
]
default: 'Extended source.gfm'
description: 'Support extra syntax highlighting, eg: inline HTML, MathJax equations, etc.'
subscriptions: null
# TODO: This CSS matter... should we just go for "getMarkdownPreviewCSS"?
activate: (state) ->
return unless process.platform is 'darwin' # OSX Only!
window.evnd.init = true
window.evnd.chartsLibsLoaded = false
#console.log atom.config.get('ever-notedown.gitPath')
@loadJSON (newNoteIndex) =>
window.evnd.noteIndex = newNoteIndex
mathjax = atom.config.get('ever-notedown.mathjax')
if mathjax
mathjaxHelper = require('./mathjax-helper')
mathjaxHelper.loadMathJax()
# Events subscribed to in atom's system can be easily
# cleaned up with a CompositeDisposable
@subscriptions = new CompositeDisposable
# Register commands
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:toggle': =>
@toggle()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:toggle-note-list': =>
@createNoteManagerView(state).toggle()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:selection-to-evernote', =>
@sel2Evernote()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:file-to-evernote', =>
@file2Evernote()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:pull-current-note-from-evernote', =>
@pullFromEvernote()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:import-note-from-evernote', =>
@showImportNotePanel()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:new-note', =>
@openNewNote()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:open-config', =>
@openConfig()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:open-help-document', =>
@openHelpDoc()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:markdown-quick-ref', =>
@openMarkdownQuickRef()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:mathjax-quick-ref', =>
@openMathJaxQuickRef()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:notes-for-developers', =>
@openDevNotes()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:toggle-preview': =>
@togglePreview()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:copy-rendered-html': =>
@copyHtml()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:save-rendered-html': =>
@saveHtml()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:toggle-break-on-single-newline': =>
keyPath = '<KEY>'
atom.config.set(keyPath, not atom.config.get(keyPath))
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:view-current-note-template': =>
@openNewNote()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-note-template-custom1': =>
@editCustomTemplate('Custom 1')
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-note-template-custom2': =>
@editCustomTemplate('Custom 2')
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:view-current-theme-css': =>
@viewThemeCSS()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-theme-custom1': =>
@editCustomThemeCSS('Custom 1')
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-theme-custom2': =>
@editCustomThemeCSS('Custom 2')
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:view-current-syntax-theme-css': =>
@viewSyntaxThemeCSS()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-syntax-theme-custom1': =>
@editCustomSyntaxThemeCSS('Custom 1')
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-syntax-theme-custom2': =>
@editCustomSyntaxThemeCSS('Custom 2')
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:view-mathjax-macros': =>
@viewMathJaxMacros()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-macros-custom1': =>
@editCustomMacros('Custom 1')
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-macros-custom2': =>
@editCustomMacros('Custom 2')
@subscriptions.add atom.commands.add 'atom-text-editor', 'drop': (event) =>
#console.log 'Dropping item!'
@onDrop(event)
@subscriptions.add atom.commands.add 'atom-text-editor',
'core:paste': (event) =>
#console.log "Pasting stuff!"
event.stopPropagation()
@pasteImage()
@subscriptions.add atom.commands.add 'atom-text-editor',
'ever-notedown:bold-text': =>
@boldText()
@subscriptions.add atom.commands.add 'atom-text-editor',
'ever-notedown:emphasis-text': =>
@emphasisText()
@subscriptions.add atom.commands.add 'atom-text-editor',
'ever-notedown:underline-text': =>
@underlineText()
@subscriptions.add atom.commands.add 'atom-text-editor',
'ever-notedown:highlight-text': =>
@highlightText()
@subscriptions.add atom.commands.add 'atom-text-editor',
'ever-notedown:strikethrough-text': =>
@strikeThroughText()
@subscriptions.add atom.commands.add 'atom-text-editor',
'ever-notedown:blockquote': =>
@blockquote()
@subscriptions.add atom.workspace.observePaneItems (item) =>
if isEVNDPreviewView(item)
item.disposables.add item.onDidClickButtonEvernote (editor, previewView) =>
@file2Evernote(editor, previewView)
item.disposables.add item.onDidClickButtonPull (filePath, previewView) =>
@pullFromEvernote(null, filePath, previewView)
item.disposables.add item.onDidClickButtonNewNote =>
@openNewNote()
item.disposables.add item.onDidClickButtonHome =>
@toggle()
item.disposables.add item.onDidClickButtonEye (filePath, previewView) =>
@openNoteInEvernote(null, filePath, previewView)
item.disposables.add item.onDidClickButtonInfo (note) =>
@confirmedNoteItem({note: note})
item.disposables.add item.onDidClickButtonHTML (note) =>
@getNoteHTML({note: note})
item.disposables.add item.onDidClickButtonENML (note) =>
@getNoteENML({note: note})
item.disposables.add item.onDidClickButtonFolder (notePath) =>
@openFinder(notePath)
@subscriptions.add item.disposables
previewFile = @previewFile.bind(this)
@subscriptions.add atom.commands.add '.tree-view .file .name[data-name$=\\.markdown]',
'ever-notedown:preview-file', previewFile
@subscriptions.add atom.commands.add '.tree-view .file .name[data-name$=\\.md]',
'ever-notedown:preview-file', previewFile
@subscriptions.add atom.commands.add '.tree-view .file .name[data-name$=\\.mdown]',
'ever-notedown:preview-file', previewFile
@subscriptions.add atom.commands.add '.tree-view .file .name[data-name$=\\.mkd]',
'ever-notedown:preview-file', previewFile
@subscriptions.add atom.commands.add '.tree-view .file .name[data-name$=\\.mkdown]',
'ever-notedown:preview-file', previewFile
@subscriptions.add atom.commands.add '.tree-view .file .name[data-name$=\\.ron]',
'ever-notedown:preview-file', previewFile
@subscriptions.add atom.commands.add '.tree-view .file .name[data-name$=\\.txt]',
'ever-notedown:preview-file', previewFile
atom.workspace.addOpener (uriToOpen) ->
try
{protocol, host, pathname} = url.parse(uriToOpen)
catch error
return
return unless protocol is 'ever-notedown-preview:'
try
pathname = decodeURI(pathname) if pathname
catch error
return
if host is 'editor'
createEVNDPreviewView(editorId: pathname.substring(1))
else
createEVNDPreviewView(filePath: pathname)
  # Tear down all views, flush the note index, dispose every subscription,
  # and reset the window.evnd globals to their inert values.
  deactivate: ->
    # TODO: manage storage?
    #if atom.config.get('ever-notedown.mathjax') then @removeMathJaxGrammar()
    @removeEVNDGrammar()
    window.evnd.noteIndex?.update()
    window.evnd.noteManagerView?.destroy?()
    window.evnd.searchResultListView?.destroy?()
    window.evnd.evndView?.destroy()
    @subscriptions.dispose()
    # String-valued slots go back to "", everything else to null
    # (mirrors the initial window.evnd shape near the top of this file).
    for k, v of window.evnd
      if k in ["cssTheme", "cssCode", "template"]
        window.evnd[k] = ""
      else
        window.evnd[k] = null
serialize: ->
noteManagerViewState: window.evnd.noteManagerView?.serialize()
  # Show/hide the main EVND panel.  On first use, constructs the EVNDView
  # and wires each of its buttons to the corresponding package action.
  toggle: ->
    if window.evnd.init then @loadModule()
    unless window.evnd.evndView?
      EVNDView ?= require './ever-notedown-view'
      newEVNDView = new EVNDView(@)
      newEVNDView.disposables.add newEVNDView.onDidClickButtonImportNote =>
        @showImportNotePanel()
      newEVNDView.disposables.add newEVNDView.onDidClickButtonNewNote =>
        newEVNDView.hide()
        @openNewNote()
      # NOTE(review): onDidClickButtonDeleteNote is subscribed twice (here
      # without a note argument, and again below with one) — confirm both
      # handlers are intended to fire on a click.
      newEVNDView.disposables.add newEVNDView.onDidClickButtonDeleteNote =>
        @deleteNote()
      newEVNDView.disposables.add newEVNDView.onDidClickButtonNoteList =>
        @createNoteManagerView(@).toggle()
      newEVNDView.disposables.add newEVNDView.onDidClickButtonOpenConfig =>
        @openConfig()
      newEVNDView.disposables.add newEVNDView.onDidClickButtonOpenHelp =>
        @openHelpDoc()
      newEVNDView.disposables.add newEVNDView.onDidClickButtonOpenNote (note) =>
        newEVNDView.hide()
        @openNote(note)
      newEVNDView.disposables.add newEVNDView.onDidClickButtonOpenFinder (notePath) =>
        @openFinder(notePath)
      newEVNDView.disposables.add newEVNDView.onDidClickButtonOpenInfo (note) =>
        @confirmedNoteItem({note: note})
      newEVNDView.disposables.add newEVNDView.onDidClickButtonDeleteNote (note) =>
        @deleteNote {note:note}, (deleted) =>
          if deleted then newEVNDView.refresh()
      newEVNDView.disposables.add newEVNDView.onDidClickButtonExportNote () =>
        @saveHtml()
      @subscriptions.add newEVNDView.disposables
      window.evnd.evndView = newEVNDView
    window.evnd.evndView.toggle(@)
#
# Based on the official Atom Markdown Preview package
# Updated Nov 15, 2015
# TODO: move these functions to `ever-notedown-preview-view.coffee`
#
  # Snapshot Atom's text-editor style sheets as an array of CSS strings by
  # instantiating an <atom-styles> element and reading its children.
  # NOTE(review): the element is appended to document.body and never
  # removed — looks like a small DOM leak per call; confirm.
  getTextEditorStyles: ->
    textEditorStyles = document.createElement("atom-styles")
    textEditorStyles.initialize(atom.styles)
    textEditorStyles.setAttribute "context", "atom-text-editor"
    document.body.appendChild textEditorStyles
    # Extract style elements content
    Array.prototype.slice.apply(textEditorStyles.childNodes).map (styleElement) ->
      styleElement.innerText
# TODO: remove the particular {overflow-y: scroll;}?
  # Build (and memoize in @markdownPreviewCSS) the CSS used for exported
  # HTML: every `.ever-notedown-preview` rule plus the current text-editor
  # styles, with shadow-DOM selectors rewritten and asset url()s inlined
  # as base64 data URIs.
  getMarkdownPreviewCSS: ->
    return @markdownPreviewCSS if @markdownPreviewCSS
    markdowPreviewRules = []
    ruleRegExp = /\.ever-notedown-preview/
    cssUrlRefExp = /url\(atom:\/\/ever-notedown\/assets\/(.*)\)/
    for stylesheet in document.styleSheets
      if stylesheet.rules?
        for rule in stylesheet.rules
          # We only need `.ever-notedown-preview` css
          markdowPreviewRules.push(rule.cssText) if rule.selectorText?.match(ruleRegExp)?
    @markdownPreviewCSS = markdowPreviewRules
      .concat(@getTextEditorStyles())
      .join('\n')
      .replace(/([^\.])atom-text-editor/g, '$1pre.editor-colors') # <atom-text-editor> are now <pre>
      .replace(/:host/g, '.host') # Remove shadow-dom :host selector causing problem on FF
      .replace cssUrlRefExp, (match, assetsName, offset, string) -> # base64 encode assets
        assetPath = path.join __dirname, '../assets', assetsName
        originalData = fs.readFileSync assetPath, 'binary'
        base64Data = new Buffer(originalData, 'binary').toString('base64')
        # NOTE(review): MIME type is always image/jpeg regardless of the
        # asset's real format — browsers usually sniff, but confirm.
        "url('data:image/jpeg;base64,#{base64Data}')"
    @markdownPreviewCSS
editCustomSyntaxThemeCSS: (syntaxThemeName) ->
return unless syntaxThemeName.indexOf('Custom') > -1 and syntaxThemeDict[syntaxThemeName]?
if window.evnd.init then @loadModule()
evndPkgPath = atom.packages.resolvePackagePath('ever-notedown')
syntaxThemeFilePath = path.join evndPkgPath, syntaxThemeDict[syntaxThemeName]
atom.workspace.open(syntaxThemeFilePath)
viewSyntaxThemeCSS: ->
@loadCSS() unless window.evnd.cssCode?
syntaxThemeCSS = window.evnd.cssCode
atom.workspace.open('').then (editor) =>
editor.setText(syntaxThemeCSS)
cssGrammar = atom.grammars.grammarForScopeName('source.css')
if cssGrammar then editor.setGrammar(cssGrammar)
editCustomThemeCSS: (themeName) ->
return unless themeName?.indexOf('Custom') > -1 and themeDict[themeName]?
if window.evnd.init then @loadModule()
evndPkgPath = atom.packages.resolvePackagePath('ever-notedown')
themeFilePath = path.join evndPkgPath, themeDict[themeName]
atom.workspace.open(themeFilePath)
viewThemeCSS: ->
@loadCSS() unless window.evnd.cssTheme?
themeCSS = window.evnd.cssTheme
atom.workspace.open('').then (editor) =>
editor.setText(themeCSS)
cssGrammar = atom.grammars.grammarForScopeName('source.css')
if cssGrammar then editor.setGrammar(cssGrammar)
  # Load the configured (or given) preview + syntax theme CSS from the
  # package assets, cache both on window.evnd, write the combined sheet to
  # styles/theme.css, and (re)install it into Atom.  Returns the combined
  # CSS text, or undefined for an unknown theme name.
  loadCSS: (themeName, syntaxThemeName) ->
    # Load defined CSS themes
    themeName ?= atom.config.get('ever-notedown.theme')
    themeFileName = themeDict[themeName]
    syntaxThemeName ?= atom.config.get('ever-notedown.syntaxTheme')
    syntaxThemeFileName = syntaxThemeDict[syntaxThemeName]
    return unless themeFileName? and syntaxThemeFileName?
    evndPkgPath = atom.packages.resolvePackagePath('ever-notedown')
    themeFilePath = path.join evndPkgPath, themeFileName
    window.evnd.cssTheme = fs.readFileSync(themeFilePath, 'utf8')
    syntaxThemeFilePath = path.join evndPkgPath, syntaxThemeFileName
    window.evnd.cssCode = fs.readFileSync(syntaxThemeFilePath, 'utf8')
    themePath = path.join evndPkgPath, "styles/theme.css"
    themeCSS = window.evnd.cssTheme + window.evnd.cssCode
    fs.writeFileSync(themePath, themeCSS, 'utf8')
    @reloadTheme(themeCSS, {sourcePath: themePath})
    return themeCSS
reloadTheme: (source, params) ->
return unless source
#console.log "Reloading css style sheet... #{params.sourcePath}"
sourcePath = params?.sourcePath
sourcePath ?= path.join atom.packages.resolvePackagePath('ever-notedown'), 'styles/theme.css'
priority = params?.priority
styleElements = atom.styles.getStyleElements()
for styleElement in styleElements
if styleElement.sourcePath is sourcePath
priority ?= styleElement.priority ? 0
atom.styles.removeStyleElement(styleElement)
#break
params.priority = priority
atom.styles.addStyleSheet(source, params)
@markdownPreviewCSS = null
removeTheme: (sourcePath) ->
return unless sourcePath
#console.log "Removing css style sheet... #{sourcePath}"
styleElements = atom.styles.getStyleElements()
for styleElement in styleElements
if styleElement.sourcePath is sourcePath
atom.styles.removeStyleElement(styleElement)
break
viewTemplate: ->
if window.evnd.init then @loadModule()
template = window.evnd.template ? @loadTemplate()
atom.workspace.open('').then (editor) =>
editor.setText(template)
editCustomTemplate: (templateName) ->
return unless templateName?.indexOf('Custom') > -1 and
noteTemplateDict[templateName]?
if window.evnd.init then @loadModule()
evndPkgPath = atom.packages.resolvePackagePath('ever-notedown')
templateFilePath = path.join evndPkgPath, noteTemplateDict[templateName]
atom.workspace.open templateFilePath, {searchAllPanes: true}
loadTemplate: (templateName) ->
evndPkgPath = atom.packages.resolvePackagePath('ever-notedown')
templateName ?= atom.config.get('ever-notedown.noteTemplate')
if templateName is "None"
window.evnd.template = ""
else
templateFilePath = path.join evndPkgPath, noteTemplateDict[templateName]
window.evnd.template = fs.readFileSync(templateFilePath, 'utf8')
return window.evnd.template
viewMathJaxMacros: ->
if window.evnd.init then @loadModule()
unless atom.config.get('ever-notedown.mathjax')
window.alert "MathJax is not enabled currently!"
return
mathjaxHelper ?= require './mathjax-helper'
console.log mathjaxHelper
macros = mathjaxHelper.loadCustomMacros()
console.log macros
atom.workspace.open('').then (editor) =>
editor.setText(mathjaxHelper.macrosToCSONString(macros))
grammar = atom.grammars.grammarForScopeName('source.coffee')
if grammar? then editor.setGrammar(grammar)
editCustomMacros: (macroName) ->
if window.evnd.init then @loadModule()
mathjaxHelper ?= require './mathjax-helper'
return unless macroName?.indexOf('Custom') > -1 and
mathjaxHelper.macroPaths[macroName]?
atom.workspace.open(mathjaxHelper.macroPaths[macroName])
  # Return an atom Directory for the notes git repo, preferring the storage
  # manager's cached Directory when its gitPath/gitPathSymlink still match
  # the requested (or configured) values.
  getGitDir: (gitPath, gitPathSymlink) ->
    gitPath ?= atom.config.get('ever-notedown.gitPath')
    gitPathSymlink ?= atom.config.get('ever-notedown.gitPathSymlink')
    if window.evnd.storageManager?.gitDir? and
       window.evnd.storageManager.gitPath is gitPath and
       window.evnd.storageManager.gitPathSymlink is gitPathSymlink
      return window.evnd.storageManager?.gitDir
    gitDir = new Directory(gitPath, gitPathSymlink)
    return gitDir
getRealGitPath: ->
gitDir = @getGitDir()
return gitDir.getRealPathSync()
loadGitRepo: (gitPath, gitPathSymlink, callback) ->
gitPath ?= atom.config.get('ever-notedown.gitPath')
gitPathSymlink ?= atom.config.get('ever-notedown.gitPathSymlink')
#console.log "Git Path: " + gitPath
storage ?= require './storage-manager'
gitDir = @getGitDir(gitPath, gitPathSymlink)
loadGitRepoNormal = =>
if window.evnd.storageManager?.gitPath is gitPath and
window.evnd.storageManager?.gitPathSymlink is gitPathSymlink and
window.evnd.storageManager?.gitDir?.existsSync()
if window.evnd.storageManager.gitRepo is null
window.evnd.storageManager.initRepo () =>
callback(window.evnd.storageManager)
else
callback(window.evnd.storageManager)
else
storageOptions =
gitPath: gitPath
gitPathSymlink: gitPathSymlink
gitRepo: null
gitDir: gitDir
window.evnd.storageManager ?= new storage.StorageManager(storageOptions)
callback(window.evnd.storageManager)
if not gitDir.existsSync()
dmsg = "The current GIT directory #{gitPath} "
if gitPathSymlink then dmsg += "(symolic link) "
dmsg += "for EVND doesn't exist!"
atom.confirm
message: dmsg
buttons:
"mkdir": =>
@initGitDir gitDir, () =>
loadGitRepoNormal()
"Open Settings": =>
@openConfig()
callback(null)
return
else
loadGitRepoNormal()
  # Ensure the notes git directory exists, asking the user before creating
  # it; `callback` (if given) runs afterwards in every branch.
  initGitDir: (gitDir, callback) ->
    gitDir ?= @getGitDir()
    if gitDir?.existsSync()
      if callback? then callback()
      return
    atom.confirm
      message: "Will create directory at #{gitDir.getRealPathSync()}"
      buttons:
        "Confirm": =>
          fs ?= require 'fs-plus'
          fs.makeTreeSync(gitDir.getRealPathSync())
          if callback? then callback()
        "Cancel": =>
          if callback? then callback()
    return
initJSONFile: (jsonFile, callback) ->
@initGitDir null, () =>
jsonFile.write("{}")
if callback? then callback()
  # Read index.json from the git directory (creating it as "{}" if absent)
  # and hand a NoteIndex built from its contents to `callback`.
  loadJSON: (callback) ->
    path ?= require 'path'
    jsonPath = path.join(@getRealGitPath(), "index.json")
    jsonFile = new File(jsonPath)
    loadJSONNormal = =>
      jsonFile.read().then (jsonString) =>
        jobj = JSON.parse(jsonString)
        noteHelper ?= require './note-helper'
        options =
          jsonOBJ: jobj
          absfilename: jsonPath
          file: jsonFile
        callback(new noteHelper.NoteIndex(options))
    if jsonFile.existsSync()
      loadJSONNormal()
    else
      @initJSONFile jsonFile, () =>
        loadJSONNormal()
openConfig: ->
options =
searchAllPanes: true
if atom.config.get('ever-notedown.openPreviewInSplitPane')
options.split = "left"
atom.workspace.open 'atom://config/packages/ever-notedown/', options
openHelpDoc: ->
if window.evnd.init then @loadModule()
pathToHelpDoc = path.join __dirname, '../docs/help.md'
options =
searchAllPanes: true
if atom.config.get('ever-notedown.openPreviewInSplitPane')
options.split = "left"
atom.workspace.open(pathToHelpDoc, options).then (editor) =>
@addPreviewForEditor(editor)
  # Placeholder: the Markdown quick-reference is not implemented yet.
  openMarkdownQuickRef: ->
    if window.evnd.init then @loadModule()
    window.alert "Sorry, this function has not yet been implemented... :-/"
  # Placeholder: the MathJax quick-reference is not implemented yet.
  openMathJaxQuickRef: ->
    if window.evnd.init then @loadModule()
    window.alert "Sorry, this function has not yet been implemented... :-/"
openDevNotes: ->
if window.evnd.init then @loadModule()
pathToDevNotes = path.join __dirname, '../docs/dev_notes.md'
options =
searchAllPanes: true
if atom.config.get('ever-notedown.openPreviewInSplitPane')
options.split = "left"
atom.workspace.open(pathToDevNotes, options).then (editor) =>
@addPreviewForEditor(editor)
#
# toggle the search panel (similar to find-and-replace)
#
  # Show the "import note from Evernote" search panel, creating it and
  # hooking its search event to searchNotes() on first use.
  showImportNotePanel: ->
    if window.evnd.init then @loadModule()
    if window.evnd?.searchNoteView?.panel?
      window.evnd.searchNoteView.show()
    else
      SearchNoteView ?= require './search-note-view'
      window.evnd.searchNoteView = new SearchNoteView()
      window.evnd.searchNoteView.show()
      @subscriptions.add window.evnd.searchNoteView.onDidSearchWithString ({queryString, noteLink}={}) =>
        @searchNotes({queryString:queryString, noteLink:noteLink})
#
# Open note list (Scroll List view)
#
  # Return the (lazily created) drop-down note browser view; confirming a
  # note in it opens that note's info dialog.  `state` is currently unused.
  createNoteManagerView: (state) ->
    if window.evnd.init then @loadModule()
    unless window.evnd.noteManagerView?
      NoteManagerView ?= require './note-manager-view'
      window.evnd.noteManagerView = new NoteManagerView()
      @subscriptions.add window.evnd.noteManagerView.onDidConfirmNote (noteID) =>
        @confirmedNoteItem({noteID: noteID})
    window.evnd.noteManagerView
#
# Import from Evernote?
#
  # Query the Evernote client for notes matching `queryString` or a note
  # link, and show the hits in a selectable result list.
  searchNotes: ({queryString, noteLink}={}) ->
    if window.evnd.init then @loadModule()
    # Rewrite a www.evernote.com share URL into the equivalent in-app
    # evernote:///view/... link before querying.
    reg0 = /^https\:\/\/www\.evernote\.com\/shard\/([^\s\/]+)\/[^\s\/]+\/([^\s\/]+)\/([^\s\/]+)\/$/i
    if noteLink? and reg0.test(noteLink) #noteLink.slice(0, 8) is 'https://'
      matched = reg0.exec(noteLink)
      noteLink = "evernote:///view/#{matched[2]}/#{matched[1]}/#{matched[3]}/#{matched[3]}/"
    evernoteHelper ?= require './evernote-helper'
    window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
    window.evnd.enHelper.searchNotes {queryString:queryString, noteLink: noteLink}, (result) =>
      if (not result) or (not result.found) or (result? and _.size(result) < 2)
        window.alert("No results found!")
        return
      else
        SearchResultListView ?= require './search-result-list-view'
        window.evnd.searchResultListView = new SearchResultListView(result)
        window.evnd.searchResultListView.show()
        # Picking a result kicks off an import of that note.
        @subscriptions.add window.evnd.searchResultListView.onDidConfirmSearchResult (noteLink) =>
          @importFromEvernote({noteLink: noteLink})
handleToEvernoteError: (error, noteOptions) ->
message = "#{error.message} when trying to send note to Evernote"
detail = "Note options:\n"
for k, v of noteOptions
continue if k in ["rawHTML", "text", "css"]
detail += " #{k}: #{JSON.stringify(v)}\n"
stack = "#{error.stack}\n"
atom.notifications.addError(message, {stack: stack, detail: detail, dismissable: true})
# TODO: Handles "code snippet"
# TODO: use selection.getScreenRange() (for code annotating?)
#
  # Send the active editor's current selection to Evernote.  Markdown-ish
  # sources are rendered to HTML; other sources may be wrapped as a fenced
  # code snippet; anything else is sent as plain text.
  sel2Evernote: ->
    if window.evnd.init then @loadModule()
    editor = atom.workspace.getActiveTextEditor()
    return unless editor?
    curFilePath = editor.getPath()
    lastSelection = editor.getLastSelection()
    selectionText = lastSelection.getText().toString() #editor.getSelectedText()
    bufferRowRange = lastSelection.getBufferRowRange()
    # GitHub-style line anchor (1-based) describing the selected rows.
    rowRange = "#L#{(bufferRowRange[0]+1).toString()}-#{(bufferRowRange[1]+1).toString()}"
    if selectionText.trim().length is 0
      window.alert "Nothing selected!"
      return
    # Convert selected text (Markdown) to HTML
    # TODO: if current file is code file (selected text is code snippet), render
    # TODO: renderer, async???
    textContent = selectionText ? "Nothing here"
    # Split metadata (title/tags/date/notebook) off from the body.
    parsedInput = utils.parseMetaData(textContent)
    newTitle = parsedInput.title
    newTextContent = parsedInput.content
    tags = parsedInput.tags
    date = parsedInput.date
    notebookName = parsedInput.notebook
    metaText = parsedInput.metaText
    # Branch 1: markdown-like source — render body as Markdown (sets
    # renderOptions, leaving noteFormat undefined so the renderer path runs).
    if utils.isMarkdown(curFilePath) or
       editor.getGrammar()?.scopeName in evndGrammarList or
       (utils.isText(curFilePath) and
        atom.config.get('ever-notedown.defaultFormat') is 'Markdown')
      renderOptions = {mathjax: atom.config.get('ever-notedown.mathjax')}
    # Branch 2: wrap the selection in a fenced code block annotated with
    # source file and clip time; MathJax is disabled for snippets.
    else if atom.config.get('ever-notedown.codeSnippet')
      if path.basename(curFilePath)?
        newTitle = "Code snippet: #{path.basename(curFilePath)}#{rowRange}"
      scopeName = editor.getGrammar()?.scopeName
      fenceName = if scopeName? then fenceNameForScope(scopeName) else ""
      newTextContent = "```#{fenceName}\n#{newTextContent}\n```\n"
      newTextContent += "\n<br><br>**Source file**: #{curFilePath} \n"
      newTextContent += "<br>**Clipped Time**: #{utils.getCurrentTimeString()} \n"
      textContent = metaText + "\n\n" + newTextContent
      renderOptions = {mathjax: false}
    # Branch 3: plain text — no HTML rendering at all.
    else
      newHtmlContent = null
      noteFormat = "Text"
      tmpCss = null
    if noteFormat is "Text"
      options =
        title: newTitle
        update: false
        text: textContent # This will include MetaData section...
        tags: tags
        notebook: {name: notebookName}
        metaDate: date
        rawHTML: newHtmlContent
        css: tmpCss
        format: noteFormat
        filePath: curFilePath
        renderOptions: renderOptions ? null
      try
        @toEvernote options, null, (curNote) =>
          @openNote(curNote)
      catch error
        @handleToEvernoteError(error, options)
    else
      # Markdown/snippet path: render to HTML first, then send.
      renderer ?= require './renderer'
      renderer.toHTML newTextContent, renderOptions.mathjax, editor.getPath(),
        parsedInput, editor.getGrammar(), (error, html) =>
          if error
            console.error('Converting Markdown to HTML failed', error)
            return # TODO: notify user
          else
            tmpCss = if (window.evnd.cssTheme? and window.evnd.cssCode?) then (window.evnd.cssTheme + window.evnd.cssCode) else @loadCSS()
            #tmpCss = @getMarkdownPreviewCSS()
            noteFormat = "Markdown"
            newHtmlContent = html
            options =
              title: newTitle
              update: false
              moved: true
              text: textContent # This will include MetaData section...
              tags: tags
              notebook: {name: notebookName}
              metaDate: date
              rawHTML: newHtmlContent
              css: tmpCss
              format: noteFormat
              filePath: curFilePath
              renderOptions: renderOptions ? null
            try
              @toEvernote options, null, (curNote) =>
                @openNote(curNote)
            catch error
              @handleToEvernoteError(error, options)
file2Evernote: (editor, previewView) ->
  # Send the whole current file (or the file behind a preview view) to
  # Evernote: decide create-vs-update from its location relative to the EVND
  # git repo, render per format, then delegate to toEvernote().
  if window.evnd.init then @loadModule()
  if previewView?
    testView = previewView
    editor ?= previewView.editor
  else
    testView ?= atom.workspace.getActivePane().getActiveItem()
    editor ?= atom.workspace.getActiveTextEditor()
  return unless editor? or isEVNDPreviewView(testView)
  # update note in Evernote if current file is already in the EVND git repo
  if editor?
    curFilePath = editor.getPath()
  else
    # No live editor: recover file path / editor from the preview view.
    editorId = parseInt(testView.editorId)
    editor = testView.editor
    curFilePath = testView.filePath
    if editor?
      curFilePath = editor.getPath()
    else if curFilePath?
      editor = atom.workspace.openSync(curFilePath, {searchAllPanes: true})
    return unless curFilePath? and editor?
  unless curFilePath?
    # Unsaved buffer: route through saveNewNote() instead of sending.
    if editor?
      dMsg = "EVND will now try to save it as a new note... please try again later."
      atom.notifications.addWarning("File is not yet saved!", {detail: dMsg, dismissable: true})
      utils.timeOut(1000)
      @saveNewNote(editor)
    else
      window.alert "File not saved! Cannot send to Evernote... please save first."
    return
  #if curFilePath.indexOf(atom.config.get('ever-notedown.gitPath')) > -1
  gitPath0 = @getRealGitPath()
  gitPath1 = atom.config.get('ever-notedown.gitPath')
  # Files already inside the repo update their note; anything else becomes a
  # brand-new note that is "moved" into the repo.
  if curFilePath.indexOf(gitPath0) > -1 or
     curFilePath.indexOf(gitPath1) > -1
    update = true
    moved = false
    #console.log("Will update this note...")
  else
    update = false
    moved = true
    #console.log("Will create a new note...")
  textContent = editor.getText()
  parsedInput = utils.parseMetaData(textContent)
  newTextContent = parsedInput.content
  newTitle = parsedInput.title
  tags = parsedInput.tags
  date = parsedInput.date
  notebookName = parsedInput.notebook
  # TODO: Fix Async!!!
  if utils.isMarkdown(curFilePath) or
     editor?.getGrammar()?.scopeName in evndGrammarList or
     (utils.isText(curFilePath) and
     atom.config.get('ever-notedown.defaultFormat') is 'Markdown')
    # Markdown: harvest the rendered HTML from an (up-to-date) preview pane.
    previewView ?= @getPreviewViewForEditor(editor)
    unless previewView?
      @addPreviewForEditor(editor)
      # TODO: notifiy user
      dMsg = "Please check the rendered result in preview pane first!\n"
      dMsg += "Please close this message, and wait until "
      dMsg += "the preview finishes loading before trying again."
      #window.alert(dMsg)
      atom.notifications.addWarning('Content not rendered!', {detail: dMsg, dismissable: true})
      return
    if previewView.loading then utils.timeOut(500)
    html = previewView[0].innerHTML
    # TODO: Need to properly handle CSS selection
    tmpCss = if (window.evnd.cssTheme? and window.evnd.cssCode?) then (window.evnd.cssTheme + window.evnd.cssCode) else window.evnd.loadCSS()
    #tmpCss = @getMarkdownPreviewCSS()
    newHtmlContent = html
    noteFormat = "Markdown"
    # Send resulting HTML to Evernote Application (create a new note or update)
  else if utils.isHTML(curFilePath) or
     editor?.getGrammar()?.scopeName in ['text.html.basic'] or
     (utils.isText(curFilePath) and
     atom.config.get('ever-notedown.defaultFormat') is 'HTML')
    newHtmlContent = newTextContent
    noteFormat = "HTML"
  else # no need to convert
    newHtmlContent = null
    noteFormat = "Text"
  options =
    title: newTitle
    text: textContent # This will include MetaData section...
    tags: tags
    notebook: {name: notebookName}
    metaDate: date
    rawHTML: newHtmlContent
    css: tmpCss
    format: noteFormat
    update: update
    filePath: curFilePath
    renderOptions: {mathjax: atom.config.get('ever-notedown.mathjax')}
  options.moved = moved
  if not moved
    options.path = path.dirname(curFilePath)
    options.fnStem = path.basename(curFilePath, path.extname(curFilePath))
  # Send content to Evernote Application (create a new note or update)
  try
    @toEvernote options, previewView, (curNote) =>
      if options.moved then @openNote(curNote)
  catch error
    @handleToEvernoteError(error, options)
  # TODO: Open the written file (in the default GIT repo)
  # TODO: Async?
  if options.moved
    for editor in atom.workspace.getTextEditors() when editor.getPath() is curFilePath
      @removePreviewForEditor(editor)
  else
    @addPreviewForEditor(editor)
toEvernote: (options, previewView, callback) ->
  # Create or update the Evernote note described by `options` (title, text,
  # format, tags, notebook, ...). Looks the note up in the index when
  # options.update is set, persists it via the storage manager, then pushes
  # it to the Evernote client through EvernoteHelper (async osa call).
  # `callback(curNote)`, if given, fires once the sync has been kicked off.
  evernoteHelper ?= require './evernote-helper'
  window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
  # Send resulting HTML to Evernote Application (create a new note)
  # Note: This function contains an async call (osa)
  # In the callback function of osa, a global variable should be updated
  # TODO: tags, other implicit info encoding, etc.
  options.update ?= false
  noteHelper ?= require './note-helper'
  if options.update
    curNote = noteHelper.findNote(window.evnd.noteIndex, {title: options.title, fnStem: path.basename(options.filePath, path.extname(options.filePath)), dir: path.basename(path.dirname(options.filePath))})
    if curNote is null
      # Claimed to be an update but the note isn't indexed: create instead.
      options.update = false
      #console.log("Note not found in current note index")
      switch options.format
        when "Markdown" then curNote = new noteHelper.MarkdownNote(options)
        when "Text" then curNote = new noteHelper.TextNote(options)
        else curNote = new noteHelper.HTMLNote(options)
    else
      #console.log("Note found in current note index")
      curNote.update window.evnd.storageManager, options
  else
    switch options.format
      when "Markdown" then curNote = new noteHelper.MarkdownNote(options)
      when "Text" then curNote = new noteHelper.TextNote(options)
      else curNote = new noteHelper.HTMLNote(options)
    #console.log("Current Note entity title: " + curNote.title)
    # NOTE(review): lowercase 'addnote' -- confirm this matches the note
    # index API (it exposes e.g. 'removeNote' with a capital N elsewhere).
    window.evnd.noteIndex.addnote(curNote)
  # TODO: Async call in storage manager
  window.evnd.storageManager.addNote curNote, false, null, () =>
    #console.log("Sending to evernote..." + utils.getCurrentTimeString())
    unless previewView?
      openNoteOptions =
        searchAllPanes: true
        addPreview: true
      @openNote curNote, openNoteOptions, (editor) =>
        previewView = @getPreviewViewForEditor(editor)
    # Push local changes to the Evernote client, then sync ENML back.
    updateNoteNormal = () =>
      ensyncs = previewView?[0].querySelectorAll('#evernote-syncing') ? []
      for ensync in ensyncs
        ensync?.style.visibility = 'visible'
        ensync?.previousSibling.classList.add('faded')
      window.evnd.enHelper.updateNote curNote, curNote.addAttachments, true, (updateSuccess) =>
        if updateSuccess
          window.evnd.enHelper.getENML curNote, curNote.queryString, (enml) =>
            curNote.update(window.evnd.storageManager, {enml:enml, dontChangeTime:true})
            curNote.lastSyncDate = curNote.enModificationDate
            ensyncs = previewView?[0].querySelectorAll('#evernote-syncing') ? []
            for ensync in ensyncs
              ensync?.style.visibility = 'hidden'
              ensync?.previousSibling.classList.remove("faded")
              ensync?.parentNode.parentNode.classList.remove("evnd-yellow")
              ensync?.parentNode.parentNode.classList.remove("evnd-red")
            ensyncs = previewView?[0].querySelectorAll('#pull-syncing') ? []
            for ensync in ensyncs
              ensync?.parentNode.parentNode.classList.remove("evnd-yellow")
              ensync?.parentNode.parentNode.classList.remove("evnd-red")
            gitMessage = "Update Evernote note \"#{curNote.title}\" success!\n"
            gitMessage += "#{curNote.summary()}"
            window.evnd.storageManager.addNote curNote, true, gitMessage
            #console.log(gitMessage)
            #window.alert(gitMessage.split(/[\n\r]/g)[0])
            atom.notifications.addSuccess(gitMessage.split(/[\n\r]/g)[0])
        else
          #console.log "Update failed!"
          window.alert "Update failed!"
          ensyncs = previewView?[0].querySelectorAll('#evernote-syncing')
          for ensync in ensyncs
            ensync?.style.visibility = 'hidden'
            ensync?.previousSibling.classList.remove("faded")
    # Create a brand-new note on the Evernote side.
    createNoteNormal = () =>
      ensyncs = previewView?[0].querySelectorAll('#evernote-syncing') ? []
      for ensync in ensyncs
        ensync?.style.visibility = 'visible'
        ensync?.previousSibling.classList.add('faded')
      window.evnd.enHelper.createNewNote curNote, (createSuccess) =>
        if createSuccess
          window.evnd.enHelper.getENML curNote, curNote.queryString, (enml) =>
            curNote.update(window.evnd.storageManager, {enml:enml, dontChangeTime:true})
            curNote.lastSyncDate = curNote.enModificationDate ? curNote.enCreationDate
            ensyncs = previewView?[0].querySelectorAll('#evernote-syncing') ? []
            for ensync in ensyncs
              ensync?.style.visibility = 'hidden'
              ensync?.previousSibling.classList.remove("faded")
              ensync?.parentNode.parentNode.classList.remove("evnd-yellow")
              ensync?.parentNode.parentNode.classList.remove("evnd-red")
            ensyncs = previewView?[0].querySelectorAll('#pull-syncing') ? []
            for ensync in ensyncs
              ensync?.parentNode.parentNode.classList.remove("evnd-yellow")
              ensync?.parentNode.parentNode.classList.remove("evnd-red")
            gitMessage = "Create new Evernote note \"#{curNote.title}\" success!\n"
            gitMessage += "#{curNote.summary()}"
            window.evnd.storageManager.addNote curNote, true, gitMessage
            #console.log(gitMessage)
            #window.alert(gitMessage.split(/[\n\r]/g)[0])
            atom.notifications.addSuccess(gitMessage.split(/[\n\r]/g)[0])
        else
          window.alert "Something went wrong when trying to create new note..."
          ensyncs = previewView?[0].querySelectorAll('#evernote-syncing')
          for ensync in ensyncs
            ensync?.style.visibility = 'hidden'
            ensync?.previousSibling.classList.remove("faded")
    # Commit locally only -- nothing is sent to the Evernote client.
    saveOnly = () =>
      gitMessage = "Locally updated note \"#{curNote.title}\"\n"
      gitMessage += "#{curNote.summary()}"
      # FIX: was `addNote curnote. true, gitMessage` -- the typo ('curnote.'
      # instead of 'curNote,') made the "Save only" path throw at runtime.
      window.evnd.storageManager.addNote curNote, true, gitMessage
      #console.log(gitMessage)
      window.alert(gitMessage.split(/[\n\r]/g)[0])
    if options.update
      window.evnd.enHelper.getNoteInfo curNote, null, (enNoteInfo) =>
        if enNoteInfo?
          #console.log("enNoteInfo: " + JSON.stringify(enNoteInfo, null, 4))
          #console.log("curNote.lastSyncDate: " + utils.enDateToTimeString(curNote.lastSyncDate))
          #console.log("curNote.modificationTime: " + curNote.modificationTime)
          if enNoteInfo.enModificationDate isnt curNote.lastSyncDate
            # Remote copy changed since our last sync: ask the user how to proceed.
            dMsg = "On the Evernote client side, this note was last modified "
            dMsg += "on #{utils.enDateToTimeString(enNoteInfo.enModificationDate)}. "
            dMsg += "But the last time the local note was in sync with the "
            dMsg += "Evernote client was #{utils.enDateToTimeString(curNote.lastSyncDate)}.\n"
            dMsg += "The local note was modified on #{curNote.modificationTime}.\n"
            dMsg += "If you choose \"Update anyway\", the note content in the "
            dMsg += "Evernote database will be overwritten AFTER the note is "
            dMsg += "exported (you can find the exported note in the EVND folder).\n"
            dMsg += "If you choose \"Save only\", the note content will be "
            dMsg += "saved to the local EVND folder (with GIT commit), but "
            dMsg += "no info will be sent to the Evernote client."
            atom.confirm
              message: "Alert: possible conflicts!"
              detailedMessage: dMsg
              buttons:
                "Update anyway": -> updateNoteNormal()
                "Save only": -> saveOnly()
                "Cancel": -> return #console.log("cancelled update note")
          else
            lastSyncTime = utils.enDateToTimeString(curNote.lastSyncDate)
            tMinStr = utils.timeMin(lastSyncTime, curNote.modificationTime)
            #console.log(tMinStr)
            if tMinStr isnt curNote.modificationTime
              updateNoteNormal()
            else
              window.alert("Note hasn't changed, nothing to update.")
        else # no note info was found
          createNoteNormal()
    else
      createNoteNormal()
    if callback? then callback(curNote)
openNewNote: (initText, options, callback) ->
  # Open an untitled editor pre-filled with `initText` (or the note template)
  # and rebind `core:save` so saving routes through saveNewNote().
  # TODO: Template?
  @loadModule() if window.evnd.init
  initText ?= window.evnd.template ? @loadTemplate()
  if options?.addPreview?
    addPreview = options.addPreview
    delete options.addPreview
  else
    addPreview = true
  tmpDirPath = @makeNoteDir()
  fs.makeTreeSync(tmpDirPath) unless fs.isDirectorySync(tmpDirPath)
  options ?= {}
  if (not options.split?) and atom.config.get('ever-notedown.openPreviewInSplitPane')
    options.split = 'left'
  atom.workspace.open('', options).then (editor) =>
    editor.setText(initText) if initText
    editorElement = atom.views.getView(editor)
    window.evnd.newNoteDisposables[editor.id] = atom.commands.add editorElement,
      'core:save': (event) =>
        event.stopPropagation()
        @saveNewNote(editor, tmpDirPath)
    # Pick a grammar matching the configured default note format.
    scopeName = switch atom.config.get('ever-notedown.defaultFormat')
      when 'Text' then 'text.plain'
      when 'Markdown' then @getMarkdownScopeName()
      when 'HTML' then 'text.html.basic'
    grammar = atom.grammars.grammarForScopeName(scopeName)
    editor.setGrammar(grammar) if grammar?
    if addPreview
      @addPreviewForEditor editor, null, (previewView) =>
        callback(editor) if callback?
    else if callback?
      callback(editor)
makeNoteDir: ->
  # Path of a date-named directory (under the git repo) for new notes.
  stamp = utils.getSanitizedTimeString()
  sep = stamp.indexOf('_')
  dirName = if sep is -1 then stamp else stamp.slice(0, sep)
  path.join @getRealGitPath(), dirName
saveNewNote: (editor, noteDir) ->
  # Save an as-yet-unsaved editor as a new Markdown note: prompt for a path,
  # write the file, register the note entity, and commit it to the EVND repo.
  # `noteDir` defaults to a fresh date-named directory under the repo.
  # FIX: lazily require note-helper as every sibling method does -- this
  # method previously used `noteHelper` without guaranteeing it was loaded.
  noteHelper ?= require './note-helper'
  noteDir ?= @makeNoteDir()
  text = editor.getText()
  parsedInput = utils.parseMetaData(text)
  title = parsedInput.title
  textContent = parsedInput.content
  tags = parsedInput.tags
  date = parsedInput.date
  notebookName = parsedInput.notebook
  metaText = parsedInput.metaText
  filePath = path.join noteDir, utils.sanitizeFilename(title.toLowerCase()) + ".markdown"
  # Intentional assignment: proceed only when the user confirmed a path.
  if noteFilePath = atom.showSaveDialogSync(filePath)
    options =
      title: title
      text: text # This will include MetaData section...
      tags: tags
      notebook: {name: notebookName}
      metaDate: date
      format: "Markdown"
      filePath: noteFilePath
    fs.writeFileSync(noteFilePath, text)
    # Drop the temporary core:save override installed by openNewNote().
    window.evnd.newNoteDisposables?[editor.id]?.dispose()
    @removePreviewForEditor(editor)
    editor.getBuffer().setPath(noteFilePath)
    newNote = new noteHelper.MarkdownNote(options)
    editor.save()
    @addPreviewForEditor(editor, newNote)
    gitMessage = "Created new note \"#{title}\" (locally) ...\n"
    gitMessage += "#{newNote.summary()}"
    window.evnd.storageManager.addNote newNote, true, gitMessage
    if atom.config.get('ever-notedown.mathjax')
      @setMathJaxGrammar(editor)
    else
      @setEVNDGrammar(editor)
openNote: (note, options, callback) ->
  # Open `note`'s backing file in an editor, apply a grammar matching the
  # note format, and optionally attach a preview pane.
  # TODO: What if the current note isn't of format "Markdown"?
  #console.log "Opening note..."
  absPath = note.absPath()
  if options?.addPreview?
    addPreview = options.addPreview
    delete options.addPreview
  else
    addPreview = true
  options ?= {searchAllPanes: true}
  if (not options.split?) and atom.config.get('ever-notedown.openPreviewInSplitPane')
    options.split = 'left'
  atom.workspace.open(absPath, options).then (editor) =>
    scopeName = switch note.format
      when 'Text' then 'text.plain'
      when 'Markdown' then @getMarkdownScopeName()
      when 'HTML' then 'text.html.basic'
    grammar = atom.grammars.grammarForScopeName(scopeName)
    editor.setGrammar(grammar) if grammar?
    if addPreview
      @addPreviewForEditor editor, note, (previewView) =>
        callback(editor) if callback?
    else
      @removePreviewForEditor(editor)
      callback(editor) if callback?
openNoteInEvernote: (noteID, filePath, previewView) ->
  # Open the given note (located via preview view, id, or file path) in the
  # Evernote desktop client; falls back to the note behind the active item.
  if window.evnd.init then @loadModule()
  # Lazily load helpers, consistent with the sibling methods.
  noteHelper ?= require './note-helper'
  evernoteHelper ?= require './evernote-helper'
  if previewView?.note?
    note = previewView.note
  else if previewView?.noteID?
    note = noteHelper.findNote(window.evnd.noteIndex, {id: previewView.noteID})
  else if noteID?
    note = noteHelper.findNote(window.evnd.noteIndex, {id: noteID})
  else if filePath?
    gitPath0 = atom.config.get('ever-notedown.gitPath')
    gitPath1 = @getRealGitPath()
    if filePath.indexOf(gitPath0) > -1 or
       filePath.indexOf(gitPath1) > -1
      fnStem = path.basename(filePath, path.extname(filePath))
      dir = path.basename(path.dirname(filePath))
      note = noteHelper.findNote(window.evnd.noteIndex, {fnStem: fnStem, dir: dir})
    else
      note = null
  else
    # FIX: was `@searchedOpenedNote()` -- no such method exists; the helper
    # defined on this object is `searchOpenedNote`.
    note = @searchOpenedNote()
  unless note?
    window.alert("No opened note found!")
    return
  window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
  window.evnd.enHelper.openNote note, () =>
    #console.log "New note opened in Evernote!"
    return
openFinder: (notePath) ->
  # Reveal `notePath` in the system file browser via the Evernote helper.
  @loadModule() if window.evnd.init
  window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
  window.evnd.enHelper.openFinder notePath, () =>
    return
searchOpenedNote: () ->
  # Find the note entity backing the active editor (or, failing that, the
  # active EVND preview view), matching by file-name stem plus containing
  # directory inside the EVND git repo. Returns the note, or undefined.
  noteHelper ?= require './note-helper'
  gitPath0 = atom.config.get('ever-notedown.gitPath')
  gitPath1 = @getRealGitPath()
  editor = atom.workspace.getActiveTextEditor()
  if editor? and
     (editor.getPath().indexOf(gitPath0) > -1 or
     editor.getPath().indexOf(gitPath1) > -1)
    filePath = editor.getPath()
    fnStem = path.basename(filePath, path.extname(filePath))
    dir = path.basename(path.dirname(filePath))
    note = noteHelper.findNote(window.evnd?.noteIndex, {fnStem: fnStem, dir: dir})
  else
    # No repo-tracked editor: try the active pane item as a preview view.
    curView = atom.workspace.getActivePaneItem()
    if isEVNDPreviewView(curView)
      if curView.editor?
        curFilePath = curView.editor.getPath()
      else
        curFilePath = curView.filePath
      if curFilePath? and
         (curFilePath.indexOf(gitPath0) > -1 or
         curFilePath.indexOf(gitPath1) > -1)
        fnStem = path.basename(curFilePath, path.extname(curFilePath))
        dir = path.basename(path.dirname(curFilePath))
        note = noteHelper.findNote(window.evnd?.noteIndex, {fnStem: fnStem, dir: dir})
  return note
getNoteENML: ({note, noteID}={}) ->
  # Fetch the raw ENML of a note from the Evernote client and open it in a
  # fresh editor with the XML grammar applied, for inspection.
  if window.evnd.init then @loadModule()
  unless note?
    if noteID?
      note = noteHelper.findNote(window.evnd.noteIndex, {id: noteID})
    else
      note = @searchOpenedNote()
  return unless note?
  window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
  window.evnd.enHelper.getENML note, null, (enml) =>
    if enml?
      tmpDir = note.path
      options = {}
      if atom.config.get('ever-notedown.openPreviewInSplitPane')
        options.split = 'left'
      atom.project.setPaths([tmpDir])
      atom.workspace.open('', options).then (editor) =>
        editor.setText(enml)
        grammar = atom.grammars.grammarForScopeName('text.xml')
        if grammar? then editor.setGrammar(grammar)
        return
    else
      window.alert "Something went wrong and getting ENML failed..."
      return
getNoteHTML: ({note, noteID}={}) ->
  # Fetch the note's HTML export from the Evernote client and open it in a
  # fresh editor with the HTML grammar applied.
  if window.evnd.init then @loadModule()
  unless note?
    if noteID?
      note = noteHelper.findNote(window.evnd.noteIndex, {id: noteID})
    else
      # FIX: was a bare `searchOpenedNote()` call, which is undefined at
      # runtime -- the helper is a method on this object (cf. getNoteENML).
      note = @searchOpenedNote()
  return unless note?
  window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
  window.evnd.enHelper.getHTML note, null, (html) =>
    if html?
      tmpDir = note.path
      options = {}
      if atom.config.get('ever-notedown.openPreviewInSplitPane')
        options.split = 'left'
      atom.project.setPaths([tmpDir])
      atom.workspace.open('', options).then (editor) =>
        editor.setText(html)
        grammar = atom.grammars.grammarForScopeName('text.html.basic')
        if grammar? then editor.setGrammar(grammar)
        return
    else
      window.alert "Something went wrong and getting HTML failed..."
      return
confirmedNoteItem: ({note, noteID}={}) ->
  # Show the info dialog for a note and wire up its delete/open/pull actions;
  # falls back to opening the note when its Evernote info can't be retrieved.
  if window.evnd.init then @loadModule()
  unless note?
    if noteID?
      note = noteHelper.findNote(window.evnd.noteIndex, {id: noteID})
    else
      # FIX: was a bare `searchOpenedNote()` call, which is undefined at
      # runtime -- the helper is a method on this object.
      note = @searchOpenedNote()
  return unless note?
  window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
  window.evnd.enHelper.getNoteInfo note, null, (newNoteInfo) =>
    if newNoteInfo?
      window.evnd.enHelper.getAttachmentsInfo note, newNoteInfo.queryString, (newAttachmentsInfo) =>
        InfoDialog ?= require './info-dialog'
        infoDialog = new InfoDialog()
        infoDialog.addInfo(note, newNoteInfo, newAttachmentsInfo)
        infoDialog.show()
        infoDialog.disposables.add infoDialog.onDidClickDelete (noteID) =>
          @deleteNote({noteID:noteID})
        infoDialog.disposables.add infoDialog.onDidOpenNote (noteID) =>
          note = noteHelper.findNote(window.evnd.noteIndex, {id: noteID})
          @openNote(note)
        infoDialog.disposables.add infoDialog.onDidPullNote (noteID) =>
          @pullFromEvernote(noteID)
        @subscriptions.add infoDialog.disposables
    else
      window.alert("Note info retrieve error! (Maybe this note has not been sent to Evernote? Or it might have already been deleted in Evernote.)")
      @openNote(note)
deleteNote: ({note, noteID, noteTitle}={}, callback) ->
  # Remove a note from the note index (after user confirmation) and destroy
  # any pane items showing its file; files on disk are kept for now.
  # `callback(confirmed)` reports whether the deletion actually happened.
  if window.evnd.init then @loadModule()
  if not note?
    if noteID?
      note = noteHelper.findNote(window.evnd.noteIndex, {id: noteID})
    else if noteTitle?
      note = noteHelper.findNote(window.evnd.noteIndex, {title: noteTitle})
    else
      note = @searchOpenedNote()
  unless note?
    #console.log "No active note (editor or preview) found!"
    return
  # TODO
  confirmedDeleteNote = (note, callback) ->
    window.evnd.noteIndex?.removeNote(note)
    #console.log "Note #{note.title} deleted..."
    for paneItem in atom.workspace.getPaneItems()
      if paneItem.getPath? and paneItem.getPath() is note.absPath()
        paneItem.destroy()
    if callback? then callback(true)
  atom.confirm
    message: "Confirm: Delete Note \"#{note.title}\"?"
    detailedMessage: "This action will remove note \"#{note.title}\" from note Index, but related files will remain on disk for now."
    buttons:
      "Confirm": => confirmedDeleteNote(note, callback)
      "Cancel": =>
        #console.log "Cancelled deleting note..."
        if callback? then callback(false)
importFromEvernote: ({noteLink} = {}) ->
  # Import a note (identified by its Evernote note link) into EVND: if it is
  # already indexed just pull updates; otherwise build a new note entity,
  # export the Evernote copy beside it, commit, then pull the content.
  if window.evnd.init then @loadModule()
  return unless noteLink?
  note = noteHelper.findNote(window.evnd.noteIndex, {noteLink: noteLink})
  if note?
    @pullFromEvernote(note.id, note.path, null)
  else # Construct a new note entity
    # TODO: note format? Markdown? HTML?
    window.evnd.enHelper.getNoteInfo null, {noteLink: noteLink}, (noteInfo) =>
      enModificationTimeStr = utils.enDateToTimeString(noteInfo.enModificationDate)
      noteInfo.creationTime = enModificationTimeStr
      noteInfo.modificationTime = enModificationTimeStr
      note = new noteHelper.MarkdownNote(noteInfo)
      enDest = path.join(note.path, note.fnStem) + "_evernote"
      window.evnd.enHelper.retrieveNote noteLink, note.queryString, enDest, () =>
        utils.timeOut(200)
        # Remember the exported artefacts so they travel with the note.
        if not ("#{enDest}.html/" in note.enExportedFiles)
          note.enExportedFiles.push("#{enDest}.html/")
        if not ("#{enDest}.enex" in note.enExportedFiles)
          note.enExportedFiles.push("#{enDest}.enex")
        gitMessage = "About to import Evernote note \"#{note.title}\" ...\n"
        gitMessage += "#{note.summary()}"
        window.evnd.storageManager.addNote note, true, gitMessage
        @pullFromEvernote(note.id, note.path, null)
pullFromEvernote: (noteID, filePath, previewView) ->
  # Pull the latest content for a note from the Evernote client and merge it
  # into the local copy, warning about unsynced local modifications first.
  if window.evnd.init then @loadModule()
  if noteID?
    note = noteHelper.findNote(window.evnd.noteIndex, {id: noteID})
  else if filePath?
    gitPath0 = atom.config.get('ever-notedown.gitPath')
    gitPath1 = @getRealGitPath()
    # FIX: was `if filePath.indexOf(gitPath0) or filePath.indexOf(gitPath1)`.
    # indexOf returns -1 (truthy) on a miss and 0 (falsy) on a match at the
    # start of the string, so the test was effectively inverted; compare
    # against -1 as the sibling methods do.
    if filePath.indexOf(gitPath0) > -1 or
       filePath.indexOf(gitPath1) > -1
      fnStem = path.basename(filePath, path.extname(filePath))
      dir = path.basename(path.dirname(filePath))
      note = noteHelper.findNote(window.evnd.noteIndex, {fnStem: fnStem, dir: dir})
  else
    # FIX: was `@searchedOpenedNote()` -- the method is `searchOpenedNote`.
    note = @searchOpenedNote()
  unless note?
    window.alert("No opened note found!")
    return
  # Actually pull: fetch remote content, open the note, then either overwrite
  # the buffer (with a confirm dialog) or show the pulled text in a new pane.
  pullNoteNormal = (note, options) =>
    window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
    window.evnd.enHelper.pullNote note, (updated, textContent, html, newNoteInfo) =>
      #console.log "Note pulled..."
      if not updated
        @openNote note, null, () =>
          window.alert("Nothing unsync'd! Opening note...")
        return
      else
        openNoteOptions = {addPreview: true}
        if options?.newPane or atom.config.get('ever-notedown.pulledContentInSplitPane')
          openNoteOptions.addPreview = false
        @openNote note, options, () =>
          textContent = note.metaTextFromNoteInfo(newNoteInfo) + textContent
          for editor in atom.workspace.getTextEditors() when editor.getPath() is note.absPath()
            oldText = editor.getText()
            if openNoteOptions.addPreview
              editor.setText(textContent)
            else
              openNewNoteOptions = {addPreview:false, split: "right", activatePane: true}
              visibleScreenRowRange = editor.getVisibleRowRange()
              @openNewNote textContent, openNewNoteOptions, (newEditor) =>
                row1 = visibleScreenRowRange[0]
                row2 = visibleScreenRowRange[1]
                try
                  newEditor.scrollToScreenPosition [parseInt((row1 + row2)/2), 0], {center: true}
                catch e
                  console.log e
            break
          if openNoteOptions.addPreview
            ConfirmDialog ?= require './confirm-dialog'
            confirmDialogOptions =
              editorId: editor.id
              filePath: editor.getPath()
              note: note
              oldText: oldText
              newText: textContent
              newNoteInfo: newNoteInfo
            confirmDialog = new ConfirmDialog confirmDialogOptions
            confirmDialog.show()
  if window.evnd.searchNoteView? then window.evnd.searchNoteView.cancel()
  conflictStatus = note.checkConflict()
  unless conflictStatus.unsyncdModificationInAtomEVND
    # Also count unsaved buffer modifications as unsynced local changes.
    if previewView? and previewView.editor?.isModified()
      conflictStatus.unsyncdModificationInAtomEVND = true
    else
      notePath = note.absPath()
      for editor in atom.workspace.getTextEditors() when editor.getPath() is notePath
        if editor.isModified()
          conflictStatus.unsyncdModificationInAtomEVND = true
          break
  if conflictStatus.unsyncdModificationInAtomEVND
    # FIX: corrected "oevewrite" -> "overwrite" and the missing space before
    # "(EVND ..." in this user-facing warning.
    detailedMsg = "You can still go ahead and grab content from Evernote, "
    detailedMsg += "whether the new content will be put in a new pane or "
    detailedMsg += "overwrite existing content depends on your settings "
    detailedMsg += "(EVND will wait for your confirmation to write new "
    detailedMsg += "onto disk).\nYour current setting: "
    if atom.config.get('ever-notedown.pulledContentInSplitPane')
      detailedMsg += "open grabbed content in a separate pane.\n"
    else
      detailedMsg += "overwrite existing content.\n"
    detailedMsg += "You can also make sure that this time the new content "
    detailedMsg += "is put into a separate pane.\n\n"
    detailedMsg += "Please choose how to proceed: "
    atom.confirm
      message: "There are changes that have not been sent to Evernote."
      detailedMessage: detailedMsg
      buttons:
        "Cancel": => return #console.log "Cancelled"
        "Go ahead": => pullNoteNormal(note, {searchAllPanes: true})
        "Put pulled content in a new pane": =>
          pullNoteNormal(note, {newPane: true, searchAllPanes: true})
  else
    pullNoteNormal(note, {searchAllPanes: true})
togglePreview: ->
  # Toggle the EVND preview: close it when a preview item is active,
  # otherwise add/remove the preview attached to the active editor.
  @loadModule() if window.evnd.init
  if isEVNDPreviewView(atom.workspace.getActivePaneItem())
    atom.workspace.destroyActivePaneItem()
    return
  activeEditor = atom.workspace.getActiveTextEditor()
  return unless activeEditor?
  supportedGrammars = atom.config.get('ever-notedown.grammars') ? []
  currentScope = activeEditor.getGrammar().scopeName
  unless currentScope in supportedGrammars
    warningMsg = "Cannot preview this file because grammar '#{currentScope}' isn't supported.\n"
    warningMsg += "\n(Current supported grammars set in EVND settings: #{supportedGrammars.toString()})"
    window.alert(warningMsg)
    return
  @addPreviewForEditor(activeEditor) unless @removePreviewForEditor(activeEditor)
getPreviewViewForEditor: (editor) ->
  # Look up the live preview view attached to `editor`; null when none.
  previewURI = @uriForEditor(editor)
  pane = atom.workspace.paneForURI(previewURI)
  if pane?
    candidate = pane.itemForURI(previewURI)
    return candidate if isEVNDPreviewView(candidate)
  null
uriForEditor: (editor) ->
  # Canonical URI under which the preview for `editor` is opened/tracked.
  "ever-notedown-preview://editor/" + editor?.id
removePreviewForEditor: (editor) ->
  # Destroy the preview pane item for `editor`, if any.
  # Returns true when a preview existed and was destroyed.
  previewURI = @uriForEditor(editor)
  pane = atom.workspace.paneForURI(previewURI)
  return false unless pane?
  pane.destroyItem(pane.itemForURI(previewURI))
  true
addPreviewForEditor: (editor, note, callback) ->
  # Open (or focus) the preview pane item for `editor`, associate it with a
  # note entity when one can be found, then restore the previous pane focus.
  uri = @uriForEditor(editor)
  previousActivePane = atom.workspace.getActivePane()
  options =
    searchAllPanes: true
  if atom.config.get('ever-notedown.openPreviewInSplitPane')
    options.split = 'right'
  atom.workspace.open(uri, options).then (evNotedownPreviewView) =>
    if isEVNDPreviewView(evNotedownPreviewView)
      filePath = editor.getPath()
      fnStem = path.basename(filePath, path.extname(filePath))
      dir = path.basename(path.dirname(filePath))
      # Fall back to looking the note up by stem/dir when none was given.
      note ?= noteHelper?.findNote(window.evnd?.noteIndex, {fnStem: fnStem, dir: dir})
      evNotedownPreviewView.note = note
      evNotedownPreviewView.noteID = note?.id
      if note? then evNotedownPreviewView.activateButtons()
      previousActivePane.activate()
      if callback? then callback(evNotedownPreviewView)
boldText: ->
  # Wrap the current selection in Markdown bold markers.
  @loadModule() if window.evnd.init
  activeEditor = atom.workspace.getActiveTextEditor()
  return unless activeEditor?
  wrapped = "**#{activeEditor.getSelectedText()}**"
  activeEditor.insertText wrapped, {select: true}
emphasisText: ->
  # Wrap the current selection in Markdown emphasis (underscore) markers.
  @loadModule() if window.evnd.init
  activeEditor = atom.workspace.getActiveTextEditor()
  return unless activeEditor?
  wrapped = "_#{activeEditor.getSelectedText()}_"
  activeEditor.insertText wrapped, {select: true}
underlineText: ->
  # Wrap the current selection in HTML <u> tags.
  @loadModule() if window.evnd.init
  activeEditor = atom.workspace.getActiveTextEditor()
  return unless activeEditor?
  wrapped = "<u>#{activeEditor.getSelectedText()}</u>"
  activeEditor.insertText wrapped, {select: true}
highlightText: ->
  # Wrap the current selection in HTML <mark> tags.
  @loadModule() if window.evnd.init
  activeEditor = atom.workspace.getActiveTextEditor()
  return unless activeEditor?
  wrapped = "<mark>#{activeEditor.getSelectedText()}</mark>"
  activeEditor.insertText wrapped, {select: true}
strikeThroughText: ->
  # Wrap the current selection in Markdown strikethrough markers.
  @loadModule() if window.evnd.init
  activeEditor = atom.workspace.getActiveTextEditor()
  return unless activeEditor?
  wrapped = "~~#{activeEditor.getSelectedText()}~~"
  activeEditor.insertText wrapped, {select: true}
blockquote: ->
  # Prefix every line of the current selection with "> " (Markdown quote).
  @loadModule() if window.evnd.init
  activeEditor = atom.workspace.getActiveTextEditor()
  return unless activeEditor?
  rawLines = activeEditor.getSelectedText().toString().split(/[\n\r]/)
  quoted = ("> #{line}" for line in rawLines).join("\n")
  activeEditor.insertText quoted, {select: true}
pasteImage: () ->
  # Paste from the system clipboard into the active editor: images are saved
  # as PNG files under the EVND tmp directory; a file path on the clipboard
  # is turned into a Markdown image/attachment link instead.
  if window.evnd.init then @loadModule()
  editor = atom.workspace.getActiveTextEditor()
  # NOTE(review): `editor? isnt ''` compares a boolean to '' and is always
  # true, so it is a no-op -- presumably a leftover; confirm intended guard.
  return unless editor? and editor? isnt '' and atom.workspace.getActivePane().isFocused()
  image = clipboard.readImage()
  if not image.isEmpty()
    buf = image.toPng()
    imgBin = atob(buf.toString('base64'))
    timeStr = utils.sanitizeTimeString(utils.getCurrentTimeString())
    if window.evnd.storageManager?.gitPath
      newPath = path.join(window.evnd.storageManager.gitPath, 'tmp/', "clipboard_#{timeStr}.png")
    else
      newPath = path.join(atom.getConfigDirPath(), 'evnd/tmp/', "#{timeStr}.png")
    fs.writeFileSync(newPath, imgBin, 'binary')
    # NOTE(review): inserts an empty string after saving the image -- looks
    # like the Markdown image link got lost; verify against upstream.
    editor.insertText("")
  else
    filePath = clipboard.readText().trim()
    if fs.isFileSync(filePath)
      if utils.isImage(filePath)
        # NOTE(review): writes an empty string back to the clipboard rather
        # than inserting a link -- confirm this is the intended behavior.
        clipboard.writeText("")
      else
        clipboard.writeText("!{Alt text}(#{filePath} \"Optional title\")") # Attachment...
    else
      return
onDrop: (event) ->
  # Handle files dropped onto a Markdown editor: for each dropped path,
  # insert an image link or an attachment link at the cursor.
  utils ?= require './utils'
  _ ?= require 'underscore-plus'
  path ?= require 'path'
  editor = atom.workspace.getActiveTextEditor()
  return unless editor?
  curPath = editor.getPath()
  return unless utils.isMarkdown(curPath)
  event.preventDefault()
  event.stopPropagation()
  pathsToDrop = _.pluck(event.dataTransfer.files, 'path')
  # TODO: Pop up warning if there're spaces in filenames!
  if pathsToDrop.length > 0
    for onePath in pathsToDrop
      continue unless onePath?
      filename = path.basename(onePath)
      if utils.isImage(filename)
        # NOTE(review): inserts a lone space for images -- the image link
        # appears to have been lost; verify against upstream.
        attachmentText = " "
      else
        # NOTE(review): the quoted title "#(unknown)" looks like a mangled
        # interpolation (e.g. the file name) -- verify against upstream.
        attachmentText = " !{attachment}(#{onePath} \"#(unknown)\") "
      editor.insertText(attachmentText)
  return
previewFile: ({target}) ->
  # Open a preview for the file referenced by a DOM element's data-path:
  # reuse an existing editor when one has the file open, otherwise open a
  # standalone preview URI.
  @loadModule() if window.evnd.init
  targetPath = target.dataset.path
  return unless targetPath
  for openEditor in atom.workspace.getTextEditors()
    if openEditor.getPath() is targetPath
      @addPreviewForEditor(openEditor)
      return
  atom.workspace.open "ever-notedown-preview://#{encodeURI(targetPath)}",
    searchAllPanes: true
saveHtml: ->
  # Save the rendered HTML: through the editor's preview (creating one on
  # demand) or directly from an active preview pane item.
  @loadModule() if window.evnd.init
  activeEditor = atom.workspace.getActiveTextEditor()
  activeItem = atom.workspace.getActivePaneItem()
  return unless activeEditor? or isEVNDPreviewView(activeItem)
  unless activeEditor?
    activeItem.saveAs() if isEVNDPreviewView(activeItem)
    return
  existingPreview = @getPreviewViewForEditor(activeEditor)
  if existingPreview?
    existingPreview?.saveAs()
  else
    @addPreviewForEditor activeEditor, null, (previewView) ->
      previewView?.saveAs()
copyHtml: ->
  # Copy the rendered HTML to the clipboard: through the editor's preview
  # (creating one on demand) or directly from an active preview pane item.
  @loadModule() if window.evnd.init
  activeEditor = atom.workspace.getActiveTextEditor()
  activeItem = atom.workspace.getActivePaneItem()
  return unless activeEditor? or isEVNDPreviewView(activeItem)
  unless activeEditor?
    activeItem.copyToClipboard() if isEVNDPreviewView(activeItem)
    return
  existingPreview = @getPreviewViewForEditor(activeEditor)
  if existingPreview?
    existingPreview?.copyToClipboard()
  else
    @addPreviewForEditor activeEditor, null, (previewView) ->
      previewView?.copyToClipboard()
# Scope name of the grammar used for markdown rendering; falls back to
# plain 'source.gfm' when no extended EVND grammar is registered.
getMarkdownScopeName: ->
  @getEVNDGrammar()?.scopeName ? 'source.gfm'
# Resolve the scope name of the extended EVND grammar for the given
# (or configured) base grammar and MathJax setting.
# Returns undefined when the configured grammar name is unrecognized,
# matching the original switch-without-default behavior.
getEVNDGrammarScopeName: ({evndGrammar, mathjax}={})->
  evndGrammar ?= atom.config.get('ever-notedown.evndGrammar')
  mathjax ?= atom.config.get('ever-notedown.mathjax')
  if evndGrammar is 'Extended source.litcoffee'
    scopeName =
      if mathjax
        'text.markdown.evnd.mathjax.source.litcoffee.inline.html'
      else
        'source.litcoffee'
  else if evndGrammar is 'Extended source.gfm'
    scopeName =
      if mathjax
        'text.markdown.evnd.mathjax.source.gfm.inline.html'
      else
        'text.markdown.evnd.source.gfm.inline.html'
  scopeName
# Return the extended EVND grammar for the current configuration, falling
# back to the plain base grammar when the extended one is not registered.
#
# FIX: the fallback branches used the misspelled variable `gramamr`; the
# 'Extended source.gfm' branch assigned `grammar` but the function returned
# `gramamr`, yielding undefined. Both now use `grammar` consistently.
getEVNDGrammar: ({mathjax}={}) ->
  scopeName = @getEVNDGrammarScopeName({mathjax: mathjax})
  grammar = atom.grammars.grammarForScopeName(scopeName)
  if grammar?
    return grammar
  # Extended grammar not registered (yet) — fall back to the base grammar.
  evndGrammar = atom.config.get('ever-notedown.evndGrammar')
  switch evndGrammar
    when 'Extended source.litcoffee'
      grammar = atom.grammars.grammarForScopeName('source.litcoffee')
    when 'Extended source.gfm'
      grammar = atom.grammars.grammarForScopeName('source.gfm')
  return grammar
# Register the shared inline-HTML grammar with Atom, once.
# No-op when a grammar with scope 'evnd.inline.html' is already known.
addInlineHTMLGrammar: ->
  return if atom.grammars.grammarForScopeName('evnd.inline.html')?
  grammarPath = path.join(__dirname, 'grammars/', 'evnd-inline-html.cson')
  atom.grammars.addGrammar(atom.grammars.readGrammarSync(grammarPath))
# Register the extended EVND (non-MathJax) grammar and apply it to all open
# markdown editors.
#
# FIX: the fallback guard used the misspelled variable `evndGramamr`, whose
# existence check is always false in CoffeeScript, so the else branch always
# returned early and editors were never switched to the fallback grammar.
addEVNDGrammar: ->
  switch atom.config.get('ever-notedown.evndGrammar')
    when 'Extended source.litcoffee' then grammarFileName = null
    when 'Extended source.gfm' then grammarFileName = 'evnd.cson'
  if grammarFileName?
    @addInlineHTMLGrammar()
    evndGrammarPath = path.join __dirname, 'grammars/', grammarFileName
    evndGrammar = atom.grammars.readGrammarSync evndGrammarPath
    atom.grammars.addGrammar(evndGrammar)
  else
    # No extended grammar file for this configuration — fall back to GFM.
    evndGrammar = atom.grammars.grammarForScopeName('source.gfm')
    unless evndGrammar?
      return
  # Apply to every editor that is already using an EVND/markdown grammar
  # or whose file looks like markdown.
  for editor in atom.workspace.getTextEditors()
    editorPath = editor.getPath()
    if editor.getGrammar()?.scopeName in evndGrammarList or
        (editorPath? and utils.isMarkdown(editorPath))
      editor.setGrammar(evndGrammar)
# Unregister all extended EVND grammars and revert affected editors back to
# plain 'source.gfm'. Called on package deactivation.
removeEVNDGrammar: ->
  grammarsToRemove = [
    'text.markdown.evnd.mathjax.source.litcoffee.inline.html'
    'text.markdown.evnd.mathjax.source.gfm.inline.html'
    'text.markdown.evnd.source.gfm.inline.html'
  ]
  for scopeName in grammarsToRemove
    atom.grammars.removeGrammarForScopeName(scopeName)
  # Revert editors that had an EVND/markdown grammar to the default GFM one.
  defaultGrammar = atom.grammars.grammarForScopeName('source.gfm')
  for editor in atom.workspace.getTextEditors()
    editorPath = editor.getPath()
    if editorPath? and editor.getGrammar()?.scopeName in evndGrammarList
      editor.setGrammar(defaultGrammar)
# Switch a single editor to the extended EVND (non-MathJax) grammar, but
# only when it currently uses one of the EVND/markdown grammars.
setEVNDGrammar: (editor) ->
  return unless editor?
  grammar = @getEVNDGrammar({mathjax: false})
  return unless grammar?
  currentScope = editor.getGrammar()?.scopeName
  editor.setGrammar(grammar) if currentScope in evndGrammarList
# Register the MathJax-enabled extended grammar and apply it to all open
# markdown editors. Counterpart of addEVNDGrammar for mathjax=true.
addMathJaxGrammar: ->
  switch atom.config.get('ever-notedown.evndGrammar')
    when 'Extended source.litcoffee' then grammarFileName = 'evnd-litcoffee-mathjax.cson'
    when 'Extended source.gfm' then grammarFileName = 'evnd-mathjax.cson'
  if grammarFileName?
    @addInlineHTMLGrammar()
    mathjaxGrammarPath = path.join __dirname, 'grammars/', grammarFileName
    mathjaxGrammar = atom.grammars.readGrammarSync mathjaxGrammarPath
    atom.grammars.addGrammar(mathjaxGrammar)
  else
    # Unrecognized configuration — fall back to plain GFM.
    mathjaxGrammar = atom.grammars.grammarForScopeName('source.gfm')
    unless mathjaxGrammar?
      return
  # Apply to editors already on an EVND/markdown grammar, or markdown files.
  for editor in atom.workspace.getTextEditors()
    editorPath = editor.getPath()
    if editor.getGrammar()?.scopeName in evndGrammarList or
        (editorPath? and utils.isMarkdown(editorPath))
      editor.setGrammar(mathjaxGrammar)
# Switch a single editor to the MathJax-enabled extended grammar, but only
# when it currently uses one of the EVND/markdown grammars.
setMathJaxGrammar: (editor) ->
  return unless editor?
  grammar = @getEVNDGrammar({mathjax: true})
  return unless grammar?
  currentScope = editor.getGrammar()?.scopeName
  editor.setGrammar(grammar) if currentScope in evndGrammarList
# Unregister only the MathJax variants of the extended grammars and move any
# editor currently on a mathjax grammar to the non-MathJax EVND grammar.
removeMathJaxGrammar: ->
  grammarsToRemove = [
    'text.markdown.evnd.mathjax.source.litcoffee.inline.html'
    'text.markdown.evnd.mathjax.source.gfm.inline.html'
  ]
  for scopeName in grammarsToRemove
    atom.grammars.removeGrammarForScopeName(scopeName)
  evndGrammar = @getEVNDGrammar({mathjax: false})
  # Detect affected editors by the 'mathjax' substring in their scope name.
  for editor in atom.workspace.getTextEditors()
    editorPath = editor.getPath()
    if editorPath? and editor.getGrammar()?.scopeName?.indexOf('mathjax') > -1
      editor.setGrammar(evndGrammar)
# React to a change of the 'ever-notedown.evndGrammar' setting: resolve the
# newly selected extended grammar (registering it first if needed) and apply
# it to open markdown/EVND editors.
#
# FIX: the loop computed `editorPath` but never used it and applied the new
# grammar to EVERY open editor, including non-markdown ones. It now uses the
# same editor filter as addEVNDGrammar/addMathJaxGrammar.
switchEVNDGrammar: (newEVNDGrammar, mathjax) ->
  mathjax ?= atom.config.get('ever-notedown.mathjax')
  newEVNDGrammarScopeName = @getEVNDGrammarScopeName({evndGrammar: newEVNDGrammar, mathjax: mathjax})
  newEVNDGrammar = atom.grammars.grammarForScopeName(newEVNDGrammarScopeName)
  if not newEVNDGrammar?
    # Not registered yet — the add* helpers register it and update editors.
    if mathjax then @addMathJaxGrammar() else @addEVNDGrammar()
    return
  else
    for editor in atom.workspace.getTextEditors()
      editorPath = editor.getPath()
      if editor.getGrammar()?.scopeName in evndGrammarList or
          (editorPath? and utils.isMarkdown(editorPath))
        editor.setGrammar(newEVNDGrammar)
# Lazy one-time initialization: require all deferred modules, load CSS/
# templates/grammars, open the git-backed note storage, and wire up config
# and workspace observers. Guarded by window.evnd.init so the heavy work
# runs only on the first user-triggered action.
loadModule: ->
  # Deferred requires — resolved only on first use to keep activation fast.
  {TextEditor} = require 'atom' unless TextEditor?
  utils ?= require './utils'
  CSON ?= require 'season'
  fs ?= require 'fs-plus'
  path ?= require 'path'
  git ?= require 'git-utils'
  _ ?= require 'underscore-plus'
  evernoteHelper ?= require './evernote-helper'
  storage ?= require './storage-manager'
  noteHelper ?= require './note-helper'
  mathjaxHelper ?= require './mathjax-helper'
  {fenceNameForScope} = require './extension-helper' unless fenceNameForScope?
  cheerio ?= require 'cheerio'
  clipboard ?= require 'clipboard'
  url ?= require 'url'
  SearchResultListView ?= require './search-result-list-view'
  SearchNoteView ?= require './search-note-view'
  NoteManagerView ?= require './note-manager-view' # Defer until used
  EVNDPreviewView ?= require './ever-notedown-preview-view' # Defer until used
  EVNDView ?= require './ever-notedown-view' # Defer until used
  renderer ?= require './renderer' # Defer until used
  if window.evnd.init
    # Re-render any preview panes restored from a previous session.
    for paneItem in atom.workspace.getPaneItems() when isEVNDPreviewView(paneItem)
      paneItem.renderMarkdown()
    @loadCSS()
    @loadTemplate()
    if atom.config.get('ever-notedown.mathjax')
      @addMathJaxGrammar()
    else
      @addEVNDGrammar()
    # Open the git-backed note store, then re-attach notes to restored
    # preview panes by matching filename stem + directory.
    @loadGitRepo null, null, (newStorageManager) =>
      window.evnd.storageManager = newStorageManager
      window.evnd.svgCollections = {}
      window.evnd.newNoteDisposables = {}
      window.evnd.gitPath = window.evnd.storageManager.gitPath
      window.evnd.gitPathSymlink = window.evnd.storageManager.gitPathSymlink
      @loadJSON (newNoteIndex) =>
        window.evnd.noteIndex = newNoteIndex
        if window.evnd.evndView? then window.evnd.evndView.refresh()
        for paneItem in atom.workspace.getPaneItems()
          if isEVNDPreviewView(paneItem) and not paneItem.note?
            filePath = paneItem.getPath()
            fnStem = path.basename(filePath, path.extname(filePath))
            dir = path.basename(path.dirname(filePath))
            note = noteHelper.findNote(window.evnd.noteIndex, {fnStem: fnStem, dir: dir})
            if (not paneItem.noteID?) and note?
              paneItem.noteID = note.id
              paneItem.attachNote(note)
    #
    # TODO: Implement this!
    #
    #@subscriptions.add atom.config.observe 'ever-notedown.renderDiagrams', (toRender) =>
    #  if toRender and not window.evnd.chartsLibsLoaded
    #    chartsHelper ?= require './charts-helper'
    #    chartsHelper.loadChartsLibraries()
    # Changing the git path requires user confirmation before reloading the
    # storage manager and the note index.
    @subscriptions.add atom.config.onDidChange 'ever-notedown.gitPath', (event) =>
      newGitPath = event.newValue
      reloadGitRepo = =>
        @loadGitRepo newGitPath, null, (newStorageManager) =>
          if newStorageManager?
            window.evnd.storageManager = newStorageManager
            @loadJSON (newNoteIndex) =>
              window.evnd.noteIndex = newNoteIndex
              if window.evnd.evndView? then window.evnd.evndView.refresh()
            window.evnd.gitPath = newGitPath
      dmsg = "Changing git repo path for EVND to #{newGitPath}"
      if atom.config.get('ever-notedown.gitPathSymlink') then dmsg += " (symbolic link)"
      atom.confirm
        message: dmsg + "?"
        buttons:
          "Confirm": => reloadGitRepo()
          "Cancel": => return
          "Revert": =>
            atom.config.set 'ever-notedown.gitPath', event.oldValue
    # Same confirm/reload flow for the symlink flag.
    @subscriptions.add atom.config.onDidChange 'ever-notedown.gitPathSymlink', (event) =>
      gitPathSymlink = event.newValue
      reloadGitRepo = =>
        @loadGitRepo null, gitPathSymlink, (newStorageManager) =>
          if newStorageManager?
            window.evnd.storageManager = newStorageManager
            @loadJSON (newNoteIndex) =>
              window.evnd.noteIndex = newNoteIndex
              if window.evnd.evndView? then window.evnd.evndView.refresh()
            window.evnd.gitPathSymlink = gitPathSymlink
      dmsg = "Changing git repo path for EVND to #{atom.config.get('ever-notedown.gitPath')}"
      if gitPathSymlink then dmsg += " (symbolic link)"
      atom.confirm
        message: dmsg + "?"
        buttons:
          "Confirm": => reloadGitRepo()
          "Cancel": => return
          "Revert": =>
            atom.config.set 'ever-notedown.gitPathSymlink', event.oldValue
    @subscriptions.add atom.config.observe 'ever-notedown.noteTemplate', (newTemplateName) =>
      @loadTemplate(newTemplateName)
    @subscriptions.add atom.config.onDidChange 'ever-notedown.theme', (event) =>
      newThemeName = event.newValue
      @loadCSS(newThemeName)
    @subscriptions.add atom.config.onDidChange 'ever-notedown.syntaxTheme', (event) =>
      newSyntaxThemeName = event.newValue
      @loadCSS(null, newSyntaxThemeName)
    # TODO: ...
    @subscriptions.add atom.config.observe 'ever-notedown.mathjax', (mathjax) =>
      if mathjax
        mathjaxHelper.loadMathJax()
        @addMathJaxGrammar()
      else
        mathjaxHelper.unloadMathJax()
        @removeMathJaxGrammar()
    @subscriptions.add atom.config.onDidChange 'ever-notedown.evndGrammar', (event) =>
      mathjax = atom.config.get('ever-notedown.mathjax')
      @switchEVNDGrammar(event.newValue, mathjax)
    @subscriptions.add atom.config.observe 'ever-notedown.mathjaxCustomMacros', (customMacros) =>
      mathjaxHelper.reconfigureMathJax() # TODO: this isn't working!
    # Sort-order change invalidates the cached list views; rebuilt lazily.
    @subscriptions.add atom.config.observe 'ever-notedown.sortBy', (sortBy) =>
      window.evnd.noteManagerView?.destroy()
      window.evnd.noteManagerView = null
      window.evnd.searchResultListView?.destroy()
      window.evnd.searchResultListView = null
    # New/markdown editors get the extended grammar automatically.
    @subscriptions.add atom.workspace.observeTextEditors (editor) =>
      if (editor?.getGrammar()?.scopeName in ['source.gfm', 'source.litcoffee']) or
          utils.isMarkdown(editor?.getPath?())
        if atom.config.get('ever-notedown.mathjax')
          @setMathJaxGrammar(editor)
        else
          @setEVNDGrammar(editor)
    # Keep the preview pane showing the preview of the active editor.
    @subscriptions.add atom.workspace.observeActivePaneItem (activeItem) =>
      if activeItem is atom.workspace.getActiveTextEditor() and activeItem?.id
        previewView = @getPreviewViewForEditor(activeItem)
        if previewView?
          editorPane = atom.workspace.paneForItem(activeItem)
          previewPane = atom.workspace.paneForItem(previewView)
          if previewPane isnt editorPane and
              previewPane?.getActiveItem() isnt previewView
            previewPane.activateItem(previewView)
  # Mark initialization done so subsequent calls only refresh requires.
  window.evnd.init = false
| true | # Used some code from https://github.com/atom/markdown-preview/blob/9ff76ad3f6407a0fb68163a538c6d460280a1718/lib/main.coffee
#
# Reproduced license info:
# Copyright (c) 2014 GitHub Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
path = require 'path'
{CompositeDisposable, Disposable} = require 'atom'
{File, Directory} = require 'atom'
{$} = require 'atom-space-pen-views'
TextEditor = null
CSON = null
chartsHelper = null
evernoteHelper = null # delay require './evernote-helper'
storage = null # delay require './storage-manager'
noteHelper = null # require './note-helper'
mathjaxHelper = null
utils = null
fenceNameForScope = null #require './extension-helper'
fs = null #require 'fs-plus'
git = null # requrie 'git-utils'
cheerio = null # require 'cheerio'
clipboard = null
_ = null # require 'underscore-plus'
# used some code from atom/markdown-preview/lib/main.coffee
url = null #require 'url'
NoteManagerView = null # Defer until used
EVNDPreviewView = null # Defer until used
EVNDView = null # Defer until used
NoteInfoView = null
ConfirmDialog = null
InfoDialog = null
SearchNoteView = null
SearchResultListView = null
renderer = null # Defer until used
# True when `object` is a NoteInfoView instance (lazily requiring the class).
# NOTE(review): NoteInfoView is required from './info-dialog' — verify this
# path; sibling view classes are loaded from modules matching their names.
isNoteInfoView = (object) ->
  NoteInfoView ?= require './info-dialog'
  object instanceof NoteInfoView
# True when `object` is a NoteManagerView instance (lazily requiring the
# class).
#
# FIX: required './note-manager' while loadModule loads the same
# NoteManagerView binding from './note-manager-view'; the two code paths now
# agree on the module path.
isNoteManagerView = (object) ->
  NoteManagerView ?= require './note-manager-view'
  object instanceof NoteManagerView
# True when `object` is the main EVNDView panel (lazily requiring the class).
isEVNDView = (object) ->
  EVNDView ?= require './ever-notedown-view'
  object instanceof EVNDView
# Construct a preview pane item from serialized state or open options
# (lazily requiring the class).
createEVNDPreviewView = (state) ->
  EVNDPreviewView ?= require './ever-notedown-preview-view'
  new EVNDPreviewView(state)
# True when `object` is an EVNDPreviewView pane item (lazily requiring the
# class).
isEVNDPreviewView = (object) ->
  EVNDPreviewView ?= require './ever-notedown-preview-view'
  object instanceof EVNDPreviewView
# Let Atom restore serialized preview panes across window reloads.
atom.deserializers.add
  name: 'EVNDPreviewView'
  deserialize: (state) ->
    # Only rebuild from plain state objects (guards against stale formats).
    createEVNDPreviewView(state) if state.constructor is Object
# Default location of the git-backed note store (inside Atom's config dir).
defaultGitPath = path.join atom.getConfigDirPath(), 'evnd/'
# Preview theme name -> stylesheet path (relative to the package root).
themeDict =
  "Default": "assets/themes/default/style.css"
  "Default 2": "assets/themes/default2/style.css"
  "Default 3": "assets/themes/default3/style.css"
  "Atom": "assets/themes/atom/style.css"
  "Custom 1": "assets/themes/custom1/style.css"
  "Custom 2": "assets/themes/custom2/style.css"
# Syntax-highlighting theme name -> stylesheet path.
syntaxThemeDict =
  "Default": "assets/themes-syntax/default/style.css"
  "Default 2": "assets/themes-syntax/default2/style.css"
  "One Light": "assets/themes-syntax/one-light/style.css"
  "One Dark": "assets/themes-syntax/one-dark/style.css"
  "Solarized Light": "assets/themes-syntax/solarized-light/style.css"
  "Solarized Dark": "assets/themes-syntax/solarized-dark/style.css"
  "Github": "assets/themes-syntax/github/style.css"
  "Chester": "assets/themes-syntax/chester/style.css"
  "Tomorrow": "assets/themes-syntax/tomorrow/style.css"
  "IDLE": "assets/themes-syntax/IDLE/style.css"
  "Seti Syntax": "assets/themes-syntax/seti-syntax/style.css"
  "Cobalt": "assets/themes-syntax/cobalt/style.css"
  "Monokai": "assets/themes-syntax/monokai/style.css"
  "Serpia": "assets/themes-syntax/serpia/style.css"
  "Custom 1": "assets/themes-syntax/custom1/style.css"
  "Custom 2": "assets/themes-syntax/cust2/style.css"
# Global variables?
window.evnd =
evndView: null
editor: null
searchNoteView: null
searchResultListView: null
noteManagerView: null
cssTheme: ""
cssCode: ""
template: ""
noteIndex: null
storageManager: null
enHelper: null
init: null
chartsLibsLoaded: null
gitPath: null
gitPathSymlink: null
svgCollections: null
newNoteDisposables: null
module.exports =
config:
showPath:
type: 'boolean'
default: true
order: 1
gitPath:
type: 'string'
default: defaultGitPath
description: 'Default location to store your ever-notedown notes, GIT-backed'
order: 2
gitPathSymlink:
type: 'boolean'
default: true
description: 'Check this if the specified gitPath is a symbolic link'
order: 3
openNoteInEvernoteAuto:
title: 'Open Note in Evernote'
type: 'boolean'
default: false
description: "Automatically open note in Evernote client after note creation or modification"
order: 4
pulledContentInSplitPane:
type: 'boolean'
default: false
description: "After loading note content from the Evernote client database, put the loaded content in a separate pane as a new file? (default: false, will overwrite old note content)."
order: 5
sortBy:
type: 'string'
default: 'Title'
enum: ['default', 'Title', 'Notebook', 'Creation Time', 'Modification Time']
description: 'Default sorting is the order in which the notes are displayed in the drop-down note browser'
order: 6
convertHexNCR2String:
title: 'Convert Hex NCR to String'
type: 'boolean'
default: true
description: 'When importing (or pulling) from Evernote, convert hex NCR represented Unicode characters to UTF8 string'
order: 7
defaultFormat:
type: 'string'
default: 'Markdown'
enum: ['Text', 'Markdown', 'HTML']
description: '(Please choose only "Markdown" for now...)'
order: 7
codeSnippet:
type: 'boolean'
default: true
description: 'Render selected content as a fenced code block'
order: 8
toc:
title: 'TOC'
type: 'boolean'
default: true
description: 'Enable Table of Contents generation ([TOC])'
order: 9
checkbox:
type: 'boolean'
default: true
description: 'Render ([ ], [x]) as checkboxes everywhere'
order: 10
footnote:
type: 'boolean'
default: true
description: 'Parse footnotes in MMD style...([^text] for reference, [^text]: for definition)'
order: 11
mathjax:
type: 'boolean'
default: true
description: 'Enable MathJax processing'
order: 12
mathjaxOutput:
type: 'string'
default: 'SVG'
enum: ['SVG'] #['SVG', 'HTML/CSS']
order: 13
mathjaxCustomMacros:
type: 'string'
default: "Physical Sciences"
enum: [
"None",
"Default",
"Physical Sciences",
"Math",
"Custom 1",
"Custom 2"
]
order: 14
description: 'Use custom defined macros (~/.atom/packages/ever-notdown/assets/mathjax/macros/custom.json) for MathJax rendering. (After making changes, please use "View -> Reload" for the change to take effect.)'
breakOnSingleNewline:
type: 'boolean'
default: false
description: 'Markdown rendering option'
order: 15
smartyPants:
type: 'boolean'
default: false
description: 'Use "smart" typograhic punctuation for things like quotes and dashes.'
order: 16
noteTemplate:
type: 'string'
default: 'Default'
description: 'Template for creating new note'
enum: [
"Default",
"Lecture Notes",
"Custom 1",
"Custom 2",
"None"
]
order: 17
theme:
type: 'string'
default: "Default"
enum: [
"Default",
"Default 2",
"Default 3",
"Atom",
"Custom 1",
"Custom 2"
]
order: 18
syntaxTheme:
type: 'string'
default: "Default"
enum: [
"Default",
"Default 2",
"One Light",
"One Dark",
"Solarized Light",
"Solarized Dark",
"Github",
"Chester",
"Tomorrow",
"IDLE",
"Seti Syntax",
"Cobalt",
"Monokai",
"Serpia",
"Custom 1",
"Custom 2"
]
order: 19
liveUpdate:
type: 'boolean'
default: true
description: 'For Markdown Preview'
order: 20
openPreviewInSplitPane:
type: 'boolean'
default: true
order: 21
syncScroll:
type: 'boolean'
default: true
description: 'Sync scrolling between the editor and the preview pane'
order: 22
grammars:
type: 'array'
default: [
'source.gfm'
'source.litcoffee'
'text.html.basic'
'text.plain'
'text.plain.null-grammar'
'text.markdown.evnd.source.gfm.inline.html'
'text.markdown.evnd.mathjax.source.gfm.inline.html'
'text.markdown.evnd.mathjax.source.litcoffee.inline.html'
]
order: 23
evndGrammar:
title: 'Extended grammar for syntax highlighting markdown files in editor'
type: 'string'
order: 24
enum: [
'Extended source.litcoffee'
'Extended source.gfm'
]
default: 'Extended source.gfm'
description: 'Support extra syntax highlighting, eg: inline HTML, MathJax equations, etc.'
subscriptions: null
# TODO: This CSS matter... should we just go for "getMarkdownPreviewCSS"?
activate: (state) ->
return unless process.platform is 'darwin' # OSX Only!
window.evnd.init = true
window.evnd.chartsLibsLoaded = false
#console.log atom.config.get('ever-notedown.gitPath')
@loadJSON (newNoteIndex) =>
window.evnd.noteIndex = newNoteIndex
mathjax = atom.config.get('ever-notedown.mathjax')
if mathjax
mathjaxHelper = require('./mathjax-helper')
mathjaxHelper.loadMathJax()
# Events subscribed to in atom's system can be easily
# cleaned up with a CompositeDisposable
@subscriptions = new CompositeDisposable
# Register commands
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:toggle': =>
@toggle()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:toggle-note-list': =>
@createNoteManagerView(state).toggle()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:selection-to-evernote', =>
@sel2Evernote()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:file-to-evernote', =>
@file2Evernote()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:pull-current-note-from-evernote', =>
@pullFromEvernote()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:import-note-from-evernote', =>
@showImportNotePanel()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:new-note', =>
@openNewNote()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:open-config', =>
@openConfig()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:open-help-document', =>
@openHelpDoc()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:markdown-quick-ref', =>
@openMarkdownQuickRef()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:mathjax-quick-ref', =>
@openMathJaxQuickRef()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:notes-for-developers', =>
@openDevNotes()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:toggle-preview': =>
@togglePreview()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:copy-rendered-html': =>
@copyHtml()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:save-rendered-html': =>
@saveHtml()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:toggle-break-on-single-newline': =>
keyPath = 'PI:KEY:<KEY>END_PI'
atom.config.set(keyPath, not atom.config.get(keyPath))
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:view-current-note-template': =>
@openNewNote()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-note-template-custom1': =>
@editCustomTemplate('Custom 1')
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-note-template-custom2': =>
@editCustomTemplate('Custom 2')
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:view-current-theme-css': =>
@viewThemeCSS()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-theme-custom1': =>
@editCustomThemeCSS('Custom 1')
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-theme-custom2': =>
@editCustomThemeCSS('Custom 2')
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:view-current-syntax-theme-css': =>
@viewSyntaxThemeCSS()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-syntax-theme-custom1': =>
@editCustomSyntaxThemeCSS('Custom 1')
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-syntax-theme-custom2': =>
@editCustomSyntaxThemeCSS('Custom 2')
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:view-mathjax-macros': =>
@viewMathJaxMacros()
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-macros-custom1': =>
@editCustomMacros('Custom 1')
@subscriptions.add atom.commands.add 'atom-workspace',
'ever-notedown:edit-macros-custom2': =>
@editCustomMacros('Custom 2')
@subscriptions.add atom.commands.add 'atom-text-editor', 'drop': (event) =>
#console.log 'Dropping item!'
@onDrop(event)
@subscriptions.add atom.commands.add 'atom-text-editor',
'core:paste': (event) =>
#console.log "Pasting stuff!"
event.stopPropagation()
@pasteImage()
@subscriptions.add atom.commands.add 'atom-text-editor',
'ever-notedown:bold-text': =>
@boldText()
@subscriptions.add atom.commands.add 'atom-text-editor',
'ever-notedown:emphasis-text': =>
@emphasisText()
@subscriptions.add atom.commands.add 'atom-text-editor',
'ever-notedown:underline-text': =>
@underlineText()
@subscriptions.add atom.commands.add 'atom-text-editor',
'ever-notedown:highlight-text': =>
@highlightText()
@subscriptions.add atom.commands.add 'atom-text-editor',
'ever-notedown:strikethrough-text': =>
@strikeThroughText()
@subscriptions.add atom.commands.add 'atom-text-editor',
'ever-notedown:blockquote': =>
@blockquote()
@subscriptions.add atom.workspace.observePaneItems (item) =>
if isEVNDPreviewView(item)
item.disposables.add item.onDidClickButtonEvernote (editor, previewView) =>
@file2Evernote(editor, previewView)
item.disposables.add item.onDidClickButtonPull (filePath, previewView) =>
@pullFromEvernote(null, filePath, previewView)
item.disposables.add item.onDidClickButtonNewNote =>
@openNewNote()
item.disposables.add item.onDidClickButtonHome =>
@toggle()
item.disposables.add item.onDidClickButtonEye (filePath, previewView) =>
@openNoteInEvernote(null, filePath, previewView)
item.disposables.add item.onDidClickButtonInfo (note) =>
@confirmedNoteItem({note: note})
item.disposables.add item.onDidClickButtonHTML (note) =>
@getNoteHTML({note: note})
item.disposables.add item.onDidClickButtonENML (note) =>
@getNoteENML({note: note})
item.disposables.add item.onDidClickButtonFolder (notePath) =>
@openFinder(notePath)
@subscriptions.add item.disposables
previewFile = @previewFile.bind(this)
@subscriptions.add atom.commands.add '.tree-view .file .name[data-name$=\\.markdown]',
'ever-notedown:preview-file', previewFile
@subscriptions.add atom.commands.add '.tree-view .file .name[data-name$=\\.md]',
'ever-notedown:preview-file', previewFile
@subscriptions.add atom.commands.add '.tree-view .file .name[data-name$=\\.mdown]',
'ever-notedown:preview-file', previewFile
@subscriptions.add atom.commands.add '.tree-view .file .name[data-name$=\\.mkd]',
'ever-notedown:preview-file', previewFile
@subscriptions.add atom.commands.add '.tree-view .file .name[data-name$=\\.mkdown]',
'ever-notedown:preview-file', previewFile
@subscriptions.add atom.commands.add '.tree-view .file .name[data-name$=\\.ron]',
'ever-notedown:preview-file', previewFile
@subscriptions.add atom.commands.add '.tree-view .file .name[data-name$=\\.txt]',
'ever-notedown:preview-file', previewFile
atom.workspace.addOpener (uriToOpen) ->
try
{protocol, host, pathname} = url.parse(uriToOpen)
catch error
return
return unless protocol is 'ever-notedown-preview:'
try
pathname = decodeURI(pathname) if pathname
catch error
return
if host is 'editor'
createEVNDPreviewView(editorId: pathname.substring(1))
else
createEVNDPreviewView(filePath: pathname)
deactivate: ->
# TODO: manage storage?
#if atom.config.get('ever-notedown.mathjax') then @removeMathJaxGrammar()
@removeEVNDGrammar()
window.evnd.noteIndex?.update()
window.evnd.noteManagerView?.destroy?()
window.evnd.searchResultListView?.destroy?()
window.evnd.evndView?.destroy()
@subscriptions.dispose()
for k, v of window.evnd
if k in ["cssTheme", "cssCode", "template"]
window.evnd[k] = ""
else
window.evnd[k] = null
serialize: ->
noteManagerViewState: window.evnd.noteManagerView?.serialize()
toggle: ->
if window.evnd.init then @loadModule()
unless window.evnd.evndView?
EVNDView ?= require './ever-notedown-view'
newEVNDView = new EVNDView(@)
newEVNDView.disposables.add newEVNDView.onDidClickButtonImportNote =>
@showImportNotePanel()
newEVNDView.disposables.add newEVNDView.onDidClickButtonNewNote =>
newEVNDView.hide()
@openNewNote()
newEVNDView.disposables.add newEVNDView.onDidClickButtonDeleteNote =>
@deleteNote()
newEVNDView.disposables.add newEVNDView.onDidClickButtonNoteList =>
@createNoteManagerView(@).toggle()
newEVNDView.disposables.add newEVNDView.onDidClickButtonOpenConfig =>
@openConfig()
newEVNDView.disposables.add newEVNDView.onDidClickButtonOpenHelp =>
@openHelpDoc()
newEVNDView.disposables.add newEVNDView.onDidClickButtonOpenNote (note) =>
newEVNDView.hide()
@openNote(note)
newEVNDView.disposables.add newEVNDView.onDidClickButtonOpenFinder (notePath) =>
@openFinder(notePath)
newEVNDView.disposables.add newEVNDView.onDidClickButtonOpenInfo (note) =>
@confirmedNoteItem({note: note})
newEVNDView.disposables.add newEVNDView.onDidClickButtonDeleteNote (note) =>
@deleteNote {note:note}, (deleted) =>
if deleted then newEVNDView.refresh()
newEVNDView.disposables.add newEVNDView.onDidClickButtonExportNote () =>
@saveHtml()
@subscriptions.add newEVNDView.disposables
window.evnd.evndView = newEVNDView
window.evnd.evndView.toggle(@)
#
# Based on the official Atom Markdown Preview package
# Updated Nov 15, 2015
# TODO: move these functions to `ever-notedown-preview-view.coffee`
#
getTextEditorStyles: ->
textEditorStyles = document.createElement("atom-styles")
textEditorStyles.initialize(atom.styles)
textEditorStyles.setAttribute "context", "atom-text-editor"
document.body.appendChild textEditorStyles
# Extract style elements content
Array.prototype.slice.apply(textEditorStyles.childNodes).map (styleElement) ->
styleElement.innerText
# TODO: remove the particular {overflow-y: scroll;}?
getMarkdownPreviewCSS: ->
return @markdownPreviewCSS if @markdownPreviewCSS
markdowPreviewRules = []
ruleRegExp = /\.ever-notedown-preview/
cssUrlRefExp = /url\(atom:\/\/ever-notedown\/assets\/(.*)\)/
for stylesheet in document.styleSheets
if stylesheet.rules?
for rule in stylesheet.rules
# We only need `.ever-notedown-preview` css
markdowPreviewRules.push(rule.cssText) if rule.selectorText?.match(ruleRegExp)?
@markdownPreviewCSS = markdowPreviewRules
.concat(@getTextEditorStyles())
.join('\n')
.replace(/([^\.])atom-text-editor/g, '$1pre.editor-colors') # <atom-text-editor> are now <pre>
.replace(/:host/g, '.host') # Remove shadow-dom :host selector causing problem on FF
.replace cssUrlRefExp, (match, assetsName, offset, string) -> # base64 encode assets
assetPath = path.join __dirname, '../assets', assetsName
originalData = fs.readFileSync assetPath, 'binary'
base64Data = new Buffer(originalData, 'binary').toString('base64')
"url('data:image/jpeg;base64,#{base64Data}')"
@markdownPreviewCSS
editCustomSyntaxThemeCSS: (syntaxThemeName) ->
return unless syntaxThemeName.indexOf('Custom') > -1 and syntaxThemeDict[syntaxThemeName]?
if window.evnd.init then @loadModule()
evndPkgPath = atom.packages.resolvePackagePath('ever-notedown')
syntaxThemeFilePath = path.join evndPkgPath, syntaxThemeDict[syntaxThemeName]
atom.workspace.open(syntaxThemeFilePath)
viewSyntaxThemeCSS: ->
@loadCSS() unless window.evnd.cssCode?
syntaxThemeCSS = window.evnd.cssCode
atom.workspace.open('').then (editor) =>
editor.setText(syntaxThemeCSS)
cssGrammar = atom.grammars.grammarForScopeName('source.css')
if cssGrammar then editor.setGrammar(cssGrammar)
editCustomThemeCSS: (themeName) ->
return unless themeName?.indexOf('Custom') > -1 and themeDict[themeName]?
if window.evnd.init then @loadModule()
evndPkgPath = atom.packages.resolvePackagePath('ever-notedown')
themeFilePath = path.join evndPkgPath, themeDict[themeName]
atom.workspace.open(themeFilePath)
viewThemeCSS: ->
@loadCSS() unless window.evnd.cssTheme?
themeCSS = window.evnd.cssTheme
atom.workspace.open('').then (editor) =>
editor.setText(themeCSS)
cssGrammar = atom.grammars.grammarForScopeName('source.css')
if cssGrammar then editor.setGrammar(cssGrammar)
loadCSS: (themeName, syntaxThemeName) ->
# Load defined CSS themes
themeName ?= atom.config.get('ever-notedown.theme')
themeFileName = themeDict[themeName]
syntaxThemeName ?= atom.config.get('ever-notedown.syntaxTheme')
syntaxThemeFileName = syntaxThemeDict[syntaxThemeName]
return unless themeFileName? and syntaxThemeFileName?
evndPkgPath = atom.packages.resolvePackagePath('ever-notedown')
themeFilePath = path.join evndPkgPath, themeFileName
window.evnd.cssTheme = fs.readFileSync(themeFilePath, 'utf8')
syntaxThemeFilePath = path.join evndPkgPath, syntaxThemeFileName
window.evnd.cssCode = fs.readFileSync(syntaxThemeFilePath, 'utf8')
themePath = path.join evndPkgPath, "styles/theme.css"
themeCSS = window.evnd.cssTheme + window.evnd.cssCode
fs.writeFileSync(themePath, themeCSS, 'utf8')
@reloadTheme(themeCSS, {sourcePath: themePath})
return themeCSS
reloadTheme: (source, params) ->
return unless source
#console.log "Reloading css style sheet... #{params.sourcePath}"
sourcePath = params?.sourcePath
sourcePath ?= path.join atom.packages.resolvePackagePath('ever-notedown'), 'styles/theme.css'
priority = params?.priority
styleElements = atom.styles.getStyleElements()
for styleElement in styleElements
if styleElement.sourcePath is sourcePath
priority ?= styleElement.priority ? 0
atom.styles.removeStyleElement(styleElement)
#break
params.priority = priority
atom.styles.addStyleSheet(source, params)
@markdownPreviewCSS = null
removeTheme: (sourcePath) ->
return unless sourcePath
#console.log "Removing css style sheet... #{sourcePath}"
styleElements = atom.styles.getStyleElements()
for styleElement in styleElements
if styleElement.sourcePath is sourcePath
atom.styles.removeStyleElement(styleElement)
break
viewTemplate: ->
if window.evnd.init then @loadModule()
template = window.evnd.template ? @loadTemplate()
atom.workspace.open('').then (editor) =>
editor.setText(template)
editCustomTemplate: (templateName) ->
return unless templateName?.indexOf('Custom') > -1 and
noteTemplateDict[templateName]?
if window.evnd.init then @loadModule()
evndPkgPath = atom.packages.resolvePackagePath('ever-notedown')
templateFilePath = path.join evndPkgPath, noteTemplateDict[templateName]
atom.workspace.open templateFilePath, {searchAllPanes: true}
loadTemplate: (templateName) ->
evndPkgPath = atom.packages.resolvePackagePath('ever-notedown')
templateName ?= atom.config.get('ever-notedown.noteTemplate')
if templateName is "None"
window.evnd.template = ""
else
templateFilePath = path.join evndPkgPath, noteTemplateDict[templateName]
window.evnd.template = fs.readFileSync(templateFilePath, 'utf8')
return window.evnd.template
viewMathJaxMacros: ->
if window.evnd.init then @loadModule()
unless atom.config.get('ever-notedown.mathjax')
window.alert "MathJax is not enabled currently!"
return
mathjaxHelper ?= require './mathjax-helper'
console.log mathjaxHelper
macros = mathjaxHelper.loadCustomMacros()
console.log macros
atom.workspace.open('').then (editor) =>
editor.setText(mathjaxHelper.macrosToCSONString(macros))
grammar = atom.grammars.grammarForScopeName('source.coffee')
if grammar? then editor.setGrammar(grammar)
editCustomMacros: (macroName) ->
if window.evnd.init then @loadModule()
mathjaxHelper ?= require './mathjax-helper'
return unless macroName?.indexOf('Custom') > -1 and
mathjaxHelper.macroPaths[macroName]?
atom.workspace.open(mathjaxHelper.macroPaths[macroName])
getGitDir: (gitPath, gitPathSymlink) ->
gitPath ?= atom.config.get('ever-notedown.gitPath')
gitPathSymlink ?= atom.config.get('ever-notedown.gitPathSymlink')
if window.evnd.storageManager?.gitDir? and
window.evnd.storageManager.gitPath is gitPath and
window.evnd.storageManager.gitPathSymlink is gitPathSymlink
return window.evnd.storageManager?.gitDir
gitDir = new Directory(gitPath, gitPathSymlink)
return gitDir
getRealGitPath: ->
gitDir = @getGitDir()
return gitDir.getRealPathSync()
loadGitRepo: (gitPath, gitPathSymlink, callback) ->
gitPath ?= atom.config.get('ever-notedown.gitPath')
gitPathSymlink ?= atom.config.get('ever-notedown.gitPathSymlink')
#console.log "Git Path: " + gitPath
storage ?= require './storage-manager'
gitDir = @getGitDir(gitPath, gitPathSymlink)
loadGitRepoNormal = =>
if window.evnd.storageManager?.gitPath is gitPath and
window.evnd.storageManager?.gitPathSymlink is gitPathSymlink and
window.evnd.storageManager?.gitDir?.existsSync()
if window.evnd.storageManager.gitRepo is null
window.evnd.storageManager.initRepo () =>
callback(window.evnd.storageManager)
else
callback(window.evnd.storageManager)
else
storageOptions =
gitPath: gitPath
gitPathSymlink: gitPathSymlink
gitRepo: null
gitDir: gitDir
window.evnd.storageManager ?= new storage.StorageManager(storageOptions)
callback(window.evnd.storageManager)
if not gitDir.existsSync()
dmsg = "The current GIT directory #{gitPath} "
if gitPathSymlink then dmsg += "(symolic link) "
dmsg += "for EVND doesn't exist!"
atom.confirm
message: dmsg
buttons:
"mkdir": =>
@initGitDir gitDir, () =>
loadGitRepoNormal()
"Open Settings": =>
@openConfig()
callback(null)
return
else
loadGitRepoNormal()
initGitDir: (gitDir, callback) ->
gitDir ?= @getGitDir()
if gitDir?.existsSync()
if callback? then callback()
return
atom.confirm
message: "Will create directory at #{gitDir.getRealPathSync()}"
buttons:
"Confirm": =>
fs ?= require 'fs-plus'
fs.makeTreeSync(gitDir.getRealPathSync())
if callback? then callback()
"Cancel": =>
if callback? then callback()
return
initJSONFile: (jsonFile, callback) ->
@initGitDir null, () =>
jsonFile.write("{}")
if callback? then callback()
loadJSON: (callback) ->
path ?= require 'path'
jsonPath = path.join(@getRealGitPath(), "index.json")
jsonFile = new File(jsonPath)
loadJSONNormal = =>
jsonFile.read().then (jsonString) =>
jobj = JSON.parse(jsonString)
noteHelper ?= require './note-helper'
options =
jsonOBJ: jobj
absfilename: jsonPath
file: jsonFile
callback(new noteHelper.NoteIndex(options))
if jsonFile.existsSync()
loadJSONNormal()
else
@initJSONFile jsonFile, () =>
loadJSONNormal()
openConfig: ->
options =
searchAllPanes: true
if atom.config.get('ever-notedown.openPreviewInSplitPane')
options.split = "left"
atom.workspace.open 'atom://config/packages/ever-notedown/', options
openHelpDoc: ->
if window.evnd.init then @loadModule()
pathToHelpDoc = path.join __dirname, '../docs/help.md'
options =
searchAllPanes: true
if atom.config.get('ever-notedown.openPreviewInSplitPane')
options.split = "left"
atom.workspace.open(pathToHelpDoc, options).then (editor) =>
@addPreviewForEditor(editor)
openMarkdownQuickRef: ->
if window.evnd.init then @loadModule()
window.alert "Sorry, this function has not yet been implemented... :-/"
openMathJaxQuickRef: ->
if window.evnd.init then @loadModule()
window.alert "Sorry, this function has not yet been implemented... :-/"
openDevNotes: ->
if window.evnd.init then @loadModule()
pathToDevNotes = path.join __dirname, '../docs/dev_notes.md'
options =
searchAllPanes: true
if atom.config.get('ever-notedown.openPreviewInSplitPane')
options.split = "left"
atom.workspace.open(pathToDevNotes, options).then (editor) =>
@addPreviewForEditor(editor)
#
# toggle the search panel (similar to find-and-replace)
#
showImportNotePanel: ->
if window.evnd.init then @loadModule()
if window.evnd?.searchNoteView?.panel?
window.evnd.searchNoteView.show()
else
SearchNoteView ?= require './search-note-view'
window.evnd.searchNoteView = new SearchNoteView()
window.evnd.searchNoteView.show()
@subscriptions.add window.evnd.searchNoteView.onDidSearchWithString ({queryString, noteLink}={}) =>
@searchNotes({queryString:queryString, noteLink:noteLink})
#
# Open note list (Scroll List view)
#
createNoteManagerView: (state) ->
if window.evnd.init then @loadModule()
unless window.evnd.noteManagerView?
NoteManagerView ?= require './note-manager-view'
window.evnd.noteManagerView = new NoteManagerView()
@subscriptions.add window.evnd.noteManagerView.onDidConfirmNote (noteID) =>
@confirmedNoteItem({noteID: noteID})
window.evnd.noteManagerView
  #
  # Import from Evernote?
  #
  # Search notes via the Evernote helper and show the results in a list
  # view; confirming a result imports that note.  Accepts a free-text
  # query and/or a note link.
  searchNotes: ({queryString, noteLink}={}) ->
    if window.evnd.init then @loadModule()
    # Matches a shared Evernote web link of the form
    #   https://www.evernote.com/shard/<shard>/<x>/<user>/<guid>/
    reg0 = /^https\:\/\/www\.evernote\.com\/shard\/([^\s\/]+)\/[^\s\/]+\/([^\s\/]+)\/([^\s\/]+)\/$/i
    if noteLink? and reg0.test(noteLink) #noteLink.slice(0, 8) is 'https://'
      matched = reg0.exec(noteLink)
      # Rewrite the web URL into the evernote:// in-app link form.
      noteLink = "evernote:///view/#{matched[2]}/#{matched[1]}/#{matched[3]}/#{matched[3]}/"
    evernoteHelper ?= require './evernote-helper'
    window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
    window.evnd.enHelper.searchNotes {queryString:queryString, noteLink: noteLink}, (result) =>
      # result appears to carry a `found` flag plus matched entries;
      # fewer than 2 keys is treated as "no usable matches" —
      # NOTE(review): confirm shape against evernote-helper.
      if (not result) or (not result.found) or (result? and _.size(result) < 2)
        window.alert("No results found!")
        return
      else
        SearchResultListView ?= require './search-result-list-view'
        window.evnd.searchResultListView = new SearchResultListView(result)
        window.evnd.searchResultListView.show()
        @subscriptions.add window.evnd.searchResultListView.onDidConfirmSearchResult (noteLink) =>
          @importFromEvernote({noteLink: noteLink})
handleToEvernoteError: (error, noteOptions) ->
message = "#{error.message} when trying to send note to Evernote"
detail = "Note options:\n"
for k, v of noteOptions
continue if k in ["rawHTML", "text", "css"]
detail += " #{k}: #{JSON.stringify(v)}\n"
stack = "#{error.stack}\n"
atom.notifications.addError(message, {stack: stack, detail: detail, dismissable: true})
  # TODO: Handles "code snippet"
  # TODO: use selection.getScreenRange() (for code annotating?)
  #
  # Send the current editor selection to Evernote as a new note.
  # Markdown-ish sources are rendered to HTML first; other sources are
  # wrapped as a fenced code snippet (when the codeSnippet setting is on)
  # or sent as plain text.
  sel2Evernote: ->
    if window.evnd.init then @loadModule()
    editor = atom.workspace.getActiveTextEditor()
    return unless editor?
    curFilePath = editor.getPath()
    lastSelection = editor.getLastSelection()
    selectionText = lastSelection.getText().toString() #editor.getSelectedText()
    bufferRowRange = lastSelection.getBufferRowRange()
    # 1-based "#L<start>-<end>" suffix used in code-snippet note titles.
    rowRange = "#L#{(bufferRowRange[0]+1).toString()}-#{(bufferRowRange[1]+1).toString()}"
    if selectionText.trim().length is 0
      window.alert "Nothing selected!"
      return
    # Convert selected text (Markdown) to HTML
    # TODO: if current file is code file (selected text is code snippet), render
    # TODO: renderer, async???
    textContent = selectionText ? "Nothing here"
    # Split any metadata header (title/tags/date/notebook) from the body.
    parsedInput = utils.parseMetaData(textContent)
    newTitle = parsedInput.title
    newTextContent = parsedInput.content
    tags = parsedInput.tags
    date = parsedInput.date
    notebookName = parsedInput.notebook
    metaText = parsedInput.metaText
    # Markdown path: remember render options, render asynchronously below.
    if utils.isMarkdown(curFilePath) or
       editor.getGrammar()?.scopeName in evndGrammarList or
       (utils.isText(curFilePath) and
       atom.config.get('ever-notedown.defaultFormat') is 'Markdown')
      renderOptions = {mathjax: atom.config.get('ever-notedown.mathjax')}
    else if atom.config.get('ever-notedown.codeSnippet')
      # Non-Markdown source: wrap the selection in a fenced code block and
      # append provenance (source file + clip time).
      if path.basename(curFilePath)?
        newTitle = "Code snippet: #{path.basename(curFilePath)}#{rowRange}"
      scopeName = editor.getGrammar()?.scopeName
      fenceName = if scopeName? then fenceNameForScope(scopeName) else ""
      newTextContent = "```#{fenceName}\n#{newTextContent}\n```\n"
      newTextContent += "\n<br><br>**Source file**: #{curFilePath}  \n"
      newTextContent += "<br>**Clipped Time**: #{utils.getCurrentTimeString()}  \n"
      textContent = metaText + "\n\n" + newTextContent
      renderOptions = {mathjax: false}
    else
      # Plain-text fallback: no HTML, no CSS.
      newHtmlContent = null
      noteFormat = "Text"
      tmpCss = null
    if noteFormat is "Text"
      options =
        title: newTitle
        update: false
        text: textContent # This will include MetaData section...
        tags: tags
        notebook: {name: notebookName}
        metaDate: date
        rawHTML: newHtmlContent
        css: tmpCss
        format: noteFormat
        filePath: curFilePath
        renderOptions: renderOptions ? null
      try
        @toEvernote options, null, (curNote) =>
          @openNote(curNote)
      catch error
        @handleToEvernoteError(error, options)
    else
      # Markdown / code-snippet path: render to HTML first, then send.
      renderer ?= require './renderer'
      renderer.toHTML newTextContent, renderOptions.mathjax, editor.getPath(),
          parsedInput, editor.getGrammar(), (error, html) =>
        if error
          console.error('Converting Markdown to HTML failed', error)
          return # TODO: notify user
        else
          tmpCss = if (window.evnd.cssTheme? and window.evnd.cssCode?) then (window.evnd.cssTheme + window.evnd.cssCode) else @loadCSS()
          #tmpCss = @getMarkdownPreviewCSS()
          noteFormat = "Markdown"
          newHtmlContent = html
          options =
            title: newTitle
            update: false
            moved: true
            text: textContent # This will include MetaData section...
            tags: tags
            notebook: {name: notebookName}
            metaDate: date
            rawHTML: newHtmlContent
            css: tmpCss
            format: noteFormat
            filePath: curFilePath
            renderOptions: renderOptions ? null
          try
            @toEvernote options, null, (curNote) =>
              @openNote(curNote)
          catch error
            @handleToEvernoteError(error, options)
  # Send the whole active file (or the file behind an EVND preview view)
  # to Evernote, updating the note when the file already lives inside the
  # EVND git repo and creating a new note otherwise.
  file2Evernote: (editor, previewView) ->
    if window.evnd.init then @loadModule()
    # Resolve an editor and a "test view" (the preview) from whichever
    # argument was supplied, falling back to the active pane item.
    if previewView?
      testView = previewView
      editor ?= previewView.editor
    else
      testView ?= atom.workspace.getActivePane().getActiveItem()
      editor ?= atom.workspace.getActiveTextEditor()
    return unless editor? or isEVNDPreviewView(testView)
    # update note in Evernote if current file is already in the EVND git repo
    if editor?
      curFilePath = editor.getPath()
    else
      # No editor: recover it (or at least the file path) from the preview.
      editorId = parseInt(testView.editorId)
      editor = testView.editor
      curFilePath = testView.filePath
      if editor?
        curFilePath = editor.getPath()
      else if curFilePath?
        editor = atom.workspace.openSync(curFilePath, {searchAllPanes: true})
      return unless curFilePath? and editor?
    unless curFilePath?
      # Unsaved buffer: route through saveNewNote instead.
      if editor?
        dMsg = "EVND will now try to save it as a new note... please try again later."
        atom.notifications.addWarning("File is not yet saved!", {detail: dMsg, dismissable: true})
        utils.timeOut(1000)
        @saveNewNote(editor)
      else
        window.alert "File not saved! Cannot send to Evernote... please save first."
      return
    #if curFilePath.indexOf(atom.config.get('ever-notedown.gitPath')) > -1
    # Files inside the repo (real or configured path) are updates; files
    # outside will be copied in ("moved") as new notes.
    gitPath0 = @getRealGitPath()
    gitPath1 = atom.config.get('ever-notedown.gitPath')
    if curFilePath.indexOf(gitPath0) > -1 or
       curFilePath.indexOf(gitPath1) > -1
      update = true
      moved = false
      #console.log("Will update this note...")
    else
      update = false
      moved = true
      #console.log("Will create a new note...")
    textContent = editor.getText()
    parsedInput = utils.parseMetaData(textContent)
    newTextContent = parsedInput.content
    newTitle = parsedInput.title
    tags = parsedInput.tags
    date = parsedInput.date
    notebookName = parsedInput.notebook
    # TODO: Fix Async!!!
    if utils.isMarkdown(curFilePath) or
       editor?.getGrammar()?.scopeName in evndGrammarList or
       (utils.isText(curFilePath) and
       atom.config.get('ever-notedown.defaultFormat') is 'Markdown')
      # Markdown path: the HTML is lifted from the rendered preview, so a
      # preview must exist and have finished loading.
      previewView ?= @getPreviewViewForEditor(editor)
      unless previewView?
        @addPreviewForEditor(editor)
        # TODO: notifiy user
        dMsg = "Please check the rendered result in preview pane first!\n"
        dMsg += "Please close this message, and wait until "
        dMsg += "the preview finishes loading before trying again."
        #window.alert(dMsg)
        atom.notifications.addWarning('Content not rendered!', {detail: dMsg, dismissable: true})
        return
      if previewView.loading then utils.timeOut(500)
      html = previewView[0].innerHTML
      # TODO: Need to properly handle CSS selection
      # NOTE(review): sel2Evernote calls @loadCSS() here instead of
      # window.evnd.loadCSS() — verify which receiver is intended.
      tmpCss = if (window.evnd.cssTheme? and window.evnd.cssCode?) then (window.evnd.cssTheme + window.evnd.cssCode) else window.evnd.loadCSS()
      #tmpCss = @getMarkdownPreviewCSS()
      newHtmlContent = html
      noteFormat = "Markdown"
    # Send resulting HTML to Evernote Application (create a new note or update)
    else if utils.isHTML(curFilePath) or
       editor?.getGrammar()?.scopeName in ['text.html.basic'] or
       (utils.isText(curFilePath) and
       atom.config.get('ever-notedown.defaultFormat') is 'HTML')
      newHtmlContent = newTextContent
      noteFormat = "HTML"
    else # no need to convert
      newHtmlContent = null
      noteFormat = "Text"
    options =
      title: newTitle
      text: textContent # This will include MetaData section...
      tags: tags
      notebook: {name: notebookName}
      metaDate: date
      rawHTML: newHtmlContent
      css: tmpCss
      format: noteFormat
      update: update
      filePath: curFilePath
      renderOptions: {mathjax: atom.config.get('ever-notedown.mathjax')}
    options.moved = moved
    if not moved
      # Keep the existing location so the note updates in place.
      options.path = path.dirname(curFilePath)
      options.fnStem = path.basename(curFilePath, path.extname(curFilePath))
    # Send content to Evernote Application (create a new note or update)
    try
      @toEvernote options, previewView, (curNote) =>
        if options.moved then @openNote(curNote)
    catch error
      @handleToEvernoteError(error, options)
    # TODO: Open the written file (in the default GIT repo)
    # TODO: Async?
    if options.moved
      # The note was copied into the repo: drop previews on the old copy.
      for editor in atom.workspace.getTextEditors() when editor.getPath() is curFilePath
        @removePreviewForEditor(editor)
    else
      @addPreviewForEditor(editor)
toEvernote: (options, previewView, callback) ->
evernoteHelper ?= require './evernote-helper'
window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
# Send resulting HTML to Evernote Application (create a new note)
# Note: This function contains an async call (osa)
# In the callback function of osa, a global variable should be updated
# TODO: tags, other implicit info encoding, etc.
options.update ?= false
noteHelper ?= require './note-helper'
if options.update
curNote = noteHelper.findNote(window.evnd.noteIndex, {title: options.title, fnStem: path.basename(options.filePath, path.extname(options.filePath)), dir: path.basename(path.dirname(options.filePath))})
if curNote is null
options.update = false
#console.log("Note not found in current note index")
switch options.format
when "Markdown" then curNote = new noteHelper.MarkdownNote(options)
when "Text" then curNote = new noteHelper.TextNote(options)
else curNote = new noteHelper.HTMLNote(options)
else
#console.log("Note found in current note index")
curNote.update window.evnd.storageManager, options
else
switch options.format
when "Markdown" then curNote = new noteHelper.MarkdownNote(options)
when "Text" then curNote = new noteHelper.TextNote(options)
else curNote = new noteHelper.HTMLNote(options)
#console.log("Current Note entity title: " + curNote.title)
window.evnd.noteIndex.addnote(curNote)
# TODO: Async call in storage manager
window.evnd.storageManager.addNote curNote, false, null, () =>
#console.log("Sending to evernote..." + utils.getCurrentTimeString())
unless previewView?
openNoteOptions =
searchAllPanes: true
addPreview: true
@openNote curNote, openNoteOptions, (editor) =>
previewView = @getPreviewViewForEditor(editor)
updateNoteNormal = () =>
ensyncs = previewView?[0].querySelectorAll('#evernote-syncing') ? []
for ensync in ensyncs
ensync?.style.visibility = 'visible'
ensync?.previousSibling.classList.add('faded')
window.evnd.enHelper.updateNote curNote, curNote.addAttachments, true, (updateSuccess) =>
if updateSuccess
window.evnd.enHelper.getENML curNote, curNote.queryString, (enml) =>
curNote.update(window.evnd.storageManager, {enml:enml, dontChangeTime:true})
curNote.lastSyncDate = curNote.enModificationDate
ensyncs = previewView?[0].querySelectorAll('#evernote-syncing') ? []
for ensync in ensyncs
ensync?.style.visibility = 'hidden'
ensync?.previousSibling.classList.remove("faded")
ensync?.parentNode.parentNode.classList.remove("evnd-yellow")
ensync?.parentNode.parentNode.classList.remove("evnd-red")
ensyncs = previewView?[0].querySelectorAll('#pull-syncing') ? []
for ensync in ensyncs
ensync?.parentNode.parentNode.classList.remove("evnd-yellow")
ensync?.parentNode.parentNode.classList.remove("evnd-red")
gitMessage = "Update Evernote note \"#{curNote.title}\" success!\n"
gitMessage += "#{curNote.summary()}"
window.evnd.storageManager.addNote curNote, true, gitMessage
#console.log(gitMessage)
#window.alert(gitMessage.split(/[\n\r]/g)[0])
atom.notifications.addSuccess(gitMessage.split(/[\n\r]/g)[0])
else
#console.log "Update failed!"
window.alert "Update failed!"
ensyncs = previewView?[0].querySelectorAll('#evernote-syncing')
for ensync in ensyncs
ensync?.style.visibility = 'hidden'
ensync?.previousSibling.classList.remove("faded")
createNoteNormal = () =>
ensyncs = previewView?[0].querySelectorAll('#evernote-syncing') ? []
for ensync in ensyncs
ensync?.style.visibility = 'visible'
ensync?.previousSibling.classList.add('faded')
window.evnd.enHelper.createNewNote curNote, (createSuccess) =>
if createSuccess
window.evnd.enHelper.getENML curNote, curNote.queryString, (enml) =>
curNote.update(window.evnd.storageManager, {enml:enml, dontChangeTime:true})
curNote.lastSyncDate = curNote.enModificationDate ? curNote.enCreationDate
ensyncs = previewView?[0].querySelectorAll('#evernote-syncing') ? []
for ensync in ensyncs
ensync?.style.visibility = 'hidden'
ensync?.previousSibling.classList.remove("faded")
ensync?.parentNode.parentNode.classList.remove("evnd-yellow")
ensync?.parentNode.parentNode.classList.remove("evnd-red")
ensyncs = previewView?[0].querySelectorAll('#pull-syncing') ? []
for ensync in ensyncs
ensync?.parentNode.parentNode.classList.remove("evnd-yellow")
ensync?.parentNode.parentNode.classList.remove("evnd-red")
gitMessage = "Create new Evernote note \"#{curNote.title}\" success!\n"
gitMessage += "#{curNote.summary()}"
window.evnd.storageManager.addNote curNote, true, gitMessage
#console.log(gitMessage)
#window.alert(gitMessage.split(/[\n\r]/g)[0])
atom.notifications.addSuccess(gitMessage.split(/[\n\r]/g)[0])
else
window.alert "Something went wrong when trying to create new note..."
ensyncs = previewView?[0].querySelectorAll('#evernote-syncing')
for ensync in ensyncs
ensync?.style.visibility = 'hidden'
ensync?.previousSibling.classList.remove("faded")
saveOnly = () =>
gitMessage = "Locally updated note \"#{curNote.title}\"\n"
gitMessage += "#{curNote.summary()}"
window.evnd.storageManager.addNote curnote. true, gitMessage
#console.log(gitMessage)
window.alert(gitMessage.split(/[\n\r]/g)[0])
if options.update
window.evnd.enHelper.getNoteInfo curNote, null, (enNoteInfo) =>
if enNoteInfo?
#console.log("enNoteInfo: " + JSON.stringify(enNoteInfo, null, 4))
#console.log("curNote.lastSyncDate: " + utils.enDateToTimeString(curNote.lastSyncDate))
#console.log("curNote.modificationTime: " + curNote.modificationTime)
if enNoteInfo.enModificationDate isnt curNote.lastSyncDate
dMsg = "On the Evernote client side, this note was last modified "
dMsg += "on #{utils.enDateToTimeString(enNoteInfo.enModificationDate)}. "
dMsg += "But the last time the local note was in sync with the "
dMsg += "Evernote client was #{utils.enDateToTimeString(curNote.lastSyncDate)}.\n"
dMsg += "The local note was modified on #{curNote.modificationTime}.\n"
dMsg += "If you choose \"Update anyway\", the note content in the "
dMsg += "Evernote database will be overwritten AFTER the note is "
dMsg += "exported (you can find the exported note in the EVND folder).\n"
dMsg += "If you choose \"Save only\", the note content will be "
dMsg += "saved to the local EVND folder (with GIT commit), but "
dMsg += "no info will be sent to the Evernote client."
atom.confirm
message: "Alert: possible conflicts!"
detailedMessage: dMsg
buttons:
"Update anyway": -> updateNoteNormal()
"Save only": -> saveOnly()
"Cancel": -> return #console.log("cancelled update note")
else
lastSyncTime = utils.enDateToTimeString(curNote.lastSyncDate)
tMinStr = utils.timeMin(lastSyncTime, curNote.modificationTime)
#console.log(tMinStr)
if tMinStr isnt curNote.modificationTime
updateNoteNormal()
else
window.alert("Note hasn't changed, nothing to update.")
else # no note info was found
createNoteNormal()
else
createNoteNormal()
if callback? then callback(curNote)
  # Open an untitled editor pre-filled with the note template, wire its
  # core:save command to @saveNewNote (targeting a fresh date-stamped
  # repo subdirectory), set the grammar from the default-format setting,
  # and optionally attach a preview.  `callback` receives the editor.
  openNewNote: (initText, options, callback) ->
    # TODO: Template?
    if window.evnd.init then @loadModule()
    initText ?= window.evnd.template ? @loadTemplate()
    # Pull addPreview out of options so it is not passed to workspace.open.
    if options?.addPreview?
      addPreview = options.addPreview
      delete options.addPreview
    else
      addPreview = true
    tmpDirPath = @makeNoteDir()
    fs.makeTreeSync(tmpDirPath) unless fs.isDirectorySync(tmpDirPath)
    options ?= {}
    if (not options.split?) and atom.config.get('ever-notedown.openPreviewInSplitPane')
      options.split = 'left'
    atom.workspace.open('', options).then (editor) =>
      if initText then editor.setText(initText)
      editorElement = atom.views.getView(editor)
      # Intercept save so the first save goes through saveNewNote's
      # save-dialog flow; the disposable is keyed by editor id.
      window.evnd.newNoteDisposables[editor.id] = atom.commands.add editorElement,
        'core:save': (event) =>
          event.stopPropagation()
          @saveNewNote(editor, tmpDirPath)
      switch atom.config.get('ever-notedown.defaultFormat')
        when 'Text' then scopeName = 'text.plain'
        when 'Markdown' then scopeName = @getMarkdownScopeName()
        when 'HTML' then scopeName = 'text.html.basic'
      grammar = atom.grammars.grammarForScopeName(scopeName)
      if grammar? then editor.setGrammar(grammar)
      if addPreview
        @addPreviewForEditor editor, null, (previewView) =>
          if callback? then callback(editor)
      else if callback?
        callback(editor)
makeNoteDir: ->
tmpTimeString = utils.getSanitizedTimeString()
tmpIndex = tmpTimeString.indexOf('_')
tmpDirName = if tmpIndex > -1 then tmpTimeString.slice(0, tmpIndex) else tmpTimeString
gitPath = @getRealGitPath()
tmpDirPath = path.join gitPath, tmpDirName
return tmpDirPath
saveNewNote: (editor, noteDir) ->
noteDir ?= @makeNoteDir()
text = editor.getText()
parsedInput = utils.parseMetaData(text)
title = parsedInput.title
textContent = parsedInput.content
tags = parsedInput.tags
date = parsedInput.date
notebookName = parsedInput.notebook
metaText = parsedInput.metaText
filePath = path.join noteDir, utils.sanitizeFilename(title.toLowerCase()) + ".markdown"
if noteFilePath = atom.showSaveDialogSync(filePath)
options =
title: title
text: text # This will include MetaData section...
tags: tags
notebook: {name: notebookName}
metaDate: date
format: "Markdown"
filePath: noteFilePath
fs.writeFileSync(noteFilePath, text)
window.evnd.newNoteDisposables?[editor.id]?.dispose()
@removePreviewForEditor(editor)
editor.getBuffer().setPath(noteFilePath)
newNote = new noteHelper.MarkdownNote(options)
editor.save()
@addPreviewForEditor(editor, newNote)
gitMessage = "Created new note \"#{title}\" (locally) ...\n"
gitMessage += "#{newNote.summary()}"
window.evnd.storageManager.addNote newNote, true, gitMessage
if atom.config.get('ever-notedown.mathjax')
@setMathJaxGrammar(editor)
else
@setEVNDGrammar(editor)
  # Open a note's file in the workspace, set its grammar from the note's
  # format, and attach (or remove) a preview.  `callback` receives the
  # editor once the preview handling is done.
  openNote: (note, options, callback) ->
    # TODO: What if the current note isn't of format "Markdown"?
    #console.log "Opening note..."
    absPath = note.absPath()
    # Pull addPreview out of options so it is not passed to workspace.open.
    if options?.addPreview?
      addPreview = options.addPreview
      delete options.addPreview
    else
      addPreview = true
    options ?= {searchAllPanes: true}
    if (not options.split?) and atom.config.get('ever-notedown.openPreviewInSplitPane')
      options.split = 'left'
    atom.workspace.open(absPath, options).then (editor) =>
      switch note.format
        when 'Text' then scopeName = 'text.plain'
        when 'Markdown' then scopeName = @getMarkdownScopeName()
        when 'HTML' then scopeName = 'text.html.basic'
      grammar = atom.grammars.grammarForScopeName(scopeName)
      if grammar? then editor.setGrammar(grammar)
      #console.log "Note opened, now dealing with preview..."
      if addPreview
        @addPreviewForEditor editor, note, (previewView) =>
          if callback? then callback(editor)
      else
        @removePreviewForEditor(editor)
        #console.log "Note and preview opened, now handling callback..."
        if callback? then callback(editor)
openNoteInEvernote: (noteID, filePath, previewView) ->
if window.evnd.init then @loadModule()
if previewView?.note?
note = previewView.note
else if previewView?.noteID?
note = noteHelper.findNote(window.evnd.noteIndex, {id: previewView.noteID})
else if noteID?
note = noteHelper.findNote(window.evnd.noteIndex, {id: noteID})
else if filePath?
gitPath0 = atom.config.get('ever-notedown.gitPath')
gitPath1 = @getRealGitPath()
if filePath.indexOf(gitPath0) > -1 or
filePath.indexOf(gitPath1) > -1
fnStem = path.basename(filePath, path.extname(filePath))
dir = path.basename(path.dirname(filePath))
note = noteHelper.findNote(window.evnd.noteIndex, {fnStem: fnStem, dir: dir})
else
note = null
else
note = @searchedOpenedNote()
unless note?
window.alert("No opened note found!")
return
window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
window.evnd.enHelper.openNote note, () =>
#console.log "New note opened in Evernote!"
return
openFinder: (notePath) ->
if window.evnd.init then @loadModule()
window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
window.evnd.enHelper.openFinder notePath, () =>
#console.log "Note directory opened in Finder!"
return
searchOpenedNote: () ->
noteHelper ?= require './note-helper'
gitPath0 = atom.config.get('ever-notedown.gitPath')
gitPath1 = @getRealGitPath()
editor = atom.workspace.getActiveTextEditor()
if editor? and
(editor.getPath().indexOf(gitPath0) > -1 or
editor.getPath().indexOf(gitPath1) > -1)
filePath = editor.getPath()
fnStem = path.basename(filePath, path.extname(filePath))
dir = path.basename(path.dirname(filePath))
note = noteHelper.findNote(window.evnd?.noteIndex, {fnStem: fnStem, dir: dir})
else
curView = atom.workspace.getActivePaneItem()
if isEVNDPreviewView(curView)
if curView.editor?
curFilePath = curView.editor.getPath()
else
curFilePath = curView.filePath
if curFilePath? and
(curFilePath.indexOf(gitPath0) > -1 or
curFilePath.indexOf(gitPath1) > -1)
fnStem = path.basename(curFilePath, path.extname(curFilePath))
dir = path.basename(path.dirname(curFilePath))
note = noteHelper.findNote(window.evnd?.noteIndex, {fnStem: fnStem, dir: dir})
return note
  # Fetch the ENML (Evernote markup) of a note — resolved by entity, id,
  # or the currently opened note — and show it in a new editor with XML
  # highlighting.
  getNoteENML: ({note, noteID}={}) ->
    if window.evnd.init then @loadModule()
    unless note?
      if noteID?
        note = noteHelper.findNote(window.evnd.noteIndex, {id: noteID})
      else
        note = @searchOpenedNote()
    return unless note?
    window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
    window.evnd.enHelper.getENML note, null, (enml) =>
      if enml?
        tmpDir = note.path
        options = {}
        if atom.config.get('ever-notedown.openPreviewInSplitPane')
          options.split = 'left'
        # NOTE(review): this replaces the project root with the note's
        # directory — confirm that is intentional.
        atom.project.setPaths([tmpDir])
        atom.workspace.open('', options).then (editor) =>
          editor.setText(enml)
          grammar = atom.grammars.grammarForScopeName('text.xml')
          if grammar? then editor.setGrammar(grammar)
          return
      else
        window.alert "Something went wrong and getting ENML failed..."
        return
getNoteHTML: ({note, noteID}={}) ->
if window.evnd.init then @loadModule()
unless note?
if noteID?
note = noteHelper.findNote(window.evnd.noteIndex, {id: noteID})
else
note = searchOpenedNote()
return unless note?
window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
window.evnd.enHelper.getHTML note, null, (html) =>
if html?
tmpDir = note.path
options = {}
if atom.config.get('ever-notedown.openPreviewInSplitPane')
options.split = 'left'
atom.project.setPaths([tmpDir])
atom.workspace.open('', options).then (editor) =>
editor.setText(html)
grammar = atom.grammars.grammarForScopeName('text.html.basic')
if grammar? then editor.setGrammar(grammar)
return
else
window.alert "Something went wrong and getting HTML failed..."
return
confirmedNoteItem: ({note, noteID}={}) ->
if window.evnd.init then @loadModule()
unless note?
if noteID?
note = noteHelper.findNote(window.evnd.noteIndex, {id: noteID})
else
note = searchOpenedNote()
return unless note?
window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
window.evnd.enHelper.getNoteInfo note, null, (newNoteInfo) =>
if newNoteInfo?
window.evnd.enHelper.getAttachmentsInfo note, newNoteInfo.queryString, (newAttachmentsInfo) =>
InfoDialog ?= require './info-dialog'
infoDialog = new InfoDialog()
infoDialog.addInfo(note, newNoteInfo, newAttachmentsInfo)
infoDialog.show()
infoDialog.disposables.add infoDialog.onDidClickDelete (noteID) =>
@deleteNote({noteID:noteID})
infoDialog.disposables.add infoDialog.onDidOpenNote (noteID) =>
note = noteHelper.findNote(window.evnd.noteIndex, {id: noteID})
@openNote(note)
infoDialog.disposables.add infoDialog.onDidPullNote (noteID) =>
@pullFromEvernote(noteID)
@subscriptions.add infoDialog.disposables
else
window.alert("Note info retrieve error! (Maybe this note has not been sent to Evernote? Or it might have already been deleted in Evernote.)")
@openNote(note)
  # Remove a note (resolved by entity, id, title, or the currently opened
  # note) from the note index after user confirmation, closing any pane
  # items showing its file.  Files stay on disk.  `callback` receives
  # true on deletion, false on cancel.
  deleteNote: ({note, noteID, noteTitle}={}, callback) ->
    if window.evnd.init then @loadModule()
    if not note?
      if noteID?
        note = noteHelper.findNote(window.evnd.noteIndex, {id: noteID})
      else if noteTitle?
        note = noteHelper.findNote(window.evnd.noteIndex, {title: noteTitle})
      else
        note = @searchOpenedNote()
    unless note?
      #console.log "No active note (editor or preview) found!"
      return
    # TODO
    confirmedDeleteNote = (note, callback) ->
      window.evnd.noteIndex?.removeNote(note)
      #console.log "Note #{note.title} deleted..."
      # Close every open pane item backed by the deleted note's file.
      for paneItem in atom.workspace.getPaneItems()
        if paneItem.getPath? and paneItem.getPath() is note.absPath()
          paneItem.destroy()
      if callback? then callback(true)
    atom.confirm
      message: "Confirm: Delete Note \"#{note.title}\"?"
      detailedMessage: "This action will remove note \"#{note.title}\" from note Index, but related files will remain on disk for now."
      buttons:
        "Confirm": => confirmedDeleteNote(note, callback)
        "Cancel": =>
          #console.log "Cancelled deleting note..."
          if callback? then callback(false)
  # Import a note identified by an evernote:// link: when it is already in
  # the index, just pull it; otherwise build a new note entity from the
  # Evernote note info, export the note's .html/.enex next to it, commit,
  # and then pull its content.
  importFromEvernote: ({noteLink} = {}) ->
    if window.evnd.init then @loadModule()
    return unless noteLink?
    note = noteHelper.findNote(window.evnd.noteIndex, {noteLink: noteLink})
    if note?
      @pullFromEvernote(note.id, note.path, null)
    else # Construct a new note entity
      # TODO: note format? Markdown? HTML?
      # NOTE(review): assumes window.evnd.enHelper is already constructed
      # (e.g. via searchNotes) — verify before calling this directly.
      window.evnd.enHelper.getNoteInfo null, {noteLink: noteLink}, (noteInfo) =>
        # Stamp both local times from the Evernote modification date.
        enModificationTimeStr = utils.enDateToTimeString(noteInfo.enModificationDate)
        noteInfo.creationTime = enModificationTimeStr
        noteInfo.modificationTime = enModificationTimeStr
        note = new noteHelper.MarkdownNote(noteInfo)
        enDest = path.join(note.path, note.fnStem) + "_evernote"
        window.evnd.enHelper.retrieveNote noteLink, note.queryString, enDest, () =>
          utils.timeOut(200)
          # Record the exported artifacts (idempotently) on the note.
          if not ("#{enDest}.html/" in note.enExportedFiles)
            note.enExportedFiles.push("#{enDest}.html/")
          if not ("#{enDest}.enex" in note.enExportedFiles)
            note.enExportedFiles.push("#{enDest}.enex")
          gitMessage = "About to import Evernote note \"#{note.title}\" ...\n"
          gitMessage += "#{note.summary()}"
          window.evnd.storageManager.addNote note, true, gitMessage
          @pullFromEvernote(note.id, note.path, null)
pullFromEvernote: (noteID, filePath, previewView) ->
if window.evnd.init then @loadModule()
if noteID?
note = noteHelper.findNote(window.evnd.noteIndex, {id: noteID})
else if filePath?
gitPath0 = atom.config.get('ever-notedown.gitPath')
gitPath1 = @getRealGitPath()
if filePath.indexOf(gitPath0) or filePath.indexOf(gitPath1)
fnStem = path.basename(filePath, path.extname(filePath))
dir = path.basename(path.dirname(filePath))
note = noteHelper.findNote(window.evnd.noteIndex, {fnStem: fnStem, dir: dir})
else
note = @searchedOpenedNote()
unless note?
window.alert("No opened note found!")
return
pullNoteNormal = (note, options) =>
window.evnd.enHelper ?= new evernoteHelper.EvernoteHelper()
window.evnd.enHelper.pullNote note, (updated, textContent, html, newNoteInfo) =>
#console.log "Note pulled..."
if not updated
@openNote note, null, () =>
window.alert("Nothing unsync'd! Opening note...")
return
else
openNoteOptions = {addPreview: true}
if options?.newPane or atom.config.get('ever-notedown.pulledContentInSplitPane')
openNoteOptions.addPreview = false
@openNote note, options, () =>
textContent = note.metaTextFromNoteInfo(newNoteInfo) + textContent
for editor in atom.workspace.getTextEditors() when editor.getPath() is note.absPath()
oldText = editor.getText()
if openNoteOptions.addPreview
editor.setText(textContent)
else
openNewNoteOptions = {addPreview:false, split: "right", activatePane: true}
visibleScreenRowRange = editor.getVisibleRowRange()
@openNewNote textContent, openNewNoteOptions, (newEditor) =>
row1 = visibleScreenRowRange[0]
row2 = visibleScreenRowRange[1]
try
newEditor.scrollToScreenPosition [parseInt((row1 + row2)/2), 0], {center: true}
catch e
console.log e
break
if openNoteOptions.addPreview
ConfirmDialog ?= require './confirm-dialog'
confirmDialogOptions =
editorId: editor.id
filePath: editor.getPath()
note: note
oldText: oldText
newText: textContent
newNoteInfo: newNoteInfo
confirmDialog = new ConfirmDialog confirmDialogOptions
confirmDialog.show()
if window.evnd.searchNoteView? then window.evnd.searchNoteView.cancel()
conflictStatus = note.checkConflict()
unless conflictStatus.unsyncdModificationInAtomEVND
if previewView? and previewView.editor?.isModified()
conflictStatus.unsyncdModificationInAtomEVND = true
else
notePath = note.absPath()
for editor in atom.workspace.getTextEditors() when editor.getPath() is notePath
if editor.isModified()
conflictStatus.unsyncdModificationInAtomEVND = true
break
if conflictStatus.unsyncdModificationInAtomEVND
detailedMsg = "You can still go ahead and grab content from Evernote, "
detailedMsg += "whether the new content will be put in a new pane or "
detailedMsg += "oevewrite existing content depends on your settings"
detailedMsg += "(EVND will wait for your confirmation to write new "
detailedMsg += "onto disk).\nYour current setting: "
if atom.config.get('ever-notedown.pulledContentInSplitPane')
detailedMsg += "open grabbed content in a separate pane.\n"
else
detailedMsg += "overwrite existing content.\n"
detailedMsg += "You can also make sure that this time the new content "
detailedMsg += "is put into a separate pane.\n\n"
detailedMsg += "Please choose how to proceed: "
atom.confirm
message: "There are changes that have not been sent to Evernote."
detailedMessage: detailedMsg
buttons:
"Cancel": => return #console.log "Cancelled"
"Go ahead": => pullNoteNormal(note, {searchAllPanes: true})
"Put pulled content in a new pane": =>
pullNoteNormal(note, {newPane: true, searchAllPanes: true})
else
pullNoteNormal(note, {searchAllPanes: true})
togglePreview: ->
if window.evnd.init then @loadModule()
if isEVNDPreviewView(atom.workspace.getActivePaneItem())
atom.workspace.destroyActivePaneItem()
return
editor = atom.workspace.getActiveTextEditor()
return unless editor?
grammars = atom.config.get('ever-notedown.grammars') ? []
unless editor.getGrammar().scopeName in grammars
scopeName = editor.getGrammar().scopeName
warningMsg = "Cannot preview this file because grammar '#{scopeName}' isn't supported.\n"
warningMsg += "\n(Current supported grammars set in EVND settings: #{grammars.toString()})"
window.alert(warningMsg)
return
@addPreviewForEditor(editor) unless @removePreviewForEditor(editor)
getPreviewViewForEditor: (editor) ->
uri = @uriForEditor(editor)
previewPane = atom.workspace.paneForURI(uri)
if previewPane?
evndPreviewView = previewPane.itemForURI(uri)
return evndPreviewView if isEVNDPreviewView(evndPreviewView)
return null
uriForEditor: (editor) ->
"ever-notedown-preview://editor/#{editor?.id}"
removePreviewForEditor: (editor) ->
uri = @uriForEditor(editor)
previewPane = atom.workspace.paneForURI(uri)
if previewPane?
previewPane.destroyItem(previewPane.itemForURI(uri))
true
else
false
  # Open a preview pane item for `editor` and attach the matching note.
  #
  # editor   - the TextEditor to preview
  # note     - optional; looked up from the note index by file stem and
  #            directory when not supplied
  # callback - optional; invoked with the preview view once it is ready
  addPreviewForEditor: (editor, note, callback) ->
    uri = @uriForEditor(editor)
    previousActivePane = atom.workspace.getActivePane()
    options =
      searchAllPanes: true
    if atom.config.get('ever-notedown.openPreviewInSplitPane')
      options.split = 'right'
    atom.workspace.open(uri, options).then (evNotedownPreviewView) =>
      if isEVNDPreviewView(evNotedownPreviewView)
        filePath = editor.getPath()
        fnStem = path.basename(filePath, path.extname(filePath))
        dir = path.basename(path.dirname(filePath))
        note ?= noteHelper?.findNote(window.evnd?.noteIndex, {fnStem: fnStem, dir: dir})
        evNotedownPreviewView.note = note
        evNotedownPreviewView.noteID = note?.id
        # Note-specific buttons are only useful once a note is attached.
        if note? then evNotedownPreviewView.activateButtons()
        # Opening the preview steals focus; give it back to the editor pane.
        previousActivePane.activate()
        if callback? then callback(evNotedownPreviewView)
boldText: ->
if window.evnd.init then @loadModule()
editor = atom.workspace.getActiveTextEditor()
return unless editor?
selectedText = editor.getSelectedText()
options =
select: true
editor.insertText "**#{selectedText}**", options
emphasisText: ->
if window.evnd.init then @loadModule()
editor = atom.workspace.getActiveTextEditor()
return unless editor?
selectedText = editor.getSelectedText()
options =
select: true
editor.insertText "_#{selectedText}_", options
underlineText: ->
if window.evnd.init then @loadModule()
editor = atom.workspace.getActiveTextEditor()
return unless editor?
selectedText = editor.getSelectedText()
options =
select: true
editor.insertText "<u>#{selectedText}</u>", options
highlightText: ->
if window.evnd.init then @loadModule()
editor = atom.workspace.getActiveTextEditor()
return unless editor?
selectedText = editor.getSelectedText()
options =
select: true
editor.insertText "<mark>#{selectedText}</mark>", options
strikeThroughText: ->
if window.evnd.init then @loadModule()
editor = atom.workspace.getActiveTextEditor()
return unless editor?
selectedText = editor.getSelectedText()
options =
select: true
editor.insertText "~~#{selectedText}~~", options
blockquote: ->
if window.evnd.init then @loadModule()
editor = atom.workspace.getActiveTextEditor()
return unless editor?
selectedText = editor.getSelectedText()
selectedTextLines = selectedText.toString().split(/[\n\r]/)
for i in [0..selectedTextLines.length-1]
selectedTextLines[i] = "> #{selectedTextLines[i]}"
newText = selectedTextLines.join("\n")
options =
select: true
editor.insertText newText, options
  # Paste an image from the system clipboard into the active editor.
  # When the clipboard holds image data, the PNG is written under the git
  # repo's tmp/ directory; when it holds a file path instead, a Markdown
  # image/attachment reference is placed on the clipboard.
  pasteImage: () ->
    if window.evnd.init then @loadModule()
    editor = atom.workspace.getActiveTextEditor()
    # NOTE(review): `editor? isnt ''` compares a boolean to '' and is
    # always true -- likely meant to test something else; confirm intent.
    return unless editor? and editor? isnt '' and atom.workspace.getActivePane().isFocused()
    image = clipboard.readImage()
    if not image.isEmpty()
      buf = image.toPng()
      imgBin = atob(buf.toString('base64'))
      timeStr = utils.sanitizeTimeString(utils.getCurrentTimeString())
      # Prefer the repo tmp/ dir; fall back to Atom's config dir.
      if window.evnd.storageManager?.gitPath
        newPath = path.join(window.evnd.storageManager.gitPath, 'tmp/', "clipboard_#{timeStr}.png")
      else
        newPath = path.join(atom.getConfigDirPath(), 'evnd/tmp/', "#{timeStr}.png")
      fs.writeFileSync(newPath, imgBin, 'binary')
      # NOTE(review): inserts an empty string -- presumably a markdown
      # image reference to `newPath` was intended here; verify.
      editor.insertText("")
    else
      filePath = clipboard.readText().trim()
      if fs.isFileSync(filePath)
        if utils.isImage(filePath)
          # NOTE(review): clears the clipboard for image paths instead of
          # writing a reference -- looks incomplete; confirm.
          clipboard.writeText("")
        else
          clipboard.writeText("!{Alt text}(#{filePath} \"Optional title\")") # Attachment...
      else
        return
  # Handle a drag-and-drop event on a markdown editor: insert an
  # image/attachment reference for each dropped file path.
  onDrop: (event) ->
    utils ?= require './utils'
    _ ?= require 'underscore-plus'
    path ?= require 'path'
    editor = atom.workspace.getActiveTextEditor()
    return unless editor?
    curPath = editor.getPath()
    # Only intercept drops onto markdown files; anything else falls
    # through to Atom's default handling.
    return unless utils.isMarkdown(curPath)
    event.preventDefault()
    event.stopPropagation()
    pathsToDrop = _.pluck(event.dataTransfer.files, 'path')
    # TODO: Pop up warning if there're spaces in filenames!
    if pathsToDrop.length > 0
      for onePath in pathsToDrop
        continue unless onePath?
        filename = path.basename(onePath)
        if utils.isImage(filename)
          # NOTE(review): inserts only a space for images -- presumably an
          # image reference to `onePath` was intended; verify.
          attachmentText = " "
        else
          attachmentText = " !{attachment}(#{onePath} \"#(unknown)\") "
        editor.insertText(attachmentText)
    return
previewFile: ({target}) ->
if window.evnd.init then @loadModule()
filePath = target.dataset.path
return unless filePath
for editor in atom.workspace.getTextEditors() when editor.getPath() is filePath
@addPreviewForEditor(editor)
return
atom.workspace.open "ever-notedown-preview://#{encodeURI(filePath)}",
searchAllPanes: true
saveHtml: ->
if window.evnd.init then @loadModule()
editor = atom.workspace.getActiveTextEditor()
paneItem = atom.workspace.getActivePaneItem()
return unless editor? or isEVNDPreviewView(paneItem)
if editor?
previewView = @getPreviewViewForEditor(editor)
if previewView?
previewView?.saveAs()
else
@addPreviewForEditor editor, null, (previewView) ->
#previewView = @getPreviewViewForEditor(editor)
previewView?.saveAs()
else if isEVNDPreviewView(paneItem)
paneItem.saveAs()
copyHtml: ->
if window.evnd.init then @loadModule()
editor = atom.workspace.getActiveTextEditor()
paneItem = atom.workspace.getActivePaneItem()
return unless editor? or isEVNDPreviewView(paneItem)
if editor?
previewView = @getPreviewViewForEditor(editor)
if previewView?
previewView?.copyToClipboard()
else
@addPreviewForEditor editor, null, (previewView) ->
#previewView = @getPreviewViewForEditor(editor)
previewView?.copyToClipboard()
else if isEVNDPreviewView(paneItem)
paneItem.copyToClipboard()
getMarkdownScopeName: ->
grammar = @getEVNDGrammar()
scopeName = grammar?.scopeName ? 'source.gfm'
return scopeName
getEVNDGrammarScopeName: ({evndGrammar, mathjax}={})->
scopeNameDict =
litcoffee: 'source.litcoffee'
litcoffeeMathJax: 'text.markdown.evnd.mathjax.source.litcoffee.inline.html'
gfm: 'text.markdown.evnd.source.gfm.inline.html'
gfmMathJax: 'text.markdown.evnd.mathjax.source.gfm.inline.html'
evndGrammar ?= atom.config.get('ever-notedown.evndGrammar')
mathjax ?= atom.config.get('ever-notedown.mathjax')
switch evndGrammar
when 'Extended source.litcoffee'
scopeName = if mathjax then scopeNameDict.litcoffeeMathJax else scopeNameDict.litcoffee
when 'Extended source.gfm'
scopeName = if mathjax then scopeNameDict.gfmMathJax else scopeNameDict.gfm
return scopeName
getEVNDGrammar: ({mathjax}={}) ->
scopeName = @getEVNDGrammarScopeName({mathjax: mathjax})
grammar = atom.grammars.grammarForScopeName(scopeName)
if grammar?
return grammar
# grammar doesn't exists?
evndGrammar = atom.config.get('ever-notedown.evndGrammar')
switch evndGrammar
when 'Extended source.litcoffee'
gramamr = atom.grammars.grammarForScopeName('source.litcoffee')
when 'Extended source.gfm'
grammar = atom.grammars.grammarForScopeName('source.gfm')
return gramamr
addInlineHTMLGrammar: ->
inlineHTMLGrammar = atom.grammars.grammarForScopeName('evnd.inline.html')
unless inlineHTMLGrammar?
inlineHTMLGrammarPath = path.join __dirname, 'grammars/', 'evnd-inline-html.cson'
inlineHTMLGrammar = atom.grammars.readGrammarSync inlineHTMLGrammarPath
atom.grammars.addGrammar inlineHTMLGrammar
addEVNDGrammar: ->
switch atom.config.get('ever-notedown.evndGrammar')
when 'Extended source.litcoffee' then grammarFileName = null
when 'Extended source.gfm' then grammarFileName = 'evnd.cson'
if grammarFileName?
@addInlineHTMLGrammar()
evndGrammarPath = path.join __dirname, 'grammars/', grammarFileName
evndGrammar = atom.grammars.readGrammarSync evndGrammarPath
atom.grammars.addGrammar(evndGrammar)
else
evndGrammar = atom.grammars.grammarForScopeName('source.gfm')
unless evndGramamr?
return
for editor in atom.workspace.getTextEditors()
editorPath = editor.getPath()
if editor.getGrammar()?.scopeName in evndGrammarList or
(editorPath? and utils.isMarkdown(editorPath))
editor.setGrammar(evndGrammar)
removeEVNDGrammar: ->
grammarsToRemove = [
'text.markdown.evnd.mathjax.source.litcoffee.inline.html'
'text.markdown.evnd.mathjax.source.gfm.inline.html'
'text.markdown.evnd.source.gfm.inline.html'
]
for scopeName in grammarsToRemove
atom.grammars.removeGrammarForScopeName(scopeName)
defaultGrammar = atom.grammars.grammarForScopeName('source.gfm')
for editor in atom.workspace.getTextEditors()
editorPath = editor.getPath()
if editorPath? and editor.getGrammar()?.scopeName in evndGrammarList
editor.setGrammar(defaultGrammar)
setEVNDGrammar: (editor) ->
return unless editor?
evndGrammar = @getEVNDGrammar({mathjax: false})
if evndGrammar? and editor?.getGrammar()?.scopeName in evndGrammarList
editor.setGrammar(evndGrammar)
addMathJaxGrammar: ->
switch atom.config.get('ever-notedown.evndGrammar')
when 'Extended source.litcoffee' then grammarFileName = 'evnd-litcoffee-mathjax.cson'
when 'Extended source.gfm' then grammarFileName = 'evnd-mathjax.cson'
if grammarFileName?
@addInlineHTMLGrammar()
mathjaxGrammarPath = path.join __dirname, 'grammars/', grammarFileName
mathjaxGrammar = atom.grammars.readGrammarSync mathjaxGrammarPath
atom.grammars.addGrammar(mathjaxGrammar)
else
mathjaxGrammar = atom.grammars.grammarForScopeName('source.gfm')
unless mathjaxGrammar?
return
for editor in atom.workspace.getTextEditors()
editorPath = editor.getPath()
if editor.getGrammar()?.scopeName in evndGrammarList or
(editorPath? and utils.isMarkdown(editorPath))
editor.setGrammar(mathjaxGrammar)
setMathJaxGrammar: (editor) ->
return unless editor?
mathjaxGrammar = @getEVNDGrammar({mathjax: true})
if mathjaxGrammar? and editor?.getGrammar()?.scopeName in evndGrammarList
editor.setGrammar(mathjaxGrammar)
removeMathJaxGrammar: ->
grammarsToRemove = [
'text.markdown.evnd.mathjax.source.litcoffee.inline.html'
'text.markdown.evnd.mathjax.source.gfm.inline.html'
]
for scopeName in grammarsToRemove
atom.grammars.removeGrammarForScopeName(scopeName)
evndGrammar = @getEVNDGrammar({mathjax: false})
for editor in atom.workspace.getTextEditors()
editorPath = editor.getPath()
if editorPath? and editor.getGrammar()?.scopeName?.indexOf('mathjax') > -1
editor.setGrammar(evndGrammar)
switchEVNDGrammar: (newEVNDGrammar, mathjax) ->
mathjax ?= atom.config.get('ever-notedown.mathjax')
newEVNDGrammarScopeName = @getEVNDGrammarScopeName({evndGrammar: newEVNDGrammar, mathjax: mathjax})
newEVNDGrammar = atom.grammars.grammarForScopeName(newEVNDGrammarScopeName)
if not newEVNDGrammar?
if mathjax then @addMathJaxGrammar() else @addEVNDGrammar()
return
else
for editor in atom.workspace.getTextEditors()
editorPath = editor.getPath()
editor.setGrammar(newEVNDGrammar)
  # One-time lazy bootstrap of the package: require all deferred modules,
  # register grammars, load the git repo and note index, and wire up all
  # config/workspace observers.  Runs its heavy setup only while
  # `window.evnd.init` is true and flips it to false at the end.
  loadModule: ->
    # Deferred requires -- only loaded the first time the package is used.
    {TextEditor} = require 'atom' unless TextEditor?
    utils ?= require './utils'
    CSON ?= require 'season'
    fs ?= require 'fs-plus'
    path ?= require 'path'
    git ?= require 'git-utils'
    _ ?= require 'underscore-plus'
    evernoteHelper ?= require './evernote-helper'
    storage ?= require './storage-manager'
    noteHelper ?= require './note-helper'
    mathjaxHelper ?= require './mathjax-helper'
    {fenceNameForScope} = require './extension-helper' unless fenceNameForScope?
    cheerio ?= require 'cheerio'
    clipboard ?= require 'clipboard'
    url ?= require 'url'
    SearchResultListView ?= require './search-result-list-view'
    SearchNoteView ?= require './search-note-view'
    NoteManagerView ?= require './note-manager-view' # Defer until used
    EVNDPreviewView ?= require './ever-notedown-preview-view' # Defer until used
    EVNDView ?= require './ever-notedown-view' # Defer until used
    renderer ?= require './renderer' # Defer until used
    if window.evnd.init
      # Re-render any preview panes restored from a previous session.
      for paneItem in atom.workspace.getPaneItems() when isEVNDPreviewView(paneItem)
        paneItem.renderMarkdown()
    @loadCSS()
    @loadTemplate()
    if atom.config.get('ever-notedown.mathjax')
      @addMathJaxGrammar()
    else
      @addEVNDGrammar()
    # Load the git repo, then the note index, then attach notes to any
    # already-open preview panes.
    @loadGitRepo null, null, (newStorageManager) =>
      window.evnd.storageManager = newStorageManager
      window.evnd.svgCollections = {}
      window.evnd.newNoteDisposables = {}
      window.evnd.gitPath = window.evnd.storageManager.gitPath
      window.evnd.gitPathSymlink = window.evnd.storageManager.gitPathSymlink
      @loadJSON (newNoteIndex) =>
        window.evnd.noteIndex = newNoteIndex
        if window.evnd.evndView? then window.evnd.evndView.refresh()
        for paneItem in atom.workspace.getPaneItems()
          if isEVNDPreviewView(paneItem) and not paneItem.note?
            filePath = paneItem.getPath()
            fnStem = path.basename(filePath, path.extname(filePath))
            dir = path.basename(path.dirname(filePath))
            note = noteHelper.findNote(window.evnd.noteIndex, {fnStem: fnStem, dir: dir})
            if (not paneItem.noteID?) and note?
              paneItem.noteID = note.id
              paneItem.attachNote(note)
    #
    # TODO: Implement this!
    #
    #@subscriptions.add atom.config.observe 'ever-notedown.renderDiagrams', (toRender) =>
    #  if toRender and not window.evnd.chartsLibsLoaded
    #    chartsHelper ?= require './charts-helper'
    #    chartsHelper.loadChartsLibraries()
    # Git repo path changed: confirm with the user before reloading.
    @subscriptions.add atom.config.onDidChange 'ever-notedown.gitPath', (event) =>
      newGitPath = event.newValue
      reloadGitRepo = =>
        @loadGitRepo newGitPath, null, (newStorageManager) =>
          if newStorageManager?
            window.evnd.storageManager = newStorageManager
            @loadJSON (newNoteIndex) =>
              window.evnd.noteIndex = newNoteIndex
              if window.evnd.evndView? then window.evnd.evndView.refresh()
            window.evnd.gitPath = newGitPath
      dmsg = "Changing git repo path for EVND to #{newGitPath}"
      if atom.config.get('ever-notedown.gitPathSymlink') then dmsg += " (symbolic link)"
      atom.confirm
        message: dmsg + "?"
        buttons:
          "Confirm": => reloadGitRepo()
          "Cancel": => return
          "Revert": =>
            atom.config.set 'ever-notedown.gitPath', event.oldValue
      # Symlink setting changed: same confirm-then-reload dance.
    @subscriptions.add atom.config.onDidChange 'ever-notedown.gitPathSymlink', (event) =>
      gitPathSymlink = event.newValue
      reloadGitRepo = =>
        @loadGitRepo null, gitPathSymlink, (newStorageManager) =>
          if newStorageManager?
            window.evnd.storageManager = newStorageManager
            @loadJSON (newNoteIndex) =>
              window.evnd.noteIndex = newNoteIndex
              if window.evnd.evndView? then window.evnd.evndView.refresh()
            window.evnd.gitPathSymlink = gitPathSymlink
      dmsg = "Changing git repo path for EVND to #{atom.config.get('ever-notedown.gitPath')}"
      if gitPathSymlink then dmsg += " (symbolic link)"
      atom.confirm
        message: dmsg + "?"
        buttons:
          "Confirm": => reloadGitRepo()
          "Cancel": => return
          "Revert": =>
            atom.config.set 'ever-notedown.gitPathSymlink', event.oldValue
    @subscriptions.add atom.config.observe 'ever-notedown.noteTemplate', (newTemplateName) =>
      @loadTemplate(newTemplateName)
    @subscriptions.add atom.config.onDidChange 'ever-notedown.theme', (event) =>
      newThemeName = event.newValue
      @loadCSS(newThemeName)
    @subscriptions.add atom.config.onDidChange 'ever-notedown.syntaxTheme', (event) =>
      newSyntaxThemeName = event.newValue
      @loadCSS(null, newSyntaxThemeName)
    # TODO: ...
    # Toggle MathJax support: swap both the renderer and the grammars.
    @subscriptions.add atom.config.observe 'ever-notedown.mathjax', (mathjax) =>
      if mathjax
        mathjaxHelper.loadMathJax()
        @addMathJaxGrammar()
      else
        mathjaxHelper.unloadMathJax()
        @removeMathJaxGrammar()
    @subscriptions.add atom.config.onDidChange 'ever-notedown.evndGrammar', (event) =>
      mathjax = atom.config.get('ever-notedown.mathjax')
      @switchEVNDGrammar(event.newValue, mathjax)
    @subscriptions.add atom.config.observe 'ever-notedown.mathjaxCustomMacros', (customMacros) =>
      mathjaxHelper.reconfigureMathJax() # TODO: this isn't working!
    # Sort order changed: drop cached list views so they rebuild sorted.
    @subscriptions.add atom.config.observe 'ever-notedown.sortBy', (sortBy) =>
      window.evnd.noteManagerView?.destroy()
      window.evnd.noteManagerView = null
      window.evnd.searchResultListView?.destroy()
      window.evnd.searchResultListView = null
    # Tag newly opened markdown editors with the EVND grammar.
    @subscriptions.add atom.workspace.observeTextEditors (editor) =>
      if (editor?.getGrammar()?.scopeName in ['source.gfm', 'source.litcoffee']) or
      utils.isMarkdown(editor?.getPath?())
        if atom.config.get('ever-notedown.mathjax')
          @setMathJaxGrammar(editor)
        else
          @setEVNDGrammar(editor)
    # Keep the paired preview visible when its editor becomes active.
    @subscriptions.add atom.workspace.observeActivePaneItem (activeItem) =>
      if activeItem is atom.workspace.getActiveTextEditor() and activeItem?.id
        previewView = @getPreviewViewForEditor(activeItem)
        if previewView?
          editorPane = atom.workspace.paneForItem(activeItem)
          previewPane = atom.workspace.paneForItem(previewView)
          if previewPane isnt editorPane and
          previewPane?.getActiveItem() isnt previewView
            previewPane.activateItem(previewView)
    # Mark bootstrap as done so subsequent calls skip the heavy setup.
    window.evnd.init = false
|
[
{
"context": "17b532362fc3149e4211ade14c9b2\"\n \"http://127.0.0.1\": \"40cd4860f7988c53b15b8491693de133\"\n ",
"end": 2323,
"score": 0.5149668455123901,
"start": 2322,
"tag": "IP_ADDRESS",
"value": "2"
},
{
"context": "b532362fc3149e4211ade14c9b2\"\n \"http://1... | spec/communicator_spec.coffee | aerobicio/garmin-communicator | 0 | {Communicator} = require('../src/communicator')
{Plugin} = require('../src/plugin')
{Device} = require('../src/device')
describe 'Communicator', ->
beforeEach ->
@pluginIsInstalledStub = sinon.stub(Plugin.prototype, 'pluginIsInstalled')
afterEach ->
@pluginIsInstalledStub.restore()
describe '#invoke', ->
beforeEach ->
@communicator = Communicator.get()
it 'calls the function on the pluginProxy', ->
@communicator.pluginProxy.derp = sinon.stub()
@communicator.invoke('derp')
expect(@communicator.pluginProxy.derp.calledOnce).to.equal true
it 'throws an error if the method name does not exist', ->
expect(=> @communicator.invoke('foobar')).to.throw Error
it 'throws an error if the method name is not a function of the pluginProxy', ->
@communicator.pluginProxy.derp = 'I am not a function'
expect(=> @communicator.invoke('derp')).to.throw Error
describe '#busy', ->
beforeEach ->
@communicator = Communicator.get()
it 'is not busy by default', ->
expect(@communicator.busy()).to.equal false
it 'returns the current state of the property, or sets it', ->
@communicator.busy(yes)
expect(@communicator.busy()).to.equal true
@communicator.busy(no)
expect(@communicator.busy()).to.equal false
expect(@communicator.busy(yes)).to.equal true
describe '#isLocked', ->
beforeEach ->
@communicator = Communicator.get()
@communicator.pluginProxy = {}
it 'returns true if the plugin is locked', ->
@communicator.pluginProxy.Locked = true
expect(@communicator.isLocked()).to.equal true
it 'returns false if the plugin is unlocked', ->
@communicator.pluginProxy.Locked = false
expect(@communicator.isLocked()).to.equal false
describe '#unlock', ->
beforeEach ->
@communicator = Communicator.get()
@communicator.pluginProxy = {}
it 'does nothing if already unlocked', ->
@communicator.pluginProxy.Locked = false
expect(@communicator.unlock()).to.equal undefined
describe 'unlocking the plugin', ->
beforeEach ->
@unlockCodes =
"file:///": "cb1492ae040612408d87cc53e3f7ff3c"
"http://localhost": "45517b532362fc3149e4211ade14c9b2"
"http://127.0.0.1": "40cd4860f7988c53b15b8491693de133"
@communicator.pluginProxy.Locked = true
@communicator.pluginProxy.Unlock = sinon.stub().returns(false)
@communicator.pluginProxy.Unlock.withArgs("file:///", "cb1492ae040612408d87cc53e3f7ff3c").returns(true)
it 'returns true when it unlocks the plugin successfully', ->
unlocked = @communicator.unlock(@unlockCodes)
expect(unlocked).to.equal true
describe '#devices', ->
beforeEach ->
@clock = sinon.useFakeTimers()
@communicator = Communicator.get()
# mock out the plugin interface
pluginProxy = {
StartFindDevices: -> return
FinishFindDevices: -> return
DevicesXmlString: -> return
DeviceDescription: -> return
}
@startFindDevicesStub = sinon.stub(pluginProxy, 'StartFindDevices')
@finishFindDevicesStub = sinon.stub(pluginProxy, 'FinishFindDevices')
@devicesXmlStringStub = sinon.stub(pluginProxy, 'DevicesXmlString').returns """
<?xml version="1.0" encoding="UTF-8" standalone="no" ?>
"""
@communicator.pluginProxy = pluginProxy
afterEach ->
@communicator = Communicator.destroy()
@clock.restore()
@startFindDevicesStub.restore()
@finishFindDevicesStub.restore()
@devicesXmlStringStub.restore()
@communicator = null
it 'it will unlock the communicator if it is not already unlocked', ->
sinon.stub(@communicator, 'isLocked').returns true
sinon.stub(@communicator, 'busy').returns false
unlockStub = sinon.stub(@communicator, 'unlock')
@communicator.devices()
expect(unlockStub.calledOnce).to.equal true
it 'returns a promise', ->
@finishFindDevicesStub.returns true
subject = @communicator.devices()
expect(subject? and _(subject).isObject() and subject.isFulfilled?).to.equal true
it 'marks the communicator as being busy', ->
@communicator.devices()
expect(@communicator.busy()).to.equal true
it 'marks the communicator as being inactive once the promise is called', (done) ->
@finishFindDevicesStub.returns false
# When the promise is resolved then it should no longer be busy.
promise = @communicator.devices().finally =>
expect(@communicator.busy()).to.equal false
done()
expect(@communicator.busy()).to.equal true
# Ensure that the promise resolves.
@finishFindDevicesStub.returns true
@clock.tick(100)
promise
it 'will keeping checking until the communicator is finished loading every 100ms', (done) ->
@finishFindDevicesStub.returns false
loopUntilFinishedFindingDevicesSpy = sinon.spy(@communicator, '_loopUntilFinishedFindingDevices')
promise = @communicator.devices().finally -> done()
expect(loopUntilFinishedFindingDevicesSpy.calledOnce).to.equal true
@clock.tick(100)
expect(loopUntilFinishedFindingDevicesSpy.calledTwice).to.equal true
@clock.tick(100)
expect(loopUntilFinishedFindingDevicesSpy.calledThrice).to.equal true
# Ensure the promise is kept.
@finishFindDevicesStub.returns true
@clock.tick(100)
promise
describe 'when the plugin is already busy', ->
it 'does nothing', ->
sinon.stub(@communicator, 'busy').returns true
subject = @communicator.devices()
expect(subject).to.equal undefined
describe 'no devices found', ->
beforeEach ->
@devicesXmlStringStub.returns """
<?xml version="1.0" encoding="UTF-8" standalone="no" ?>
<Devices>
</Devices>
"""
@finishFindDevicesStub.returns false
it 'returns an empty array if there are no devices found', (done) ->
promise = @communicator.devices().then (data) =>
expect(data).to.be.empty
done()
@finishFindDevicesStub.returns true
@clock.tick(100)
promise
# it 'returns an array of devices', (done) ->
# @devicesXmlStringStub.returns """
# <?xml version="1.0" encoding="UTF-8" standalone="no" ?>
# <Devices>
# <Device DisplayName="Edge 500" Number="0"/>
# <Device DisplayName="Edge 510" Number="1"/>
# <Device DisplayName="Garmin Swim" Number="2"/>
# </Devices>
# """
# @finishFindDevicesStub.returns false
# promise = @communicator.devices().then (data) =>
# expect(data.length).to.equal 3
# expect(data[0].name).to.equal "Edge 500"
# expect(data[0].number).to.equal 0
# expect(data[1].name).to.equal "Edge 510"
# expect(data[1].number).to.equal 1
# expect(data[2].name).to.equal "Garmin Swim"
# expect(data[2].number).to.equal 2
# done()
# @finishFindDevicesStub.returns true
# @clock.tick(100)
# promise
| 12369 | {Communicator} = require('../src/communicator')
{Plugin} = require('../src/plugin')
{Device} = require('../src/device')
describe 'Communicator', ->
beforeEach ->
@pluginIsInstalledStub = sinon.stub(Plugin.prototype, 'pluginIsInstalled')
afterEach ->
@pluginIsInstalledStub.restore()
describe '#invoke', ->
beforeEach ->
@communicator = Communicator.get()
it 'calls the function on the pluginProxy', ->
@communicator.pluginProxy.derp = sinon.stub()
@communicator.invoke('derp')
expect(@communicator.pluginProxy.derp.calledOnce).to.equal true
it 'throws an error if the method name does not exist', ->
expect(=> @communicator.invoke('foobar')).to.throw Error
it 'throws an error if the method name is not a function of the pluginProxy', ->
@communicator.pluginProxy.derp = 'I am not a function'
expect(=> @communicator.invoke('derp')).to.throw Error
describe '#busy', ->
beforeEach ->
@communicator = Communicator.get()
it 'is not busy by default', ->
expect(@communicator.busy()).to.equal false
it 'returns the current state of the property, or sets it', ->
@communicator.busy(yes)
expect(@communicator.busy()).to.equal true
@communicator.busy(no)
expect(@communicator.busy()).to.equal false
expect(@communicator.busy(yes)).to.equal true
describe '#isLocked', ->
beforeEach ->
@communicator = Communicator.get()
@communicator.pluginProxy = {}
it 'returns true if the plugin is locked', ->
@communicator.pluginProxy.Locked = true
expect(@communicator.isLocked()).to.equal true
it 'returns false if the plugin is unlocked', ->
@communicator.pluginProxy.Locked = false
expect(@communicator.isLocked()).to.equal false
describe '#unlock', ->
beforeEach ->
@communicator = Communicator.get()
@communicator.pluginProxy = {}
it 'does nothing if already unlocked', ->
@communicator.pluginProxy.Locked = false
expect(@communicator.unlock()).to.equal undefined
describe 'unlocking the plugin', ->
beforeEach ->
@unlockCodes =
"file:///": "cb1492ae040612408d87cc53e3f7ff3c"
"http://localhost": "45517b532362fc3149e4211ade14c9b2"
"http://127.0.0.1": "40cd4860f7988c53b15b8491693de133"
@communicator.pluginProxy.Locked = true
@communicator.pluginProxy.Unlock = sinon.stub().returns(false)
@communicator.pluginProxy.Unlock.withArgs("file:///", "cb1492ae040612408d87cc53e3f7ff3c").returns(true)
it 'returns true when it unlocks the plugin successfully', ->
unlocked = @communicator.unlock(@unlockCodes)
expect(unlocked).to.equal true
describe '#devices', ->
beforeEach ->
@clock = sinon.useFakeTimers()
@communicator = Communicator.get()
# mock out the plugin interface
pluginProxy = {
StartFindDevices: -> return
FinishFindDevices: -> return
DevicesXmlString: -> return
DeviceDescription: -> return
}
@startFindDevicesStub = sinon.stub(pluginProxy, 'StartFindDevices')
@finishFindDevicesStub = sinon.stub(pluginProxy, 'FinishFindDevices')
@devicesXmlStringStub = sinon.stub(pluginProxy, 'DevicesXmlString').returns """
<?xml version="1.0" encoding="UTF-8" standalone="no" ?>
"""
@communicator.pluginProxy = pluginProxy
afterEach ->
@communicator = Communicator.destroy()
@clock.restore()
@startFindDevicesStub.restore()
@finishFindDevicesStub.restore()
@devicesXmlStringStub.restore()
@communicator = null
it 'it will unlock the communicator if it is not already unlocked', ->
sinon.stub(@communicator, 'isLocked').returns true
sinon.stub(@communicator, 'busy').returns false
unlockStub = sinon.stub(@communicator, 'unlock')
@communicator.devices()
expect(unlockStub.calledOnce).to.equal true
it 'returns a promise', ->
@finishFindDevicesStub.returns true
subject = @communicator.devices()
expect(subject? and _(subject).isObject() and subject.isFulfilled?).to.equal true
it 'marks the communicator as being busy', ->
@communicator.devices()
expect(@communicator.busy()).to.equal true
it 'marks the communicator as being inactive once the promise is called', (done) ->
@finishFindDevicesStub.returns false
# When the promise is resolved then it should no longer be busy.
promise = @communicator.devices().finally =>
expect(@communicator.busy()).to.equal false
done()
expect(@communicator.busy()).to.equal true
# Ensure that the promise resolves.
@finishFindDevicesStub.returns true
@clock.tick(100)
promise
it 'will keeping checking until the communicator is finished loading every 100ms', (done) ->
@finishFindDevicesStub.returns false
loopUntilFinishedFindingDevicesSpy = sinon.spy(@communicator, '_loopUntilFinishedFindingDevices')
promise = @communicator.devices().finally -> done()
expect(loopUntilFinishedFindingDevicesSpy.calledOnce).to.equal true
@clock.tick(100)
expect(loopUntilFinishedFindingDevicesSpy.calledTwice).to.equal true
@clock.tick(100)
expect(loopUntilFinishedFindingDevicesSpy.calledThrice).to.equal true
# Ensure the promise is kept.
@finishFindDevicesStub.returns true
@clock.tick(100)
promise
describe 'when the plugin is already busy', ->
it 'does nothing', ->
sinon.stub(@communicator, 'busy').returns true
subject = @communicator.devices()
expect(subject).to.equal undefined
describe 'no devices found', ->
beforeEach ->
@devicesXmlStringStub.returns """
<?xml version="1.0" encoding="UTF-8" standalone="no" ?>
<Devices>
</Devices>
"""
@finishFindDevicesStub.returns false
it 'returns an empty array if there are no devices found', (done) ->
promise = @communicator.devices().then (data) =>
expect(data).to.be.empty
done()
@finishFindDevicesStub.returns true
@clock.tick(100)
promise
# it 'returns an array of devices', (done) ->
# @devicesXmlStringStub.returns """
# <?xml version="1.0" encoding="UTF-8" standalone="no" ?>
# <Devices>
# <Device DisplayName="Edge 500" Number="0"/>
# <Device DisplayName="Edge 510" Number="1"/>
# <Device DisplayName="<NAME>" Number="2"/>
# </Devices>
# """
# @finishFindDevicesStub.returns false
# promise = @communicator.devices().then (data) =>
# expect(data.length).to.equal 3
# expect(data[0].name).to.equal "Edge 500"
# expect(data[0].number).to.equal 0
# expect(data[1].name).to.equal "Edge 510"
# expect(data[1].number).to.equal 1
# expect(data[2].name).to.equal "<NAME>"
# expect(data[2].number).to.equal 2
# done()
# @finishFindDevicesStub.returns true
# @clock.tick(100)
# promise
| true | {Communicator} = require('../src/communicator')
{Plugin} = require('../src/plugin')
{Device} = require('../src/device')
describe 'Communicator', ->
beforeEach ->
@pluginIsInstalledStub = sinon.stub(Plugin.prototype, 'pluginIsInstalled')
afterEach ->
@pluginIsInstalledStub.restore()
describe '#invoke', ->
beforeEach ->
@communicator = Communicator.get()
it 'calls the function on the pluginProxy', ->
@communicator.pluginProxy.derp = sinon.stub()
@communicator.invoke('derp')
expect(@communicator.pluginProxy.derp.calledOnce).to.equal true
it 'throws an error if the method name does not exist', ->
expect(=> @communicator.invoke('foobar')).to.throw Error
it 'throws an error if the method name is not a function of the pluginProxy', ->
@communicator.pluginProxy.derp = 'I am not a function'
expect(=> @communicator.invoke('derp')).to.throw Error
describe '#busy', ->
beforeEach ->
@communicator = Communicator.get()
it 'is not busy by default', ->
expect(@communicator.busy()).to.equal false
it 'returns the current state of the property, or sets it', ->
@communicator.busy(yes)
expect(@communicator.busy()).to.equal true
@communicator.busy(no)
expect(@communicator.busy()).to.equal false
expect(@communicator.busy(yes)).to.equal true
describe '#isLocked', ->
beforeEach ->
@communicator = Communicator.get()
@communicator.pluginProxy = {}
it 'returns true if the plugin is locked', ->
@communicator.pluginProxy.Locked = true
expect(@communicator.isLocked()).to.equal true
it 'returns false if the plugin is unlocked', ->
@communicator.pluginProxy.Locked = false
expect(@communicator.isLocked()).to.equal false
describe '#unlock', ->
beforeEach ->
@communicator = Communicator.get()
@communicator.pluginProxy = {}
it 'does nothing if already unlocked', ->
@communicator.pluginProxy.Locked = false
expect(@communicator.unlock()).to.equal undefined
describe 'unlocking the plugin', ->
beforeEach ->
@unlockCodes =
"file:///": "cb1492ae040612408d87cc53e3f7ff3c"
"http://localhost": "45517b532362fc3149e4211ade14c9b2"
"http://127.0.0.1": "40cd4860f7988c53b15b8491693de133"
@communicator.pluginProxy.Locked = true
@communicator.pluginProxy.Unlock = sinon.stub().returns(false)
@communicator.pluginProxy.Unlock.withArgs("file:///", "cb1492ae040612408d87cc53e3f7ff3c").returns(true)
it 'returns true when it unlocks the plugin successfully', ->
unlocked = @communicator.unlock(@unlockCodes)
expect(unlocked).to.equal true
describe '#devices', ->
beforeEach ->
@clock = sinon.useFakeTimers()
@communicator = Communicator.get()
# mock out the plugin interface
pluginProxy = {
StartFindDevices: -> return
FinishFindDevices: -> return
DevicesXmlString: -> return
DeviceDescription: -> return
}
@startFindDevicesStub = sinon.stub(pluginProxy, 'StartFindDevices')
@finishFindDevicesStub = sinon.stub(pluginProxy, 'FinishFindDevices')
@devicesXmlStringStub = sinon.stub(pluginProxy, 'DevicesXmlString').returns """
<?xml version="1.0" encoding="UTF-8" standalone="no" ?>
"""
@communicator.pluginProxy = pluginProxy
afterEach ->
@communicator = Communicator.destroy()
@clock.restore()
@startFindDevicesStub.restore()
@finishFindDevicesStub.restore()
@devicesXmlStringStub.restore()
@communicator = null
it 'it will unlock the communicator if it is not already unlocked', ->
sinon.stub(@communicator, 'isLocked').returns true
sinon.stub(@communicator, 'busy').returns false
unlockStub = sinon.stub(@communicator, 'unlock')
@communicator.devices()
expect(unlockStub.calledOnce).to.equal true
it 'returns a promise', ->
@finishFindDevicesStub.returns true
subject = @communicator.devices()
expect(subject? and _(subject).isObject() and subject.isFulfilled?).to.equal true
it 'marks the communicator as being busy', ->
@communicator.devices()
expect(@communicator.busy()).to.equal true
it 'marks the communicator as being inactive once the promise is called', (done) ->
@finishFindDevicesStub.returns false
# When the promise is resolved then it should no longer be busy.
promise = @communicator.devices().finally =>
expect(@communicator.busy()).to.equal false
done()
expect(@communicator.busy()).to.equal true
# Ensure that the promise resolves.
@finishFindDevicesStub.returns true
@clock.tick(100)
promise
it 'will keeping checking until the communicator is finished loading every 100ms', (done) ->
@finishFindDevicesStub.returns false
loopUntilFinishedFindingDevicesSpy = sinon.spy(@communicator, '_loopUntilFinishedFindingDevices')
promise = @communicator.devices().finally -> done()
expect(loopUntilFinishedFindingDevicesSpy.calledOnce).to.equal true
@clock.tick(100)
expect(loopUntilFinishedFindingDevicesSpy.calledTwice).to.equal true
@clock.tick(100)
expect(loopUntilFinishedFindingDevicesSpy.calledThrice).to.equal true
# Ensure the promise is kept.
@finishFindDevicesStub.returns true
@clock.tick(100)
promise
describe 'when the plugin is already busy', ->
it 'does nothing', ->
sinon.stub(@communicator, 'busy').returns true
subject = @communicator.devices()
expect(subject).to.equal undefined
describe 'no devices found', ->
beforeEach ->
@devicesXmlStringStub.returns """
<?xml version="1.0" encoding="UTF-8" standalone="no" ?>
<Devices>
</Devices>
"""
@finishFindDevicesStub.returns false
it 'returns an empty array if there are no devices found', (done) ->
promise = @communicator.devices().then (data) =>
expect(data).to.be.empty
done()
@finishFindDevicesStub.returns true
@clock.tick(100)
promise
# it 'returns an array of devices', (done) ->
# @devicesXmlStringStub.returns """
# <?xml version="1.0" encoding="UTF-8" standalone="no" ?>
# <Devices>
# <Device DisplayName="Edge 500" Number="0"/>
# <Device DisplayName="Edge 510" Number="1"/>
# <Device DisplayName="PI:NAME:<NAME>END_PI" Number="2"/>
# </Devices>
# """
# @finishFindDevicesStub.returns false
# promise = @communicator.devices().then (data) =>
# expect(data.length).to.equal 3
# expect(data[0].name).to.equal "Edge 500"
# expect(data[0].number).to.equal 0
# expect(data[1].name).to.equal "Edge 510"
# expect(data[1].number).to.equal 1
# expect(data[2].name).to.equal "PI:NAME:<NAME>END_PI"
# expect(data[2].number).to.equal 2
# done()
# @finishFindDevicesStub.returns true
# @clock.tick(100)
# promise
|
[
{
"context": "###\ncollectionCtrl.coffee\nCopyright (C) 2015 ender xu <xuender@gmail.com>\n\nDistributed under terms of t",
"end": 53,
"score": 0.9997056722640991,
"start": 45,
"tag": "NAME",
"value": "ender xu"
},
{
"context": "ollectionCtrl.coffee\nCopyright (C) 2015 ender xu <xuen... | ma/collectionCtrl.coffee | xuender/mgoAdmin | 0 | ###
collectionCtrl.coffee
Copyright (C) 2015 ender xu <xuender@gmail.com>
Distributed under terms of the MIT license.
###
CollectionCtrl = ($scope, $log, $http, $modal, $routeParams, ngTableParams, $filter)->
$log.debug 'collection'
$scope.db = $routeParams.db
$scope.collection = $routeParams.collection
$scope.names = []
$scope.$watch('names', (n, o)->
$log.debug n
$log.debug $scope.$$childHead.$columns
ret = (i)->
m = n[i]
->
m
$scope.$$childHead.$columns = []
for i in [0..n.length-1]
$scope.$$childHead.$columns.push(
id: i
filter: ->
False
title: ret(i)
sortable: ret(i)
show: ->
true
)
$scope.$$childHead.$columns.push(
id: n.length
filter: ->
False
title: ->
''
sortable: ->
''
show: ->
true
)
$log.debug $scope.$$childHead.$columns
,true)
$scope.edit = (data)->
i = $modal.open(
templateUrl: '/partials/edit.html'
controller: 'EditCtrl'
backdrop: 'static'
keyboard: true
size: 'lg'
resolve:
data: ->
angular.copy data
)
i.result.then((data)->
$log.debug data
$http.put("/collection/#{ $scope.db }/#{ $scope.collection }",
ok: true
data: data
).success((msg)->
$log.debug msg
if msg.ok
$scope.tableParams.reload()
else
alert(msg.err)
)
,->
$log.debug 'cancel'
)
$scope.show = (d)->
if angular.isObject(d) and not angular.isArray(d)
return 'object'
if angular.isArray(d)
return 'array'
d
$scope.tableParams = new ngTableParams(
page: 1
count: 10
,
getData: ($defer, params)->
$http.post('/collection',
db: $scope.db
collection: $scope.collection
page: params.page()
limit: params.count()
sorting: params.orderBy()
filter: params.filter()
).success((msg)->
$log.debug msg
if msg.ok
params.total(msg.data.count)
$scope.names = msg.data.names
$defer.resolve(msg.data.results)
else
alert(msg.err)
)
)
CollectionCtrl.$inject = [
'$scope'
'$log'
'$http'
'$modal'
'$routeParams'
'ngTableParams'
'$filter'
]
| 165172 | ###
collectionCtrl.coffee
Copyright (C) 2015 <NAME> <<EMAIL>>
Distributed under terms of the MIT license.
###
CollectionCtrl = ($scope, $log, $http, $modal, $routeParams, ngTableParams, $filter)->
$log.debug 'collection'
$scope.db = $routeParams.db
$scope.collection = $routeParams.collection
$scope.names = []
$scope.$watch('names', (n, o)->
$log.debug n
$log.debug $scope.$$childHead.$columns
ret = (i)->
m = n[i]
->
m
$scope.$$childHead.$columns = []
for i in [0..n.length-1]
$scope.$$childHead.$columns.push(
id: i
filter: ->
False
title: ret(i)
sortable: ret(i)
show: ->
true
)
$scope.$$childHead.$columns.push(
id: n.length
filter: ->
False
title: ->
''
sortable: ->
''
show: ->
true
)
$log.debug $scope.$$childHead.$columns
,true)
$scope.edit = (data)->
i = $modal.open(
templateUrl: '/partials/edit.html'
controller: 'EditCtrl'
backdrop: 'static'
keyboard: true
size: 'lg'
resolve:
data: ->
angular.copy data
)
i.result.then((data)->
$log.debug data
$http.put("/collection/#{ $scope.db }/#{ $scope.collection }",
ok: true
data: data
).success((msg)->
$log.debug msg
if msg.ok
$scope.tableParams.reload()
else
alert(msg.err)
)
,->
$log.debug 'cancel'
)
$scope.show = (d)->
if angular.isObject(d) and not angular.isArray(d)
return 'object'
if angular.isArray(d)
return 'array'
d
$scope.tableParams = new ngTableParams(
page: 1
count: 10
,
getData: ($defer, params)->
$http.post('/collection',
db: $scope.db
collection: $scope.collection
page: params.page()
limit: params.count()
sorting: params.orderBy()
filter: params.filter()
).success((msg)->
$log.debug msg
if msg.ok
params.total(msg.data.count)
$scope.names = msg.data.names
$defer.resolve(msg.data.results)
else
alert(msg.err)
)
)
CollectionCtrl.$inject = [
'$scope'
'$log'
'$http'
'$modal'
'$routeParams'
'ngTableParams'
'$filter'
]
| true | ###
collectionCtrl.coffee
Copyright (C) 2015 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
Distributed under terms of the MIT license.
###
CollectionCtrl = ($scope, $log, $http, $modal, $routeParams, ngTableParams, $filter)->
$log.debug 'collection'
$scope.db = $routeParams.db
$scope.collection = $routeParams.collection
$scope.names = []
$scope.$watch('names', (n, o)->
$log.debug n
$log.debug $scope.$$childHead.$columns
ret = (i)->
m = n[i]
->
m
$scope.$$childHead.$columns = []
for i in [0..n.length-1]
$scope.$$childHead.$columns.push(
id: i
filter: ->
False
title: ret(i)
sortable: ret(i)
show: ->
true
)
$scope.$$childHead.$columns.push(
id: n.length
filter: ->
False
title: ->
''
sortable: ->
''
show: ->
true
)
$log.debug $scope.$$childHead.$columns
,true)
$scope.edit = (data)->
i = $modal.open(
templateUrl: '/partials/edit.html'
controller: 'EditCtrl'
backdrop: 'static'
keyboard: true
size: 'lg'
resolve:
data: ->
angular.copy data
)
i.result.then((data)->
$log.debug data
$http.put("/collection/#{ $scope.db }/#{ $scope.collection }",
ok: true
data: data
).success((msg)->
$log.debug msg
if msg.ok
$scope.tableParams.reload()
else
alert(msg.err)
)
,->
$log.debug 'cancel'
)
$scope.show = (d)->
if angular.isObject(d) and not angular.isArray(d)
return 'object'
if angular.isArray(d)
return 'array'
d
$scope.tableParams = new ngTableParams(
page: 1
count: 10
,
getData: ($defer, params)->
$http.post('/collection',
db: $scope.db
collection: $scope.collection
page: params.page()
limit: params.count()
sorting: params.orderBy()
filter: params.filter()
).success((msg)->
$log.debug msg
if msg.ok
params.total(msg.data.count)
$scope.names = msg.data.names
$defer.resolve(msg.data.results)
else
alert(msg.err)
)
)
CollectionCtrl.$inject = [
'$scope'
'$log'
'$http'
'$modal'
'$routeParams'
'ngTableParams'
'$filter'
]
|
[
{
"context": "out\": \"logout\"\n \"reset_password/:token\": \"reset_password\"\n \"reset_password\": \"reset_password\"\n \"chan",
"end": 3541,
"score": 0.7309353351593018,
"start": 3533,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "/:token\": \"reset_password\"\... | _attachments/app/Router.coffee | jongoz/coconut-analytice | 0 | _ = require 'underscore'
$ = jQuery = require 'jquery'
Backbone = require 'backbone'
Backbone.$ = $
global.moment = require 'moment'
DashboardView = require './views/DashboardView'
MenuView = require './views/MenuView'
HeaderView = require './views/HeaderView'
UsersView = require './views/UsersView'
DateSelectorView = require './views/DateSelectorView'
IssuesView = require './views/IssuesView'
IssueView = require './views/IssueView'
global.Case = require './models/Case'
CaseView = require './views/CaseView'
DataExportView = require './views/DataExportView'
MapView = require './views/MapView'
FacilityHierarchyView = require './views/FacilityHierarchyView'
RainfallStationView = require './views/RainfallStationView'
GeoHierarchyView = require './views/GeoHierarchyView'
Dhis2View = require './views/Dhis2View'
SystemSettingsView = require './views/SystemSettingsView'
LoginView = require './views/LoginView'
ChangePasswdView = require './views/ChangePasswdView'
User = require './models/User'
Dialog = require './views/Dialog'
MessagingView = require './views/MessagingView'
FindCaseView = require './views/FindCaseView'
Graphs = require './models/Graphs'
GraphView = require './views/GraphView'
IndividualsView = require './views/IndividualsView'
CasesView = require './views/CasesView'
EntomologyInvestigationsView = require './views/EntomologyInvestigationsView'
EntomologySpecimensView = require './views/EntomologySpecimensView'
EntomologyDashboardView = require './views/EntomologyDashboardView'
Test1View = require './views/Test1View'
# This allows us to create new instances of these dynamically based on the URL, for example:
# /reports/Analysis will lead to:
# new reportViews[type]() or new reportView["Analysis"]()
#
#AnalysisView = require './views/AnalysisView'
reportViews = {
"Analysis": require './views/AnalysisView'
"Casefollowup": require './views/CaseFollowupView'
"Individualclassification": require './views/IndividualClassificationView'
"Fociclassification": require './views/FociClassificationView'
"Compareweekly": require './views/CompareWeeklyView'
"Epidemicthreshold": require './views/EpidemicThresholdView'
"Systemerrors": require './views/SystemErrorsView'
"Incidentsgraph": require './views/IncidentsGraphView'
"Periodtrends": require './views/PeriodTrendsView'
"Rainfallreport": require './views/RainfallReportView'
"Usersreport": require './views/UsersReportView'
"WeeklyMeetingReport": require './views/WeeklyMeetingReportView'
"WeeklyFacilityReports": require './views/WeeklyFacilityReportsView'
"CleaningReports": require './views/CleaningReportsView'
"Weeklysummary": require './views/WeeklySummaryView'
"test": require './views/Test1View'
}
activityViews = {
Issues: require './views/IssuesView'
Messaging: require './views/MessagingView'
}
class Router extends Backbone.Router
# caches views
views: {}
# holds option pairs for more complex URLs like for reports
reportViewOptions: {}
activityViewOptions: {}
dateSelectorOptions: {}
noLogin = ["login", "logout", "reset_password"]
execute: (callback, args, name) ->
if noLogin.indexOf(name) is -1
@userLoggedIn
success: =>
args.push(@parseQueryString(args.pop())) if args[0] isnt null
callback.apply(this, args) if (callback)
error: =>
@loginFailed()
else
callback.apply(this, args) if callback
routes:
"": "dashboard"
"login": "login"
"logout": "logout"
"reset_password/:token": "reset_password"
"reset_password": "reset_password"
"change_password": "change_password"
"admin/dhis2": "dhis2"
"admin/system_settings": "systemSettings"
"admin/users": "users"
"admin/facilities": "FacilityHierarchy"
"admin/rainfall_station": "rainfallStation"
"admin/geo_hierarchy": "geoHierarchy"
"dashboard": "dashboard"
"dashboard/*options": "dashboard"
"export": "dataExport"
"export/*options": "dataExport"
"maps": "maps"
"maps/*options": "maps"
"graph/*options": "graph"
"individuals": "individuals"
"individuals/*options": "individuals"
"cases": "cases"
"cases/*options": "cases"
"reports": "reports"
"reports/*options": "reports" ##reports/type/Analysis/startDate/2016-01-01/endDate/2016-01-01 ->
"find/case": "findCase"
"find/case/:caseID": "findCase"
"show/case/:caseID": "showCase"
"show/cases/:caseID": "showCase"
"show/case/:caseID/:docID": "showCase"
"delete/result/:resultId": "deleteResult"
"new/issue": "newIssue"
"show/issue/:issueID": "showIssue"
"activities": "activities"
"activities/*options": "activities"
"entomology_dashboard": "entomologyDashboard"
"entomology_dashboard/*options": "entomologyDashboard"
"entomology_investigations": "entomologyInvestigations"
"entomology_investigations/*options": "entomologyInvestigations"
"entomology_specimens": "entomologySpecimens"
"entomology_specimens/*options": "entomologySpecimens"
"Test": "Test"
"*noMatch": "noMatch"
entomologyDashboard: (optionString) =>
Coconut.entomologyDashboardView = new EntomologyDashboardView()
Coconut.entomologyDashboardView.setElement $("#content")
Coconut.entomologyDashboardView.options = @parseOptionsString(optionString)
Coconut.entomologyDashboardView.render()
entomologyInvestigations: (optionString) =>
Coconut.entomologyInvestigationsView = new EntomologyInvestigationsView()
Coconut.entomologyInvestigationsView.setElement $("#content")
Coconut.entomologyInvestigationsView.options = @parseOptionsString(optionString)
Coconut.entomologyInvestigationsView.render()
entomologySpecimens: (optionString) =>
Coconut.entomologySpecimensView = new EntomologySpecimensView()
Coconut.entomologySpecimensView.setElement $("#content")
Coconut.entomologySpecimensView.options = @parseOptionsString(optionString)
Coconut.entomologySpecimensView.render()
findCase: (caseId) =>
Coconut.findCaseView or= new FindCaseView()
Coconut.findCaseView.setElement $("#content")
Coconut.findCaseView.caseId = caseId
Coconut.findCaseView.render()
deleteResult: (resultId) =>
if confirm "Are you sure you want to delete #{resultId}"
adminPassword = prompt "Enter the database admin password:"
adminDatabase = new PouchDB (await Coconut.database.info()).host,
"auth.username": "admin"
"auth.password": adminDatabase
adminDatabase.get(resultId)
.catch (error) => alert error
.then (result) =>
adminDatabase.destroy(result)
.catch (error) => alert error
.then =>
alert("#{resultId} deleted")
Coconut.router.navigate("#", {trigger:true})
initialize: (appView) ->
@appView = appView
noMatch: =>
console.error "Invalid URL, no matching route: "
$("#content").html "Page not found."
login: ->
Coconut.loginView = new LoginView() if !Coconut.loginView
Coconut.loginView.render()
@listenTo(Coconut.loginView, "success", ->
HTMLHelpers.showBackground('show')
Coconut.router.navigate("#dashboard", {trigger: true})
)
logout: ->
User.logout()
$("span#username").html ""
@login()
loginFailed: ->
Coconut.router.navigate("#login", {trigger: true})
change_password: ->
Coconut.changePasswdView = new ChangePasswdView() if !Coconut.changePasswdView
Coconut.changePasswdView.render()
@listenTo(Coconut.changePasswdView, "success", ->
Dialog.createDialogWrap()
Dialog.confirm("Password has been updated...", 'Password Reset',['Ok'])
dialog.addEventListener 'close', ->
Coconut.router.navigate("#dashboard", {trigger: true})
)
reset_password: (token) ->
$("#login-backgrd").show()
if token
#TODO: Need to search for document with the specified token.
#check if token exist.
# User.checkToken
#if found()
#username should come from the doc with the specified token. Temporarily set to 'test'
username = 'test'
Coconut.ChangePasswordView = new ChangePasswordView() if !Coconut.ChangePasswordView
Coconut.ChangePasswordView.render(username)
@listenTo(Coconut.ChangePasswordView, "success", ->
Dialog.createDialogWrap()
Dialog.confirm("Password reset successful...", "Success",["Ok"])
dialog.addEventListener 'close', ->
Coconut.router.navigate("#login", {trigger: true})
)
else
Dialog.createDialogWrap()
Dialog.confirm("Invalid Token or Token expired.", "Error",["Ok"])
dialog.addEventListener 'close', ->
Coconut.router.navigate("#login", {trigger: true})
notAdmin: ->
if !(Coconut.currentUser)
@loginFailed()
else
Dialog.confirm("You do not have admin privileges", "Warning",["Ok"]) if(Coconut.currentUser)
reports: (options) =>
# Allows us to get name/value pairs from URL
options = _(options?.split(/\//)).map (option) -> unescape(option)
_.each options, (option,index) =>
@reportViewOptions[option] = options[index+1] unless index % 2
defaultOptions = @setDefaultOptions()
# Set the default option if it isn't already set
_(defaultOptions).each (defaultValue, option) =>
@reportViewOptions[option] = @reportViewOptions[option] or defaultValue
type = @reportViewOptions["type"]
document.title = 'Coconut Surveillance - Reports - #{type}'
@views[type] = new reportViews[type]() unless @views[type]
@views[type].setElement "#content"
#@views[type].render()
@appView.showView(@views[type])
@reportType = 'reports'
@showDateFilter(Coconut.router.reportViewOptions.startDate, Coconut.router.reportViewOptions.endDate, @views[type], @reportType)
# Needs to refactor later to keep it DRY
activities: (options) =>
options = _(options?.split(/\//)).map (option) -> unescape(option)
_.each options, (option,index) =>
@reportViewOptions[option] = options[index+1] unless index % 2
defaultOptions = @setDefaultOptions()
_(defaultOptions).each (defaultValue, option) =>
@reportViewOptions[option] = @reportViewOptions[option] or defaultValue
type = @reportViewOptions["type"]
@views[type] = new activityViews[type]() unless @views[type]
#@views[type].render()
@appView.showView(@views[type])
@reportType = 'activities'
@showDateFilter(Coconut.router.reportViewOptions.startDate, Coconut.router.reportViewOptions.endDate, @views[type], @reportType)
graph: (optionString) ->
document.title = 'Coconut Surveillance - Graph'
Coconut.graphView or= new GraphView()
Coconut.graphView.options = @parseOptionsString(optionString)
Coconut.graphView.render()
individuals: (optionString) ->
document.title = 'Coconut Surveillance - Individuals'
Coconut.individualsView or= new IndividualsView()
Coconut.individualsView.options = @parseOptionsString(optionString)
Coconut.individualsView.render()
cases: (optionString) ->
document.title = 'Coconut Surveillance - Cases'
Coconut.casesView or= new CasesView()
Coconut.casesView.options = @parseOptionsString(optionString)
Coconut.casesView.render()
showCase: (caseID, docID) ->
document.title = "Coconut Surveillance - Case #{caseID}"
Coconut.caseView ?= new CaseView()
Coconut.caseView.case = new Case
caseID: caseID
Coconut.caseView.case.fetch
success: ->
Coconut.caseView.render(docID)
error: (error) ->
alert "Could not display case: #{error}"
dashboard: (options) =>
document.title = 'Coconut Surveillance - Dashboard'
Coconut.dashboardView or= new DashboardView()
options = @parseOptionsString(options)
Coconut.dashboardView.startDate = options?.startDate or Coconut.dashboardView.startDate or @defaultStartDate()
Coconut.dashboardView.endDate = options?.endDate or Coconut.dashboardView.endDate or @defaultEndDate()
Coconut.dashboardView.administrativeLevel = options?.administrativeLevel or Coconut.dashboardView.administrativeLevel or "NATIONAL"
# Just maps different terms to the ones used by dashboard
Coconut.dashboardView.administrativeLevel = {
"FACILITY": "HEALTH FACILITIES"
"DISTRICT": "DISTRICTS"
"SHEHIA": "SHEHIAS"
}[Coconut.dashboardView.administrativeLevel.toUpperCase()] or Coconut.dashboardView.administrativeLevel
Coconut.dashboardView.administrativeName = options?.administrativeName or Coconut.dashboardView.administrativeName or "ZANZIBAR"
console.log Coconut.dashboardView
Coconut.dashboardView.render()
dataExport: =>
[startDate,endDate] = @setStartEndDateIfMissing()
@dataExportView = new DataExportView unless @dataExportView
@dataExportView.startDate = startDate
@dataExportView.endDate = endDate
#@dataExportView.render()
@appView.showView(@dataExportView)
@reportType = 'export'
@showDateFilter(@dataExportView.startDate,@dataExportView.endDate, @dataExportView, @reportType)
maps: (options) =>
document.title = 'Coconut Surveillance - Maps'
options = _(options?.split(/\//)).map (option) -> unescape(option)
# remove type option
options.splice(0,2)
_.each options, (option,index) =>
@reportViewOptions[option] = options[index+1] unless index % 2
defaultOptions = @setDefaultOptions()
# Set the default option if it isn't already set
_(defaultOptions).each (defaultValue, option) =>
@reportViewOptions[option] = @reportViewOptions[option] or defaultValue
type = @reportViewOptions["type"]
@mapView = new MapView unless @mapView
@mapView.setElement "#content"
#@mapView.render()
@appView.showView(@mapView)
@reportType = 'maps'
dateSelectorView = new DateSelectorView()
dateSelectorView.setElement('#date-selector')
dateSelectorView.reportType = 'maps'
dateSelectorView.render()
@showDateFilter(Coconut.router.reportViewOptions.startDate, Coconut.router.reportViewOptions.endDate, @mapView, @reportType)
HTMLHelpers.ChangeTitle("Maps")
FacilityHierarchy: =>
@adminLoggedIn
success: =>
@facilityHierarchyView = new FacilityHierarchyView unless @facilityHierarchyView
#@facilityHierarchyView.render()
@appView.showView(@facilityHierarchyView)
error: =>
@notAdmin()
rainfallStation: =>
@adminLoggedIn
success: =>
@rainfallStationView = new RainfallStationView unless @rainfallStationView
#@rainfallStationView.render()
@appView.showView(@rainfallStationView)
error: =>
@notAdmin()
geoHierarchy: =>
@adminLoggedIn
success: =>
@geoHierarchyView = new GeoHierarchyView unless @geoHierarchyView
#@geoHierarchyView.render()
@appView.showView(@geoHierarchyView)
error: =>
@notAdmin()
shehiasHighRisk: =>
@adminLoggedIn
success: =>
@shehiasHighRiskView = new ShehiasHighRiskView unless @shehiasHighRiskView
#@shehiasHighRiskView.render()
@appView.showView(@shehiasHighRiskView)
error: =>
@notAdmin()
users: () =>
@adminLoggedIn
success: =>
@usersView = new UsersView() unless @usersView
#@usersView.render()
@appView.showView(@usersView)
error: =>
@notAdmin()
dhis2: () =>
@adminLoggedIn
success: =>
@dhis2View = new Dhis2View() unless @dhis2View
#@dhis2View.render()
@appView.showView(@dhis2View)
error: =>
@notAdmin()
systemSettings: () =>
@adminLoggedIn
success: =>
@systemSettingsView = new SystemSettingsView unless @systemSettingsView
#@systemSettingsView.render()
@appView.showView(@systemSettingsView)
error: =>
@notAdmin()
newIssue: (issueID) =>
Coconut.issueView ?= new IssueView()
Coconut.issueView.issue = null
#Coconut.issueView.render()
@appView.showView(Coconut.issueView)
showIssue: (issueID) =>
Coconut.issueView ?= new IssueView()
Coconut.database.get issueID
.catch (error) ->
console.error error
.then (result) =>
if(result)
Coconut.issueView.issue = result
#Coconut.issueView.render()
@appView.showView(Coconut.issueView)
else
Dialog.createDialogWrap()
Dialog.confirm("Issue not found: <br />#{issueID}", "Database Error",["Ok"])
userLoggedIn: (callback) =>
User.isAuthenticated
success: (user) =>
if Coconut.currentUser.isAdmin() then $("#admin-main").show() else $("#admin-main").hide()
callback.success(user)
error: (error) ->
callback.error()
adminLoggedIn: (callback) ->
@userLoggedIn
success: (user) =>
if user.isAdmin()
callback.success(user)
else
$("#drawer-admin, #admin-main").hide()
$("#content").html "
<dialog id='dialog'>
<div id='dialogContent'> </div>
</dialog>
"
Dialog.confirm("You do not have admin privileges", "Warning",["Ok"])
error: =>
callback.error()
defaultStartDate: =>
moment().subtract(1,'week').startOf('isoWeek').format("YYYY-MM-DD")
defaultEndDate: =>
moment().subtract(1,'week').endOf('isoWeek').format("YYYY-MM-DD")
setStartEndDateIfMissing: (startDate,endDate) =>
startDate = Coconut.router.reportViewOptions.startDate || @defaultStartDate()
endDate = Coconut.router.reportViewOptions.endDate || @defaultEndDate()
[startDate, endDate]
showDateFilter: (startDate, endDate, reportView, reportType) ->
Coconut.dateSelectorView = new DateSelectorView() unless Coconut.dateSelectorView
Coconut.dateSelectorView.setElement "#dateSelector"
Coconut.dateSelectorView.startDate = startDate
Coconut.dateSelectorView.endDate = endDate
Coconut.dateSelectorView.reportView = reportView
Coconut.dateSelectorView.reportType = reportType
Coconut.dateSelectorView.render()
setDefaultOptions: () ->
return {
type: "Analysis"
startDate: @defaultStartDate()
endDate: @defaultEndDate()
aggregationLevel: "District"
mostSpecificLocationSelected: "ALL"
}
parseOptionsString: (optionString) ->
# Split the string, unescape it, then loop it and put it in a hash
options = {}
optionsArray = _(optionString?.split(/\//)).map (option) -> unescape(option)
for option, index in optionsArray
options[option] = optionsArray[index+1] unless index % 2
return options
parseQueryString: (queryString)->
params = {}
if(queryString)
_.each(
_.map(decodeURI(queryString).split(/&/g),(el,i) ->
aux = el.split('=')
o = {}
if(aux.length >= 1)
val = undefined
if(aux.length == 2)
val = aux[1]
o[aux[0]] = val
return o
),
(o) ->
_.extend(params,o)
)
return params
module.exports = Router
| 80228 | _ = require 'underscore'
$ = jQuery = require 'jquery'
Backbone = require 'backbone'
Backbone.$ = $
global.moment = require 'moment'
DashboardView = require './views/DashboardView'
MenuView = require './views/MenuView'
HeaderView = require './views/HeaderView'
UsersView = require './views/UsersView'
DateSelectorView = require './views/DateSelectorView'
IssuesView = require './views/IssuesView'
IssueView = require './views/IssueView'
global.Case = require './models/Case'
CaseView = require './views/CaseView'
DataExportView = require './views/DataExportView'
MapView = require './views/MapView'
FacilityHierarchyView = require './views/FacilityHierarchyView'
RainfallStationView = require './views/RainfallStationView'
GeoHierarchyView = require './views/GeoHierarchyView'
Dhis2View = require './views/Dhis2View'
SystemSettingsView = require './views/SystemSettingsView'
LoginView = require './views/LoginView'
ChangePasswdView = require './views/ChangePasswdView'
User = require './models/User'
Dialog = require './views/Dialog'
MessagingView = require './views/MessagingView'
FindCaseView = require './views/FindCaseView'
Graphs = require './models/Graphs'
GraphView = require './views/GraphView'
IndividualsView = require './views/IndividualsView'
CasesView = require './views/CasesView'
EntomologyInvestigationsView = require './views/EntomologyInvestigationsView'
EntomologySpecimensView = require './views/EntomologySpecimensView'
EntomologyDashboardView = require './views/EntomologyDashboardView'
Test1View = require './views/Test1View'
# This allows us to create new instances of these dynamically based on the URL, for example:
# /reports/Analysis will lead to:
# new reportViews[type]() or new reportView["Analysis"]()
#
#AnalysisView = require './views/AnalysisView'
reportViews = {
"Analysis": require './views/AnalysisView'
"Casefollowup": require './views/CaseFollowupView'
"Individualclassification": require './views/IndividualClassificationView'
"Fociclassification": require './views/FociClassificationView'
"Compareweekly": require './views/CompareWeeklyView'
"Epidemicthreshold": require './views/EpidemicThresholdView'
"Systemerrors": require './views/SystemErrorsView'
"Incidentsgraph": require './views/IncidentsGraphView'
"Periodtrends": require './views/PeriodTrendsView'
"Rainfallreport": require './views/RainfallReportView'
"Usersreport": require './views/UsersReportView'
"WeeklyMeetingReport": require './views/WeeklyMeetingReportView'
"WeeklyFacilityReports": require './views/WeeklyFacilityReportsView'
"CleaningReports": require './views/CleaningReportsView'
"Weeklysummary": require './views/WeeklySummaryView'
"test": require './views/Test1View'
}
activityViews = {
Issues: require './views/IssuesView'
Messaging: require './views/MessagingView'
}
class Router extends Backbone.Router
# caches views
views: {}
# holds option pairs for more complex URLs like for reports
reportViewOptions: {}
activityViewOptions: {}
dateSelectorOptions: {}
noLogin = ["login", "logout", "reset_password"]
execute: (callback, args, name) ->
if noLogin.indexOf(name) is -1
@userLoggedIn
success: =>
args.push(@parseQueryString(args.pop())) if args[0] isnt null
callback.apply(this, args) if (callback)
error: =>
@loginFailed()
else
callback.apply(this, args) if callback
routes:
"": "dashboard"
"login": "login"
"logout": "logout"
"reset_password/:token": "reset_<PASSWORD>"
"reset_password": "<PASSWORD>"
"change_password": "<PASSWORD>"
"admin/dhis2": "dhis2"
"admin/system_settings": "systemSettings"
"admin/users": "users"
"admin/facilities": "FacilityHierarchy"
"admin/rainfall_station": "rainfallStation"
"admin/geo_hierarchy": "geoHierarchy"
"dashboard": "dashboard"
"dashboard/*options": "dashboard"
"export": "dataExport"
"export/*options": "dataExport"
"maps": "maps"
"maps/*options": "maps"
"graph/*options": "graph"
"individuals": "individuals"
"individuals/*options": "individuals"
"cases": "cases"
"cases/*options": "cases"
"reports": "reports"
"reports/*options": "reports" ##reports/type/Analysis/startDate/2016-01-01/endDate/2016-01-01 ->
"find/case": "findCase"
"find/case/:caseID": "findCase"
"show/case/:caseID": "showCase"
"show/cases/:caseID": "showCase"
"show/case/:caseID/:docID": "showCase"
"delete/result/:resultId": "deleteResult"
"new/issue": "newIssue"
"show/issue/:issueID": "showIssue"
"activities": "activities"
"activities/*options": "activities"
"entomology_dashboard": "entomologyDashboard"
"entomology_dashboard/*options": "entomologyDashboard"
"entomology_investigations": "entomologyInvestigations"
"entomology_investigations/*options": "entomologyInvestigations"
"entomology_specimens": "entomologySpecimens"
"entomology_specimens/*options": "entomologySpecimens"
"Test": "Test"
"*noMatch": "noMatch"
entomologyDashboard: (optionString) =>
Coconut.entomologyDashboardView = new EntomologyDashboardView()
Coconut.entomologyDashboardView.setElement $("#content")
Coconut.entomologyDashboardView.options = @parseOptionsString(optionString)
Coconut.entomologyDashboardView.render()
entomologyInvestigations: (optionString) =>
Coconut.entomologyInvestigationsView = new EntomologyInvestigationsView()
Coconut.entomologyInvestigationsView.setElement $("#content")
Coconut.entomologyInvestigationsView.options = @parseOptionsString(optionString)
Coconut.entomologyInvestigationsView.render()
entomologySpecimens: (optionString) =>
Coconut.entomologySpecimensView = new EntomologySpecimensView()
Coconut.entomologySpecimensView.setElement $("#content")
Coconut.entomologySpecimensView.options = @parseOptionsString(optionString)
Coconut.entomologySpecimensView.render()
findCase: (caseId) =>
Coconut.findCaseView or= new FindCaseView()
Coconut.findCaseView.setElement $("#content")
Coconut.findCaseView.caseId = caseId
Coconut.findCaseView.render()
deleteResult: (resultId) =>
if confirm "Are you sure you want to delete #{resultId}"
adminPassword = prompt "Enter the database admin password:"
adminDatabase = new PouchDB (await Coconut.database.info()).host,
"auth.username": "admin"
"auth.password": <PASSWORD>
adminDatabase.get(resultId)
.catch (error) => alert error
.then (result) =>
adminDatabase.destroy(result)
.catch (error) => alert error
.then =>
alert("#{resultId} deleted")
Coconut.router.navigate("#", {trigger:true})
initialize: (appView) ->
@appView = appView
noMatch: =>
console.error "Invalid URL, no matching route: "
$("#content").html "Page not found."
login: ->
Coconut.loginView = new LoginView() if !Coconut.loginView
Coconut.loginView.render()
@listenTo(Coconut.loginView, "success", ->
HTMLHelpers.showBackground('show')
Coconut.router.navigate("#dashboard", {trigger: true})
)
logout: ->
User.logout()
$("span#username").html ""
@login()
loginFailed: ->
Coconut.router.navigate("#login", {trigger: true})
change_password: ->
Coconut.changePasswdView = new ChangePasswdView() if !Coconut.changePasswdView
Coconut.changePasswdView.render()
@listenTo(Coconut.changePasswdView, "success", ->
Dialog.createDialogWrap()
Dialog.confirm("Password has been updated...", 'Password Reset',['Ok'])
dialog.addEventListener 'close', ->
Coconut.router.navigate("#dashboard", {trigger: true})
)
reset_password: (token) ->
$("#login-backgrd").show()
if token
#TODO: Need to search for document with the specified token.
#check if token exist.
# User.checkToken
#if found()
#username should come from the doc with the specified token. Temporarily set to 'test'
username = 'test'
Coconut.ChangePasswordView = new ChangePasswordView() if !Coconut.ChangePasswordView
Coconut.ChangePasswordView.render(username)
@listenTo(Coconut.ChangePasswordView, "success", ->
Dialog.createDialogWrap()
Dialog.confirm("Password reset successful...", "Success",["Ok"])
dialog.addEventListener 'close', ->
Coconut.router.navigate("#login", {trigger: true})
)
else
Dialog.createDialogWrap()
Dialog.confirm("Invalid Token or Token expired.", "Error",["Ok"])
dialog.addEventListener 'close', ->
Coconut.router.navigate("#login", {trigger: true})
notAdmin: ->
if !(Coconut.currentUser)
@loginFailed()
else
Dialog.confirm("You do not have admin privileges", "Warning",["Ok"]) if(Coconut.currentUser)
reports: (options) =>
# Allows us to get name/value pairs from URL
options = _(options?.split(/\//)).map (option) -> unescape(option)
_.each options, (option,index) =>
@reportViewOptions[option] = options[index+1] unless index % 2
defaultOptions = @setDefaultOptions()
# Set the default option if it isn't already set
_(defaultOptions).each (defaultValue, option) =>
@reportViewOptions[option] = @reportViewOptions[option] or defaultValue
type = @reportViewOptions["type"]
document.title = 'Coconut Surveillance - Reports - #{type}'
@views[type] = new reportViews[type]() unless @views[type]
@views[type].setElement "#content"
#@views[type].render()
@appView.showView(@views[type])
@reportType = 'reports'
@showDateFilter(Coconut.router.reportViewOptions.startDate, Coconut.router.reportViewOptions.endDate, @views[type], @reportType)
# Needs to refactor later to keep it DRY
activities: (options) =>
options = _(options?.split(/\//)).map (option) -> unescape(option)
_.each options, (option,index) =>
@reportViewOptions[option] = options[index+1] unless index % 2
defaultOptions = @setDefaultOptions()
_(defaultOptions).each (defaultValue, option) =>
@reportViewOptions[option] = @reportViewOptions[option] or defaultValue
type = @reportViewOptions["type"]
@views[type] = new activityViews[type]() unless @views[type]
#@views[type].render()
@appView.showView(@views[type])
@reportType = 'activities'
@showDateFilter(Coconut.router.reportViewOptions.startDate, Coconut.router.reportViewOptions.endDate, @views[type], @reportType)
graph: (optionString) ->
document.title = 'Coconut Surveillance - Graph'
Coconut.graphView or= new GraphView()
Coconut.graphView.options = @parseOptionsString(optionString)
Coconut.graphView.render()
individuals: (optionString) ->
document.title = 'Coconut Surveillance - Individuals'
Coconut.individualsView or= new IndividualsView()
Coconut.individualsView.options = @parseOptionsString(optionString)
Coconut.individualsView.render()
cases: (optionString) ->
document.title = 'Coconut Surveillance - Cases'
Coconut.casesView or= new CasesView()
Coconut.casesView.options = @parseOptionsString(optionString)
Coconut.casesView.render()
showCase: (caseID, docID) ->
document.title = "Coconut Surveillance - Case #{caseID}"
Coconut.caseView ?= new CaseView()
Coconut.caseView.case = new Case
caseID: caseID
Coconut.caseView.case.fetch
success: ->
Coconut.caseView.render(docID)
error: (error) ->
alert "Could not display case: #{error}"
dashboard: (options) =>
document.title = 'Coconut Surveillance - Dashboard'
Coconut.dashboardView or= new DashboardView()
options = @parseOptionsString(options)
Coconut.dashboardView.startDate = options?.startDate or Coconut.dashboardView.startDate or @defaultStartDate()
Coconut.dashboardView.endDate = options?.endDate or Coconut.dashboardView.endDate or @defaultEndDate()
Coconut.dashboardView.administrativeLevel = options?.administrativeLevel or Coconut.dashboardView.administrativeLevel or "NATIONAL"
# Just maps different terms to the ones used by dashboard
Coconut.dashboardView.administrativeLevel = {
"FACILITY": "HEALTH FACILITIES"
"DISTRICT": "DISTRICTS"
"SHEHIA": "SHEHIAS"
}[Coconut.dashboardView.administrativeLevel.toUpperCase()] or Coconut.dashboardView.administrativeLevel
Coconut.dashboardView.administrativeName = options?.administrativeName or Coconut.dashboardView.administrativeName or "ZANZIBAR"
console.log Coconut.dashboardView
Coconut.dashboardView.render()
dataExport: =>
[startDate,endDate] = @setStartEndDateIfMissing()
@dataExportView = new DataExportView unless @dataExportView
@dataExportView.startDate = startDate
@dataExportView.endDate = endDate
#@dataExportView.render()
@appView.showView(@dataExportView)
@reportType = 'export'
@showDateFilter(@dataExportView.startDate,@dataExportView.endDate, @dataExportView, @reportType)
maps: (options) =>
document.title = 'Coconut Surveillance - Maps'
options = _(options?.split(/\//)).map (option) -> unescape(option)
# remove type option
options.splice(0,2)
_.each options, (option,index) =>
@reportViewOptions[option] = options[index+1] unless index % 2
defaultOptions = @setDefaultOptions()
# Set the default option if it isn't already set
_(defaultOptions).each (defaultValue, option) =>
@reportViewOptions[option] = @reportViewOptions[option] or defaultValue
type = @reportViewOptions["type"]
@mapView = new MapView unless @mapView
@mapView.setElement "#content"
#@mapView.render()
@appView.showView(@mapView)
@reportType = 'maps'
dateSelectorView = new DateSelectorView()
dateSelectorView.setElement('#date-selector')
dateSelectorView.reportType = 'maps'
dateSelectorView.render()
@showDateFilter(Coconut.router.reportViewOptions.startDate, Coconut.router.reportViewOptions.endDate, @mapView, @reportType)
HTMLHelpers.ChangeTitle("Maps")
FacilityHierarchy: =>
@adminLoggedIn
success: =>
@facilityHierarchyView = new FacilityHierarchyView unless @facilityHierarchyView
#@facilityHierarchyView.render()
@appView.showView(@facilityHierarchyView)
error: =>
@notAdmin()
rainfallStation: =>
@adminLoggedIn
success: =>
@rainfallStationView = new RainfallStationView unless @rainfallStationView
#@rainfallStationView.render()
@appView.showView(@rainfallStationView)
error: =>
@notAdmin()
geoHierarchy: =>
@adminLoggedIn
success: =>
@geoHierarchyView = new GeoHierarchyView unless @geoHierarchyView
#@geoHierarchyView.render()
@appView.showView(@geoHierarchyView)
error: =>
@notAdmin()
shehiasHighRisk: =>
@adminLoggedIn
success: =>
@shehiasHighRiskView = new ShehiasHighRiskView unless @shehiasHighRiskView
#@shehiasHighRiskView.render()
@appView.showView(@shehiasHighRiskView)
error: =>
@notAdmin()
users: () =>
@adminLoggedIn
success: =>
@usersView = new UsersView() unless @usersView
#@usersView.render()
@appView.showView(@usersView)
error: =>
@notAdmin()
dhis2: () =>
@adminLoggedIn
success: =>
@dhis2View = new Dhis2View() unless @dhis2View
#@dhis2View.render()
@appView.showView(@dhis2View)
error: =>
@notAdmin()
systemSettings: () =>
@adminLoggedIn
success: =>
@systemSettingsView = new SystemSettingsView unless @systemSettingsView
#@systemSettingsView.render()
@appView.showView(@systemSettingsView)
error: =>
@notAdmin()
newIssue: (issueID) =>
Coconut.issueView ?= new IssueView()
Coconut.issueView.issue = null
#Coconut.issueView.render()
@appView.showView(Coconut.issueView)
showIssue: (issueID) =>
Coconut.issueView ?= new IssueView()
Coconut.database.get issueID
.catch (error) ->
console.error error
.then (result) =>
if(result)
Coconut.issueView.issue = result
#Coconut.issueView.render()
@appView.showView(Coconut.issueView)
else
Dialog.createDialogWrap()
Dialog.confirm("Issue not found: <br />#{issueID}", "Database Error",["Ok"])
userLoggedIn: (callback) =>
User.isAuthenticated
success: (user) =>
if Coconut.currentUser.isAdmin() then $("#admin-main").show() else $("#admin-main").hide()
callback.success(user)
error: (error) ->
callback.error()
adminLoggedIn: (callback) ->
@userLoggedIn
success: (user) =>
if user.isAdmin()
callback.success(user)
else
$("#drawer-admin, #admin-main").hide()
$("#content").html "
<dialog id='dialog'>
<div id='dialogContent'> </div>
</dialog>
"
Dialog.confirm("You do not have admin privileges", "Warning",["Ok"])
error: =>
callback.error()
defaultStartDate: =>
moment().subtract(1,'week').startOf('isoWeek').format("YYYY-MM-DD")
defaultEndDate: =>
moment().subtract(1,'week').endOf('isoWeek').format("YYYY-MM-DD")
setStartEndDateIfMissing: (startDate,endDate) =>
startDate = Coconut.router.reportViewOptions.startDate || @defaultStartDate()
endDate = Coconut.router.reportViewOptions.endDate || @defaultEndDate()
[startDate, endDate]
showDateFilter: (startDate, endDate, reportView, reportType) ->
Coconut.dateSelectorView = new DateSelectorView() unless Coconut.dateSelectorView
Coconut.dateSelectorView.setElement "#dateSelector"
Coconut.dateSelectorView.startDate = startDate
Coconut.dateSelectorView.endDate = endDate
Coconut.dateSelectorView.reportView = reportView
Coconut.dateSelectorView.reportType = reportType
Coconut.dateSelectorView.render()
setDefaultOptions: () ->
return {
type: "Analysis"
startDate: @defaultStartDate()
endDate: @defaultEndDate()
aggregationLevel: "District"
mostSpecificLocationSelected: "ALL"
}
parseOptionsString: (optionString) ->
# Split the string, unescape it, then loop it and put it in a hash
options = {}
optionsArray = _(optionString?.split(/\//)).map (option) -> unescape(option)
for option, index in optionsArray
options[option] = optionsArray[index+1] unless index % 2
return options
parseQueryString: (queryString)->
params = {}
if(queryString)
_.each(
_.map(decodeURI(queryString).split(/&/g),(el,i) ->
aux = el.split('=')
o = {}
if(aux.length >= 1)
val = undefined
if(aux.length == 2)
val = aux[1]
o[aux[0]] = val
return o
),
(o) ->
_.extend(params,o)
)
return params
module.exports = Router
| true | _ = require 'underscore'
$ = jQuery = require 'jquery'
Backbone = require 'backbone'
Backbone.$ = $
global.moment = require 'moment'
DashboardView = require './views/DashboardView'
MenuView = require './views/MenuView'
HeaderView = require './views/HeaderView'
UsersView = require './views/UsersView'
DateSelectorView = require './views/DateSelectorView'
IssuesView = require './views/IssuesView'
IssueView = require './views/IssueView'
global.Case = require './models/Case'
CaseView = require './views/CaseView'
DataExportView = require './views/DataExportView'
MapView = require './views/MapView'
FacilityHierarchyView = require './views/FacilityHierarchyView'
RainfallStationView = require './views/RainfallStationView'
GeoHierarchyView = require './views/GeoHierarchyView'
Dhis2View = require './views/Dhis2View'
SystemSettingsView = require './views/SystemSettingsView'
LoginView = require './views/LoginView'
ChangePasswdView = require './views/ChangePasswdView'
User = require './models/User'
Dialog = require './views/Dialog'
MessagingView = require './views/MessagingView'
FindCaseView = require './views/FindCaseView'
Graphs = require './models/Graphs'
GraphView = require './views/GraphView'
IndividualsView = require './views/IndividualsView'
CasesView = require './views/CasesView'
EntomologyInvestigationsView = require './views/EntomologyInvestigationsView'
EntomologySpecimensView = require './views/EntomologySpecimensView'
EntomologyDashboardView = require './views/EntomologyDashboardView'
Test1View = require './views/Test1View'
# This allows us to create new instances of these dynamically based on the URL, for example:
# /reports/Analysis will lead to:
# new reportViews[type]() or new reportView["Analysis"]()
#
#AnalysisView = require './views/AnalysisView'
reportViews = {
"Analysis": require './views/AnalysisView'
"Casefollowup": require './views/CaseFollowupView'
"Individualclassification": require './views/IndividualClassificationView'
"Fociclassification": require './views/FociClassificationView'
"Compareweekly": require './views/CompareWeeklyView'
"Epidemicthreshold": require './views/EpidemicThresholdView'
"Systemerrors": require './views/SystemErrorsView'
"Incidentsgraph": require './views/IncidentsGraphView'
"Periodtrends": require './views/PeriodTrendsView'
"Rainfallreport": require './views/RainfallReportView'
"Usersreport": require './views/UsersReportView'
"WeeklyMeetingReport": require './views/WeeklyMeetingReportView'
"WeeklyFacilityReports": require './views/WeeklyFacilityReportsView'
"CleaningReports": require './views/CleaningReportsView'
"Weeklysummary": require './views/WeeklySummaryView'
"test": require './views/Test1View'
}
activityViews = {
Issues: require './views/IssuesView'
Messaging: require './views/MessagingView'
}
class Router extends Backbone.Router
# caches views
views: {}
# holds option pairs for more complex URLs like for reports
reportViewOptions: {}
activityViewOptions: {}
dateSelectorOptions: {}
noLogin = ["login", "logout", "reset_password"]
execute: (callback, args, name) ->
if noLogin.indexOf(name) is -1
@userLoggedIn
success: =>
args.push(@parseQueryString(args.pop())) if args[0] isnt null
callback.apply(this, args) if (callback)
error: =>
@loginFailed()
else
callback.apply(this, args) if callback
routes:
"": "dashboard"
"login": "login"
"logout": "logout"
"reset_password/:token": "reset_PI:PASSWORD:<PASSWORD>END_PI"
"reset_password": "PI:PASSWORD:<PASSWORD>END_PI"
"change_password": "PI:PASSWORD:<PASSWORD>END_PI"
"admin/dhis2": "dhis2"
"admin/system_settings": "systemSettings"
"admin/users": "users"
"admin/facilities": "FacilityHierarchy"
"admin/rainfall_station": "rainfallStation"
"admin/geo_hierarchy": "geoHierarchy"
"dashboard": "dashboard"
"dashboard/*options": "dashboard"
"export": "dataExport"
"export/*options": "dataExport"
"maps": "maps"
"maps/*options": "maps"
"graph/*options": "graph"
"individuals": "individuals"
"individuals/*options": "individuals"
"cases": "cases"
"cases/*options": "cases"
"reports": "reports"
"reports/*options": "reports" ##reports/type/Analysis/startDate/2016-01-01/endDate/2016-01-01 ->
"find/case": "findCase"
"find/case/:caseID": "findCase"
"show/case/:caseID": "showCase"
"show/cases/:caseID": "showCase"
"show/case/:caseID/:docID": "showCase"
"delete/result/:resultId": "deleteResult"
"new/issue": "newIssue"
"show/issue/:issueID": "showIssue"
"activities": "activities"
"activities/*options": "activities"
"entomology_dashboard": "entomologyDashboard"
"entomology_dashboard/*options": "entomologyDashboard"
"entomology_investigations": "entomologyInvestigations"
"entomology_investigations/*options": "entomologyInvestigations"
"entomology_specimens": "entomologySpecimens"
"entomology_specimens/*options": "entomologySpecimens"
"Test": "Test"
"*noMatch": "noMatch"
entomologyDashboard: (optionString) =>
Coconut.entomologyDashboardView = new EntomologyDashboardView()
Coconut.entomologyDashboardView.setElement $("#content")
Coconut.entomologyDashboardView.options = @parseOptionsString(optionString)
Coconut.entomologyDashboardView.render()
entomologyInvestigations: (optionString) =>
Coconut.entomologyInvestigationsView = new EntomologyInvestigationsView()
Coconut.entomologyInvestigationsView.setElement $("#content")
Coconut.entomologyInvestigationsView.options = @parseOptionsString(optionString)
Coconut.entomologyInvestigationsView.render()
entomologySpecimens: (optionString) =>
Coconut.entomologySpecimensView = new EntomologySpecimensView()
Coconut.entomologySpecimensView.setElement $("#content")
Coconut.entomologySpecimensView.options = @parseOptionsString(optionString)
Coconut.entomologySpecimensView.render()
findCase: (caseId) =>
Coconut.findCaseView or= new FindCaseView()
Coconut.findCaseView.setElement $("#content")
Coconut.findCaseView.caseId = caseId
Coconut.findCaseView.render()
deleteResult: (resultId) =>
if confirm "Are you sure you want to delete #{resultId}"
adminPassword = prompt "Enter the database admin password:"
adminDatabase = new PouchDB (await Coconut.database.info()).host,
"auth.username": "admin"
"auth.password": PI:PASSWORD:<PASSWORD>END_PI
adminDatabase.get(resultId)
.catch (error) => alert error
.then (result) =>
adminDatabase.destroy(result)
.catch (error) => alert error
.then =>
alert("#{resultId} deleted")
Coconut.router.navigate("#", {trigger:true})
initialize: (appView) ->
@appView = appView
noMatch: =>
console.error "Invalid URL, no matching route: "
$("#content").html "Page not found."
login: ->
Coconut.loginView = new LoginView() if !Coconut.loginView
Coconut.loginView.render()
@listenTo(Coconut.loginView, "success", ->
HTMLHelpers.showBackground('show')
Coconut.router.navigate("#dashboard", {trigger: true})
)
logout: ->
User.logout()
$("span#username").html ""
@login()
loginFailed: ->
Coconut.router.navigate("#login", {trigger: true})
change_password: ->
Coconut.changePasswdView = new ChangePasswdView() if !Coconut.changePasswdView
Coconut.changePasswdView.render()
@listenTo(Coconut.changePasswdView, "success", ->
Dialog.createDialogWrap()
Dialog.confirm("Password has been updated...", 'Password Reset',['Ok'])
dialog.addEventListener 'close', ->
Coconut.router.navigate("#dashboard", {trigger: true})
)
reset_password: (token) ->
$("#login-backgrd").show()
if token
#TODO: Need to search for document with the specified token.
#check if token exist.
# User.checkToken
#if found()
#username should come from the doc with the specified token. Temporarily set to 'test'
username = 'test'
Coconut.ChangePasswordView = new ChangePasswordView() if !Coconut.ChangePasswordView
Coconut.ChangePasswordView.render(username)
@listenTo(Coconut.ChangePasswordView, "success", ->
Dialog.createDialogWrap()
Dialog.confirm("Password reset successful...", "Success",["Ok"])
dialog.addEventListener 'close', ->
Coconut.router.navigate("#login", {trigger: true})
)
else
Dialog.createDialogWrap()
Dialog.confirm("Invalid Token or Token expired.", "Error",["Ok"])
dialog.addEventListener 'close', ->
Coconut.router.navigate("#login", {trigger: true})
notAdmin: ->
if !(Coconut.currentUser)
@loginFailed()
else
Dialog.confirm("You do not have admin privileges", "Warning",["Ok"]) if(Coconut.currentUser)
reports: (options) =>
# Allows us to get name/value pairs from URL
options = _(options?.split(/\//)).map (option) -> unescape(option)
_.each options, (option,index) =>
@reportViewOptions[option] = options[index+1] unless index % 2
defaultOptions = @setDefaultOptions()
# Set the default option if it isn't already set
_(defaultOptions).each (defaultValue, option) =>
@reportViewOptions[option] = @reportViewOptions[option] or defaultValue
type = @reportViewOptions["type"]
document.title = 'Coconut Surveillance - Reports - #{type}'
@views[type] = new reportViews[type]() unless @views[type]
@views[type].setElement "#content"
#@views[type].render()
@appView.showView(@views[type])
@reportType = 'reports'
@showDateFilter(Coconut.router.reportViewOptions.startDate, Coconut.router.reportViewOptions.endDate, @views[type], @reportType)
# Needs to refactor later to keep it DRY
activities: (options) =>
options = _(options?.split(/\//)).map (option) -> unescape(option)
_.each options, (option,index) =>
@reportViewOptions[option] = options[index+1] unless index % 2
defaultOptions = @setDefaultOptions()
_(defaultOptions).each (defaultValue, option) =>
@reportViewOptions[option] = @reportViewOptions[option] or defaultValue
type = @reportViewOptions["type"]
@views[type] = new activityViews[type]() unless @views[type]
#@views[type].render()
@appView.showView(@views[type])
@reportType = 'activities'
@showDateFilter(Coconut.router.reportViewOptions.startDate, Coconut.router.reportViewOptions.endDate, @views[type], @reportType)
graph: (optionString) ->
document.title = 'Coconut Surveillance - Graph'
Coconut.graphView or= new GraphView()
Coconut.graphView.options = @parseOptionsString(optionString)
Coconut.graphView.render()
individuals: (optionString) ->
document.title = 'Coconut Surveillance - Individuals'
Coconut.individualsView or= new IndividualsView()
Coconut.individualsView.options = @parseOptionsString(optionString)
Coconut.individualsView.render()
cases: (optionString) ->
document.title = 'Coconut Surveillance - Cases'
Coconut.casesView or= new CasesView()
Coconut.casesView.options = @parseOptionsString(optionString)
Coconut.casesView.render()
showCase: (caseID, docID) ->
document.title = "Coconut Surveillance - Case #{caseID}"
Coconut.caseView ?= new CaseView()
Coconut.caseView.case = new Case
caseID: caseID
Coconut.caseView.case.fetch
success: ->
Coconut.caseView.render(docID)
error: (error) ->
alert "Could not display case: #{error}"
dashboard: (options) =>
document.title = 'Coconut Surveillance - Dashboard'
Coconut.dashboardView or= new DashboardView()
options = @parseOptionsString(options)
Coconut.dashboardView.startDate = options?.startDate or Coconut.dashboardView.startDate or @defaultStartDate()
Coconut.dashboardView.endDate = options?.endDate or Coconut.dashboardView.endDate or @defaultEndDate()
Coconut.dashboardView.administrativeLevel = options?.administrativeLevel or Coconut.dashboardView.administrativeLevel or "NATIONAL"
# Just maps different terms to the ones used by dashboard
Coconut.dashboardView.administrativeLevel = {
"FACILITY": "HEALTH FACILITIES"
"DISTRICT": "DISTRICTS"
"SHEHIA": "SHEHIAS"
}[Coconut.dashboardView.administrativeLevel.toUpperCase()] or Coconut.dashboardView.administrativeLevel
Coconut.dashboardView.administrativeName = options?.administrativeName or Coconut.dashboardView.administrativeName or "ZANZIBAR"
console.log Coconut.dashboardView
Coconut.dashboardView.render()
dataExport: =>
[startDate,endDate] = @setStartEndDateIfMissing()
@dataExportView = new DataExportView unless @dataExportView
@dataExportView.startDate = startDate
@dataExportView.endDate = endDate
#@dataExportView.render()
@appView.showView(@dataExportView)
@reportType = 'export'
@showDateFilter(@dataExportView.startDate,@dataExportView.endDate, @dataExportView, @reportType)
maps: (options) =>
document.title = 'Coconut Surveillance - Maps'
options = _(options?.split(/\//)).map (option) -> unescape(option)
# remove type option
options.splice(0,2)
_.each options, (option,index) =>
@reportViewOptions[option] = options[index+1] unless index % 2
defaultOptions = @setDefaultOptions()
# Set the default option if it isn't already set
_(defaultOptions).each (defaultValue, option) =>
@reportViewOptions[option] = @reportViewOptions[option] or defaultValue
type = @reportViewOptions["type"]
@mapView = new MapView unless @mapView
@mapView.setElement "#content"
#@mapView.render()
@appView.showView(@mapView)
@reportType = 'maps'
dateSelectorView = new DateSelectorView()
dateSelectorView.setElement('#date-selector')
dateSelectorView.reportType = 'maps'
dateSelectorView.render()
@showDateFilter(Coconut.router.reportViewOptions.startDate, Coconut.router.reportViewOptions.endDate, @mapView, @reportType)
HTMLHelpers.ChangeTitle("Maps")
FacilityHierarchy: =>
@adminLoggedIn
success: =>
@facilityHierarchyView = new FacilityHierarchyView unless @facilityHierarchyView
#@facilityHierarchyView.render()
@appView.showView(@facilityHierarchyView)
error: =>
@notAdmin()
rainfallStation: =>
@adminLoggedIn
success: =>
@rainfallStationView = new RainfallStationView unless @rainfallStationView
#@rainfallStationView.render()
@appView.showView(@rainfallStationView)
error: =>
@notAdmin()
geoHierarchy: =>
@adminLoggedIn
success: =>
@geoHierarchyView = new GeoHierarchyView unless @geoHierarchyView
#@geoHierarchyView.render()
@appView.showView(@geoHierarchyView)
error: =>
@notAdmin()
shehiasHighRisk: =>
@adminLoggedIn
success: =>
@shehiasHighRiskView = new ShehiasHighRiskView unless @shehiasHighRiskView
#@shehiasHighRiskView.render()
@appView.showView(@shehiasHighRiskView)
error: =>
@notAdmin()
users: () =>
@adminLoggedIn
success: =>
@usersView = new UsersView() unless @usersView
#@usersView.render()
@appView.showView(@usersView)
error: =>
@notAdmin()
dhis2: () =>
@adminLoggedIn
success: =>
@dhis2View = new Dhis2View() unless @dhis2View
#@dhis2View.render()
@appView.showView(@dhis2View)
error: =>
@notAdmin()
systemSettings: () =>
@adminLoggedIn
success: =>
@systemSettingsView = new SystemSettingsView unless @systemSettingsView
#@systemSettingsView.render()
@appView.showView(@systemSettingsView)
error: =>
@notAdmin()
newIssue: (issueID) =>
Coconut.issueView ?= new IssueView()
Coconut.issueView.issue = null
#Coconut.issueView.render()
@appView.showView(Coconut.issueView)
showIssue: (issueID) =>
Coconut.issueView ?= new IssueView()
Coconut.database.get issueID
.catch (error) ->
console.error error
.then (result) =>
if(result)
Coconut.issueView.issue = result
#Coconut.issueView.render()
@appView.showView(Coconut.issueView)
else
Dialog.createDialogWrap()
Dialog.confirm("Issue not found: <br />#{issueID}", "Database Error",["Ok"])
userLoggedIn: (callback) =>
User.isAuthenticated
success: (user) =>
if Coconut.currentUser.isAdmin() then $("#admin-main").show() else $("#admin-main").hide()
callback.success(user)
error: (error) ->
callback.error()
adminLoggedIn: (callback) ->
@userLoggedIn
success: (user) =>
if user.isAdmin()
callback.success(user)
else
$("#drawer-admin, #admin-main").hide()
$("#content").html "
<dialog id='dialog'>
<div id='dialogContent'> </div>
</dialog>
"
Dialog.confirm("You do not have admin privileges", "Warning",["Ok"])
error: =>
callback.error()
defaultStartDate: =>
moment().subtract(1,'week').startOf('isoWeek').format("YYYY-MM-DD")
defaultEndDate: =>
moment().subtract(1,'week').endOf('isoWeek').format("YYYY-MM-DD")
setStartEndDateIfMissing: (startDate,endDate) =>
startDate = Coconut.router.reportViewOptions.startDate || @defaultStartDate()
endDate = Coconut.router.reportViewOptions.endDate || @defaultEndDate()
[startDate, endDate]
showDateFilter: (startDate, endDate, reportView, reportType) ->
Coconut.dateSelectorView = new DateSelectorView() unless Coconut.dateSelectorView
Coconut.dateSelectorView.setElement "#dateSelector"
Coconut.dateSelectorView.startDate = startDate
Coconut.dateSelectorView.endDate = endDate
Coconut.dateSelectorView.reportView = reportView
Coconut.dateSelectorView.reportType = reportType
Coconut.dateSelectorView.render()
setDefaultOptions: () ->
return {
type: "Analysis"
startDate: @defaultStartDate()
endDate: @defaultEndDate()
aggregationLevel: "District"
mostSpecificLocationSelected: "ALL"
}
parseOptionsString: (optionString) ->
# Split the string, unescape it, then loop it and put it in a hash
options = {}
optionsArray = _(optionString?.split(/\//)).map (option) -> unescape(option)
for option, index in optionsArray
options[option] = optionsArray[index+1] unless index % 2
return options
parseQueryString: (queryString)->
params = {}
if(queryString)
_.each(
_.map(decodeURI(queryString).split(/&/g),(el,i) ->
aux = el.split('=')
o = {}
if(aux.length >= 1)
val = undefined
if(aux.length == 2)
val = aux[1]
o[aux[0]] = val
return o
),
(o) ->
_.extend(params,o)
)
return params
module.exports = Router
|
[
{
"context": "pHandler\", ->\n\n\tbeforeEach ->\n\t\t@adminUser_id = \"12321\"\n\t\t@newEmail = \"bob@smith.com\"\n\t\t@user_id = \"3121",
"end": 299,
"score": 0.7386924028396606,
"start": 295,
"tag": "USERNAME",
"value": "2321"
},
{
"context": "eEach ->\n\t\t@adminUser_id = \"12321\... | test/unit/coffee/Subscription/SubscriptionGroupHandlerTests.coffee | shyoshyo/web-sharelatex | 1 | SandboxedModule = require('sandboxed-module')
should = require('chai').should()
sinon = require 'sinon'
assert = require("chai").assert
modulePath = "../../../../app/js/Features/Subscription/SubscriptionGroupHandler"
# Unit tests for SubscriptionGroupHandler: removing group members, rewriting
# user ids across subscriptions, membership checks, and counting members.
describe "SubscriptionGroupHandler", ->
# Fresh fixtures and stubbed collaborators are rebuilt before every test.
beforeEach ->
@adminUser_id = "12321"
@newEmail = "bob@smith.com"
@user_id = "3121321"
@email = "jim@example.com"
@user = {_id:@user_id, email:@newEmail}
@subscription_id = "31DSd1123D"
@subscription =
admin_id: @adminUser_id
manager_ids: [@adminUser_id]
_id:@subscription_id
@SubscriptionLocator =
getUsersSubscription: sinon.stub()
getSubscriptionByMemberIdAndId: sinon.stub()
getSubscription: sinon.stub().callsArgWith(1, null, @subscription)
@UserCreator =
getUserOrCreateHoldingAccount: sinon.stub().callsArgWith(1, null, @user)
@SubscriptionUpdater =
removeUserFromGroup: sinon.stub().callsArgWith(2)
getSubscription: sinon.stub().callsArgWith(2)
@TeamInvitesHandler =
createInvite: sinon.stub().callsArgWith(2)
@UserGetter =
getUser: sinon.stub()
getUserByAnyEmail: sinon.stub()
@LimitationsManager =
hasGroupMembersLimitReached: sinon.stub()
@OneTimeTokenHandler =
getValueFromTokenAndExpire:sinon.stub()
getNewToken:sinon.stub()
@EmailHandler =
sendEmail:sinon.stub()
@Subscription =
update: sinon.stub().yields()
findOne: sinon.stub().yields()
@settings =
siteUrl:"http://www.sharelatex.com"
@readStub = sinon.stub()
@NotificationsBuilder =
groupPlan: sinon.stub().returns({read:@readStub})
@UserMembershipViewModel =
build: (email) -> { email }
# Load the module under test with every collaborator replaced by the
# stubs built above.
@Handler = SandboxedModule.require modulePath, requires:
"logger-sharelatex": log:->
"../User/UserCreator": @UserCreator
"./SubscriptionUpdater": @SubscriptionUpdater
"./SubscriptionLocator": @SubscriptionLocator
"../../models/Subscription": Subscription: @Subscription
"../User/UserGetter": @UserGetter
"./LimitationsManager": @LimitationsManager
"../Security/OneTimeTokenHandler":@OneTimeTokenHandler
"../Email/EmailHandler":@EmailHandler
"settings-sharelatex":@settings
"../Notifications/NotificationsBuilder": @NotificationsBuilder
"../UserMembership/UserMembershipViewModel": @UserMembershipViewModel
"logger-sharelatex":
err:->
log:->
warn:->
# removeUserFromGroup should delegate to SubscriptionUpdater.
describe "removeUserFromGroup", ->
it "should call the subscription updater to remove the user", (done)->
@Handler.removeUserFromGroup @adminUser_id, @user._id, (err)=>
@SubscriptionUpdater.removeUserFromGroup.calledWith(@adminUser_id, @user._id).should.equal true
done()
# replaceUserReferencesInGroups should rewrite admin_id, manager_ids and
# member_ids from @oldId to @newId via Subscription.update.
describe "replaceUserReferencesInGroups", ->
beforeEach (done)->
@oldId = "ba5eba11"
@newId = "5ca1ab1e"
@Handler.replaceUserReferencesInGroups @oldId, @newId, ->
done()
it "replaces the admin_id", ->
@Subscription.update.calledWith(
{ admin_id: @oldId },
{ admin_id: @newId }
).should.equal true
it "replaces the manager_ids", ->
@Subscription.update.calledWith(
{manager_ids:"ba5eba11"},{$addToSet:{manager_ids:"5ca1ab1e"}},{multi:true}
).should.equal true
@Subscription.update.calledWith(
{manager_ids:"ba5eba11"},{$pull:{manager_ids:"ba5eba11"}},{multi:true}
).should.equal true
it "replaces the member ids", ->
@Subscription.update.calledWith(
{ member_ids: @oldId },
{ $addToSet: { member_ids: @newId } }
).should.equal true
@Subscription.update.calledWith(
{ member_ids: @oldId },
{ $pull: { member_ids: @oldId } }
).should.equal true
# Membership check is resolved through getSubscriptionByMemberIdAndId.
describe "isUserPartOfGroup", ->
beforeEach ->
@subscription_id = "123ed13123"
it "should return true when user is part of subscription", (done)->
@SubscriptionLocator.getSubscriptionByMemberIdAndId.callsArgWith(2, null, {_id:@subscription_id})
@Handler.isUserPartOfGroup @user_id, @subscription_id, (err, partOfGroup)->
partOfGroup.should.equal true
done()
it "should return false when no subscription is found", (done)->
@SubscriptionLocator.getSubscriptionByMemberIdAndId.callsArgWith(2, null)
@Handler.isUserPartOfGroup @user_id, @subscription_id, (err, partOfGroup)->
partOfGroup.should.equal false
done()
# Count is expected to equal the number of member_ids on the subscription
# returned by the locator; undefined when the subscription is missing.
describe "getTotalConfirmedUsersInGroup", ->
describe "for existing subscriptions", ->
beforeEach ->
@subscription.member_ids = ["12321", "3121321"]
it "should call the subscription locator and return 2 users", (done)->
@Handler.getTotalConfirmedUsersInGroup @subscription_id, (err, count)=>
@SubscriptionLocator.getSubscription.calledWith(@subscription_id).should.equal true
count.should.equal 2
done()
describe "for nonexistent subscriptions", ->
it "should return undefined", (done)->
@Handler.getTotalConfirmedUsersInGroup "fake-id", (err, count)=>
should.not.exist(count)
done()
| 119890 | SandboxedModule = require('sandboxed-module')
should = require('chai').should()
sinon = require 'sinon'
assert = require("chai").assert
modulePath = "../../../../app/js/Features/Subscription/SubscriptionGroupHandler"
# Unit tests for SubscriptionGroupHandler: removing group members, rewriting
# user ids across subscriptions, membership checks, and counting members.
# (Email fixtures in this copy are literal "<EMAIL>" redaction placeholders.)
describe "SubscriptionGroupHandler", ->
# Fresh fixtures and stubbed collaborators are rebuilt before every test.
beforeEach ->
@adminUser_id = "12321"
@newEmail = "<EMAIL>"
@user_id = "3121321"
@email = "<EMAIL>"
@user = {_id:@user_id, email:@newEmail}
@subscription_id = "31DSd1123D"
@subscription =
admin_id: @adminUser_id
manager_ids: [@adminUser_id]
_id:@subscription_id
@SubscriptionLocator =
getUsersSubscription: sinon.stub()
getSubscriptionByMemberIdAndId: sinon.stub()
getSubscription: sinon.stub().callsArgWith(1, null, @subscription)
@UserCreator =
getUserOrCreateHoldingAccount: sinon.stub().callsArgWith(1, null, @user)
@SubscriptionUpdater =
removeUserFromGroup: sinon.stub().callsArgWith(2)
getSubscription: sinon.stub().callsArgWith(2)
@TeamInvitesHandler =
createInvite: sinon.stub().callsArgWith(2)
@UserGetter =
getUser: sinon.stub()
getUserByAnyEmail: sinon.stub()
@LimitationsManager =
hasGroupMembersLimitReached: sinon.stub()
@OneTimeTokenHandler =
getValueFromTokenAndExpire:sinon.stub()
getNewToken:sinon.stub()
@EmailHandler =
sendEmail:sinon.stub()
@Subscription =
update: sinon.stub().yields()
findOne: sinon.stub().yields()
@settings =
siteUrl:"http://www.sharelatex.com"
@readStub = sinon.stub()
@NotificationsBuilder =
groupPlan: sinon.stub().returns({read:@readStub})
@UserMembershipViewModel =
build: (email) -> { email }
# Load the module under test with every collaborator replaced by the
# stubs built above.
@Handler = SandboxedModule.require modulePath, requires:
"logger-sharelatex": log:->
"../User/UserCreator": @UserCreator
"./SubscriptionUpdater": @SubscriptionUpdater
"./SubscriptionLocator": @SubscriptionLocator
"../../models/Subscription": Subscription: @Subscription
"../User/UserGetter": @UserGetter
"./LimitationsManager": @LimitationsManager
"../Security/OneTimeTokenHandler":@OneTimeTokenHandler
"../Email/EmailHandler":@EmailHandler
"settings-sharelatex":@settings
"../Notifications/NotificationsBuilder": @NotificationsBuilder
"../UserMembership/UserMembershipViewModel": @UserMembershipViewModel
"logger-sharelatex":
err:->
log:->
warn:->
# removeUserFromGroup should delegate to SubscriptionUpdater.
describe "removeUserFromGroup", ->
it "should call the subscription updater to remove the user", (done)->
@Handler.removeUserFromGroup @adminUser_id, @user._id, (err)=>
@SubscriptionUpdater.removeUserFromGroup.calledWith(@adminUser_id, @user._id).should.equal true
done()
# replaceUserReferencesInGroups should rewrite admin_id, manager_ids and
# member_ids from @oldId to @newId via Subscription.update.
describe "replaceUserReferencesInGroups", ->
beforeEach (done)->
@oldId = "ba5eba11"
@newId = "5ca1ab1e"
@Handler.replaceUserReferencesInGroups @oldId, @newId, ->
done()
it "replaces the admin_id", ->
@Subscription.update.calledWith(
{ admin_id: @oldId },
{ admin_id: @newId }
).should.equal true
it "replaces the manager_ids", ->
@Subscription.update.calledWith(
{manager_ids:"ba5eba11"},{$addToSet:{manager_ids:"5ca1ab1e"}},{multi:true}
).should.equal true
@Subscription.update.calledWith(
{manager_ids:"ba5eba11"},{$pull:{manager_ids:"ba5eba11"}},{multi:true}
).should.equal true
it "replaces the member ids", ->
@Subscription.update.calledWith(
{ member_ids: @oldId },
{ $addToSet: { member_ids: @newId } }
).should.equal true
@Subscription.update.calledWith(
{ member_ids: @oldId },
{ $pull: { member_ids: @oldId } }
).should.equal true
# Membership check is resolved through getSubscriptionByMemberIdAndId.
describe "isUserPartOfGroup", ->
beforeEach ->
@subscription_id = "123ed13123"
it "should return true when user is part of subscription", (done)->
@SubscriptionLocator.getSubscriptionByMemberIdAndId.callsArgWith(2, null, {_id:@subscription_id})
@Handler.isUserPartOfGroup @user_id, @subscription_id, (err, partOfGroup)->
partOfGroup.should.equal true
done()
it "should return false when no subscription is found", (done)->
@SubscriptionLocator.getSubscriptionByMemberIdAndId.callsArgWith(2, null)
@Handler.isUserPartOfGroup @user_id, @subscription_id, (err, partOfGroup)->
partOfGroup.should.equal false
done()
# Count is expected to equal the number of member_ids on the subscription
# returned by the locator; undefined when the subscription is missing.
describe "getTotalConfirmedUsersInGroup", ->
describe "for existing subscriptions", ->
beforeEach ->
@subscription.member_ids = ["12321", "3121321"]
it "should call the subscription locator and return 2 users", (done)->
@Handler.getTotalConfirmedUsersInGroup @subscription_id, (err, count)=>
@SubscriptionLocator.getSubscription.calledWith(@subscription_id).should.equal true
count.should.equal 2
done()
describe "for nonexistent subscriptions", ->
it "should return undefined", (done)->
@Handler.getTotalConfirmedUsersInGroup "fake-id", (err, count)=>
should.not.exist(count)
done()
| true | SandboxedModule = require('sandboxed-module')
should = require('chai').should()
sinon = require 'sinon'
assert = require("chai").assert
modulePath = "../../../../app/js/Features/Subscription/SubscriptionGroupHandler"
# Unit tests for SubscriptionGroupHandler: removing group members, rewriting
# user ids across subscriptions, membership checks, and counting members.
# (Email fixtures in this copy carry literal "PI:EMAIL:...END_PI" markers.)
describe "SubscriptionGroupHandler", ->
# Fresh fixtures and stubbed collaborators are rebuilt before every test.
beforeEach ->
@adminUser_id = "12321"
@newEmail = "PI:EMAIL:<EMAIL>END_PI"
@user_id = "3121321"
@email = "PI:EMAIL:<EMAIL>END_PI"
@user = {_id:@user_id, email:@newEmail}
@subscription_id = "31DSd1123D"
@subscription =
admin_id: @adminUser_id
manager_ids: [@adminUser_id]
_id:@subscription_id
@SubscriptionLocator =
getUsersSubscription: sinon.stub()
getSubscriptionByMemberIdAndId: sinon.stub()
getSubscription: sinon.stub().callsArgWith(1, null, @subscription)
@UserCreator =
getUserOrCreateHoldingAccount: sinon.stub().callsArgWith(1, null, @user)
@SubscriptionUpdater =
removeUserFromGroup: sinon.stub().callsArgWith(2)
getSubscription: sinon.stub().callsArgWith(2)
@TeamInvitesHandler =
createInvite: sinon.stub().callsArgWith(2)
@UserGetter =
getUser: sinon.stub()
getUserByAnyEmail: sinon.stub()
@LimitationsManager =
hasGroupMembersLimitReached: sinon.stub()
@OneTimeTokenHandler =
getValueFromTokenAndExpire:sinon.stub()
getNewToken:sinon.stub()
@EmailHandler =
sendEmail:sinon.stub()
@Subscription =
update: sinon.stub().yields()
findOne: sinon.stub().yields()
@settings =
siteUrl:"http://www.sharelatex.com"
@readStub = sinon.stub()
@NotificationsBuilder =
groupPlan: sinon.stub().returns({read:@readStub})
@UserMembershipViewModel =
build: (email) -> { email }
# Load the module under test with every collaborator replaced by the
# stubs built above.
@Handler = SandboxedModule.require modulePath, requires:
"logger-sharelatex": log:->
"../User/UserCreator": @UserCreator
"./SubscriptionUpdater": @SubscriptionUpdater
"./SubscriptionLocator": @SubscriptionLocator
"../../models/Subscription": Subscription: @Subscription
"../User/UserGetter": @UserGetter
"./LimitationsManager": @LimitationsManager
"../Security/OneTimeTokenHandler":@OneTimeTokenHandler
"../Email/EmailHandler":@EmailHandler
"settings-sharelatex":@settings
"../Notifications/NotificationsBuilder": @NotificationsBuilder
"../UserMembership/UserMembershipViewModel": @UserMembershipViewModel
"logger-sharelatex":
err:->
log:->
warn:->
# removeUserFromGroup should delegate to SubscriptionUpdater.
describe "removeUserFromGroup", ->
it "should call the subscription updater to remove the user", (done)->
@Handler.removeUserFromGroup @adminUser_id, @user._id, (err)=>
@SubscriptionUpdater.removeUserFromGroup.calledWith(@adminUser_id, @user._id).should.equal true
done()
# replaceUserReferencesInGroups should rewrite admin_id, manager_ids and
# member_ids from @oldId to @newId via Subscription.update.
describe "replaceUserReferencesInGroups", ->
beforeEach (done)->
@oldId = "ba5eba11"
@newId = "5ca1ab1e"
@Handler.replaceUserReferencesInGroups @oldId, @newId, ->
done()
it "replaces the admin_id", ->
@Subscription.update.calledWith(
{ admin_id: @oldId },
{ admin_id: @newId }
).should.equal true
it "replaces the manager_ids", ->
@Subscription.update.calledWith(
{manager_ids:"ba5eba11"},{$addToSet:{manager_ids:"5ca1ab1e"}},{multi:true}
).should.equal true
@Subscription.update.calledWith(
{manager_ids:"ba5eba11"},{$pull:{manager_ids:"ba5eba11"}},{multi:true}
).should.equal true
it "replaces the member ids", ->
@Subscription.update.calledWith(
{ member_ids: @oldId },
{ $addToSet: { member_ids: @newId } }
).should.equal true
@Subscription.update.calledWith(
{ member_ids: @oldId },
{ $pull: { member_ids: @oldId } }
).should.equal true
# Membership check is resolved through getSubscriptionByMemberIdAndId.
describe "isUserPartOfGroup", ->
beforeEach ->
@subscription_id = "123ed13123"
it "should return true when user is part of subscription", (done)->
@SubscriptionLocator.getSubscriptionByMemberIdAndId.callsArgWith(2, null, {_id:@subscription_id})
@Handler.isUserPartOfGroup @user_id, @subscription_id, (err, partOfGroup)->
partOfGroup.should.equal true
done()
it "should return false when no subscription is found", (done)->
@SubscriptionLocator.getSubscriptionByMemberIdAndId.callsArgWith(2, null)
@Handler.isUserPartOfGroup @user_id, @subscription_id, (err, partOfGroup)->
partOfGroup.should.equal false
done()
# Count is expected to equal the number of member_ids on the subscription
# returned by the locator; undefined when the subscription is missing.
describe "getTotalConfirmedUsersInGroup", ->
describe "for existing subscriptions", ->
beforeEach ->
@subscription.member_ids = ["12321", "3121321"]
it "should call the subscription locator and return 2 users", (done)->
@Handler.getTotalConfirmedUsersInGroup @subscription_id, (err, count)=>
@SubscriptionLocator.getSubscription.calledWith(@subscription_id).should.equal true
count.should.equal 2
done()
describe "for nonexistent subscriptions", ->
it "should return undefined", (done)->
@Handler.getTotalConfirmedUsersInGroup "fake-id", (err, count)=>
should.not.exist(count)
done()
|
[
{
"context": "umber}, b, upsert: true, ->\n\n User.findOrCreate('zfogg')\n User.findOrCreate('zoodle')\n\n\nexports.buildin",
"end": 218,
"score": 0.9996562600135803,
"start": 213,
"tag": "USERNAME",
"value": "zfogg"
},
{
"context": " User.findOrCreate('zfogg')\n User.findOrCreat... | server/bootstrap_db.coffee | zfogg/turtlemap | 1 | mongoose = require('mongoose')
{Building, User, Course} = require './models'
# Seed routine: upsert every entry of BUILDINGS into the Building collection
# and ensure the two bootstrap user accounts exist.
# NOTE(review): the update callback is empty, so write errors are silently
# ignored -- confirm that is acceptable for a one-shot seed script.
exports.bootstrap = ->
for b in BUILDINGS
# Upsert keyed on the building number so reruns refresh rather than duplicate.
Building.update {number: b.number}, b, upsert: true, ->
User.findOrCreate('zfogg')
User.findOrCreate('zoodle')
exports.buildings = BUILDINGS = [
name: "251 North"
code: ""
number: "251"
lng: "-76.9496090325357"
lat: "38.99274005"
,
name: "94th Aero Squadron"
code: ""
number: "F08"
lng: "-76.9210122711411"
lat: "38.9781702"
,
name: "A Harvey Wiley Federal Building FDA"
code: "FDA"
number: "F01"
lng: "-76.9261965846492"
lat: "38.9770124"
,
name: "A.V. Williams Building"
code: "AVW"
number: "115"
lng: "-76.9363418604721"
lat: "38.99079905"
,
name: "Adele H. Stamp Student Union Buildings"
code: "SSU"
number: "163"
lng: "-76.9447218233957"
lat: "38.98816455"
,
name: "Agriculture Shed"
code: ""
number: "102"
lng: "-76.9407337234236"
lat: "38.99198175"
,
name: "Allegany Hall (Residence Hall)"
code: "AGY"
number: "024"
lng: "-76.9414146"
lat: "38.98157935"
,
name: "Alpha Chi Omega Sorority"
code: ""
number: "172"
lng: "-76.9360293156412"
lat: "38.9814223"
,
name: "Alpha Delta Pi Sorority"
code: ""
number: "170"
lng: "-76.9352079623363"
lat: "38.981239"
,
name: "Alpha Epsilon Phi Sorority"
code: ""
number: "136"
lng: "-76.93562535"
lat: "38.98324535"
,
name: "Alpha Epsilon Pi"
code: ""
number: "129"
lng: "-76.9351584"
lat: "38.9846571"
,
name: "Alpha Phi Sorority"
code: ""
number: "176"
lng: "-76.9352442035259"
lat: "38.9808974"
,
name: "Alpha Sigma Phi Fraternity"
code: ""
number: "134"
lng: "-76.9346571906316"
lat: "38.98329385"
,
name: "American Center for Physics"
code: ""
number: "F03"
lng: "-76.9299850652696"
lat: "38.97152715"
,
name: "Animal Science Service Building"
code: ""
number: "103"
lng: "-76.9410828738213"
lat: "38.99200365"
,
name: "Animal Science/Agricultural Engineering Building"
code: "ANS"
number: "142"
lng: "-76.9394592785606"
lat: "38.99164625"
,
name: "Annapolis Hall"
code: ""
number: "008"
lng: "-76.9400675383842"
lat: "38.9822303"
,
name: "Anne Arundel Hall"
code: "ANA"
number: "060"
lng: "-76.94673185"
lat: "38.98595025"
,
name: "Apiary"
code: ""
number: "156"
lng: "-76.9487330028218"
lat: "38.99193245"
,
name: "Aquatics Center"
code: ""
number: "399"
lng: "-76.9464978414166"
lat: "38.9933884"
,
name: "Architecture Building"
code: "ARC"
number: "145"
lng: "-76.9475462198212"
lat: "38.98423555"
,
name: "Art-Sociology Building"
code: "ASY"
number: "146"
lng: "-76.9478947752967"
lat: "38.98528145"
,
name: "Baltimore Hall (Residence Hall)"
code: "BAL"
number: "016"
lng: "-76.9421925409139"
lat: "38.9822469"
,
name: "Bel Air Hall (Residence Hall)"
code: "BEL"
number: "099"
lng: "-76.9426116"
lat: "38.99281705"
,
name: "Benjamin Building"
code: "EDU"
number: "143"
lng: "-76.9474215"
lat: "38.9867095"
,
name: "Biology-Psychology Building"
code: "BPS"
number: "144"
lng: "-76.9425929312518"
lat: "38.98872265"
,
name: "Biomolecular Sciences Building"
code: ""
number: "296"
lng: "-76.9376944355845"
lat: "38.99285415"
,
name: "Biosciences Research Building"
code: "BRB"
number: "413"
lng: "-76.9428094130756"
lat: "38.98897595"
,
name: "Blacksmith Shop"
code: ""
number: "119"
lng: "-76.94089305"
lat: "38.9922976"
,
name: "Building Services Operations Building"
code: ""
number: "215"
lng: "-76.9346037302467"
lat: "38.98626085"
,
name: "Byrd Stadium"
code: "BRD"
number: "364"
lng: "-76.9475181930938"
lat: "38.99033895"
,
name: "Byrd Stadium Maintenance Building"
code: ""
number: "369"
lng: "-76.9458711262378"
lat: "38.99025255"
,
name: "Calvert Hall (Residence Hall)"
code: "CAL"
number: "015"
lng: "-76.9423134911421"
lat: "38.98292125"
,
name: "Cambridge Community Center"
code: "CCC"
number: "097"
lng: "-76.9429981"
lat: "38.99217805"
,
name: "Cambridge Hall (Residence Hall)"
code: "CAM"
number: "096"
lng: "-76.9430174173239"
lat: "38.99172105"
,
name: "Caroline Hall (Residence Hall)"
code: "CAR"
number: "070"
lng: "-76.9457925"
lat: "38.9834966"
,
name: "Caroll Hall (Residence Hall)"
code: "CRL"
number: "065"
lng: "-76.9456253332123"
lat: "38.9839834"
,
name: "Catholic Student Center"
code: ""
number: "c104"
lng: "-76.9450898077953"
lat: "38.98056895"
,
name: "Cattle Barn"
code: ""
number: "110"
lng: "-76.9403740849109"
lat: "38.9923982"
,
name: "Cecil Hall"
code: "CEC"
number: "017"
lng: "-76.9416573737624"
lat: "38.9829463"
,
name: "Center for the Advanced Study of Language"
code: ""
number: "F02"
lng: "-76.9256352793971"
lat: "38.9741989"
,
name: "Center for Young Children"
code: ""
number: "381"
lng: "-76.9484830471325"
lat: "38.99352845"
,
name: "Central Animal Resources Facility"
code: ""
number: "087"
lng: "-76.9383409902712"
lat: "38.9917051"
,
name: "Centreville Hall (Residence Hall)"
code: "CEN"
number: "098"
lng: "-76.9421248952804"
lat: "38.992271"
,
name: "Chabad Jewish Student Center"
code: ""
number: "c102"
lng: "-76.9332465412442"
lat: "38.9803053"
,
name: "Challenge Course"
code: ""
number: "d108"
lng: "-76.9452623440046"
lat: "38.99415265"
,
name: "Chapel Fields"
code: ""
number: "d103"
lng: "-76.9389471052562"
lat: "38.9837497"
,
name: "Charles Hall (Residence Hall)"
code: "CHL"
number: "025"
lng: "-76.9405193937813"
lat: "38.9816226"
,
name: "Chemical and Nuclear Engineering Building"
code: "CHE"
number: "090"
lng: "-76.9395374654513"
lat: "38.99060895"
,
name: "Chemistry Building"
code: "CHM"
number: "091"
lng: "-76.940171718657"
lat: "38.98959475"
,
name: "Chesapeake Building"
code: ""
number: "338"
lng: "-76.94215145"
lat: "38.99828345"
,
name: "Chestertown Hall (Residence Hall)"
code: "CHS"
number: "121"
lng: "-76.9434348"
lat: "38.99280355"
,
name: "Chincoteague Hall"
code: ""
number: "059"
lng: "-76.9445192324004"
lat: "38.9852322"
,
name: "Clarice Smith Performing Arts Center"
code: "PAC"
number: "386"
lng: "-76.9504434053224"
lat: "38.9906807"
,
name: "Climbing Wall"
code: ""
number: "d107"
lng: "-76.9458984242081"
lat: "38.99419435"
,
name: "Cole Student Activities Building"
code: "COL"
number: "162"
lng: "-76.9467662555511"
lat: "38.98797155"
,
name: "College Park Fire Station"
code: ""
number: "802"
lng: "-76.9336943536548"
lat: "38.9903857"
,
name: "College Park LDS Institute of Religion"
code: ""
number: "c106"
lng: "-76.9474186908036"
lat: "38.981888"
,
name: "College Park Metro Station"
code: ""
number: "p256"
lng: "-76.92800716569322"
lat: "38.97825115"
,
name: "Comcast Center"
code: ""
number: "360"
lng: "-76.9413366066757"
lat: "38.9956305"
,
name: "Computer and Space Sciences Building"
code: "CSS"
number: "224"
lng: "-76.9425669540912"
lat: "38.9909812"
,
name: "Computer Science Instructional Center"
code: "CSI"
number: "406"
lng: "-76.9361946"
lat: "38.98999465"
,
name: "Cumberland Hall (Residence Hall)"
code: "CBD"
number: "122"
lng: "-76.9439244117559"
lat: "38.9922614"
,
name: "Delta Chi Fraternity"
code: ""
number: "131"
lng: "-76.9341748748757"
lat: "38.98432725"
,
name: "Delta Gamma Sorority"
code: ""
number: "175"
lng: "-76.9354323628479"
lat: "38.9805908"
,
name: "Delta Phi Epsilon Sorority"
code: ""
number: "173"
lng: "-76.9363517104313"
lat: "38.9807542"
,
name: "Denton Hall (Residence Hall)"
code: "DEN"
number: "252"
lng: "-76.9499786106729"
lat: "38.99223865"
,
name: "Dorchester Hall (Residence Hall)"
code: "DOR"
number: "064"
lng: "-76.9461812019092"
lat: "38.9867742"
,
name: "DOTS Impound"
code: ""
number: "p248"
lng: "-76.931625967854"
lat: "38.98291445"
,
name: "Easton Hall (Residence Hall)"
code: "EAS"
number: "253"
lng: "-76.9502564099265"
lat: "38.9930193"
,
name: "Elkton Hall (Residence Hall)"
code: "ELK"
number: "254"
lng: "-76.9489576649778"
lat: "38.9925108"
,
name: "Ellicott Hall (Residence Hall)"
code: "ELL"
number: "256"
lng: "-76.9466586443134"
lat: "38.9918228"
,
name: "Energy Plant"
code: ""
number: "001"
lng: "-76.935683449548"
lat: "38.98585305"
,
name: "Energy Research Facility"
code: ""
number: "223"
lng: "-76.936862109123"
lat: "38.99192655"
,
name: "Engineering Annex"
code: "EAB"
number: "093"
lng: "-76.9370872500394"
lat: "38.99073865"
,
name: "Engineering Fields"
code: ""
number: "d104"
lng: "-76.9375298581359"
lat: "38.98753725"
,
name: "Engineering Laboratory Building"
code: "EGL"
number: "089"
lng: "-76.9379467111811"
lat: "38.98926235"
,
name: "Environmental Services Facility"
code: ""
number: "344"
lng: "-76.9437334181758"
lat: "38.9965269"
,
name: "Episcopal Student Center"
code: ""
number: "c107"
lng: "-76.9358864728133"
lat: "38.9820118"
,
name: "Eppley Campus Recreation Center"
code: "CRC"
number: "068"
lng: "-76.9452703132821"
lat: "38.99358165"
,
name: "Field Hockey and Lacrosse Complex"
code: ""
number: "414"
lng: "-76.9368614977832"
lat: "38.99472745"
,
name: "Francis Scott Key Hall"
code: "KEY"
number: "048"
lng: "-76.9430892753028"
lat: "38.98506175"
,
name: "Fraternity Fields"
code: ""
number: "d105"
lng: "-76.9358798758218"
lat: "38.9839311"
,
name: "Frederick Hall (Residence Hall)"
code: "FRD"
number: "029"
lng: "-76.9407467785094"
lat: "38.98204525"
,
name: "Garrett Hall (Residence Hall)"
code: "GRT"
number: "031"
lng: "-76.9426804"
lat: "38.98323575"
,
name: "Gate House"
code: ""
number: "299"
lng: "-76.9357643250823"
lat: "38.98864785"
,
name: "Geology Building"
code: "GEO"
number: "237"
lng: "-76.9409099576973"
lat: "38.9881304"
,
name: "Golf Course Clubhouse"
code: "GLF"
number: "166"
lng: "-76.9547070820405"
lat: "38.99113325"
,
name: "Golf Course Maintenance"
code: ""
number: "314"
lng: "-76.9531787893985"
lat: "38.9973127"
,
name: "Golub Property"
code: ""
number: "808"
lng: "-76.9301602208332"
lat: "38.96954285"
,
name: "Gossett Football Team House"
code: ""
number: "379"
lng: "-76.9462999828512"
lat: "38.98978335"
,
name: "Graduate Garden Apartment Complex"
code: ""
number: "260"
lng: "-76.9573161915849"
lat: "38.9845594"
,
name: "Ground Herbicide/Pesticide Storage Building"
code: ""
number: "327"
lng: "-76.94380855"
lat: "38.9956043"
,
name: "Grounds Material and Equipment Building"
code: ""
number: "328"
lng: "-76.94330185"
lat: "38.99561845"
,
name: "Grounds Office Building"
code: ""
number: "050"
lng: "-76.9437121"
lat: "38.99586895"
,
name: "Grounds Operations and Maintenance Facility"
code: ""
number: "124"
lng: "-76.9428899"
lat: "38.9955292"
,
name: "H.J. Patterson Hall"
code: "HJP"
number: "073"
lng: "-76.9432766035148"
lat: "38.98708535"
,
name: "Hagerstown Hall (Residence Hall)"
code: "HAG"
number: "258"
lng: "-76.9474136961276"
lat: "38.9924384"
,
name: "Harford Hall (Residence Hall)"
code: "HRF"
number: "014"
lng: "-76.9408308448032"
lat: "38.9824913"
,
name: "Harrison Lab"
code: "HAR"
number: "002"
lng: "-76.9356386027744"
lat: "38.98703195"
,
name: "Health Center"
code: ""
number: "140"
lng: "-76.9447460619769"
lat: "38.9872158"
,
name: "Heavy Equipment Building"
code: ""
number: "216"
lng: "-76.9338248720072"
lat: "38.98591735"
,
name: "Herbert Wells Ice Rink"
code: ""
number: "F07"
lng: "-76.923644"
lat: "38.97594655"
,
name: "Holzapfel Hall (Horticulture)"
code: "HZF"
number: "074"
lng: "-76.9418893729789"
lat: "38.98687755"
,
name: "Hope Lutheran Church"
code: ""
number: "c103"
lng: "-76.9440416856427"
lat: "38.98023825"
,
name: "Hornbake Library"
code: "HBK"
number: "147"
lng: "-76.9415740921482"
lat: "38.9881767"
,
name: "Horse Barn"
code: ""
number: "108"
lng: "-76.9408036631471"
lat: "38.9916839"
,
name: "Howard Hall (Residence Hall)"
code: "HOW"
number: "028"
lng: "-76.9419727992265"
lat: "38.98196065"
,
name: "HVAC Building"
code: ""
number: "056"
lng: "-76.9437313"
lat: "38.9883195"
,
name: "Indoor Practice Facility"
code: ""
number: "309"
lng: "-76.9543808895367"
lat: "38.9896622"
,
name: "Institute for Physical Science and Technology"
code: "IPT"
number: "085"
lng: "-76.94097235"
lat: "38.9909191"
,
name: "Institute for Physical Science and Technology Storage"
code: "IPT"
number: "086"
lng: "-76.941282950122"
lat: "38.9908276"
,
name: "Instructional Television Facility"
code: "ITV"
number: "045"
lng: "-76.9383093000325"
lat: "38.98957525"
,
name: "J. Logan and Louise Schutz Football Practice Complex"
code: ""
number: "d100"
lng: "-76.9441988"
lat: "38.9902169"
,
name: "J.H. Kehoe Track and Ludwig Field"
code: ""
number: "388"
lng: "-76.9505711919375"
lat: "38.9878761"
,
name: "J.M. Patterson Building"
code: "JMP"
number: "083"
lng: "-76.940295409728"
lat: "38.9905177"
,
name: "Jeong H. Kim Engineering Building"
code: "KEB"
number: "225"
lng: "-76.938025589911"
lat: "38.9909099"
,
name: "Jimenez Hall"
code: "JMZ"
number: "034"
lng: "-76.94455005"
lat: "38.9867867"
,
name: "Jull Hall"
code: "JUL"
number: "227"
lng: "-76.9435848"
lat: "38.99087705"
,
name: "Kappa Alpha Fraternity"
code: ""
number: "126"
lng: "-76.936671546034"
lat: "38.98468565"
,
name: "Kent Hall (Residence Hall)"
code: "KNT"
number: "022"
lng: "-76.9418433747954"
lat: "38.98324525"
,
name: "Knight Hall"
code: "KNI"
number: "417"
lng: "-76.9484189"
lat: "38.98677255"
,
name: "Laboratory for Physical Sciences"
code: ""
number: "F09"
lng: "-76.9439779039198"
lat: "39.00369935"
,
name: "Laboratory for Telecommunications Sciences"
code: ""
number: "F10"
lng: "-76.943618302473"
lat: "39.00494075"
,
name: "Lamda Chi Alpha Fraternity"
code: ""
number: "132"
lng: "-76.9340275582429"
lat: "38.98394255"
,
name: "LaPlata Beach"
code: ""
number: "d101"
lng: "-76.9449878688894"
lat: "38.9925556"
,
name: "LaPlata Hall (Residence Hall)"
code: "LPA"
number: "259"
lng: "-76.9458907104711"
lat: "38.9924497"
,
name: "LEAFHouse"
code: ""
number: "F11"
lng: "-76.9413782929899"
lat: "38.9996436"
,
name: "Lee Building"
code: ""
number: "071"
lng: "-76.9395264"
lat: "38.9853629"
,
name: "LeFrak Hall"
code: "LEF"
number: "038"
lng: "-76.9436347921034"
lat: "38.9836849"
,
name: "Leonardtown Community Center"
code: ""
number: "250"
lng: "-76.9334623293913"
lat: "38.9833564"
,
name: "Leonardtown Housing"
code: ""
number: "238"
lng: "-76.9329720567893"
lat: "38.98288495"
,
name: "Leonardtown Housing 12"
code: ""
number: "249"
lng: "-76.933234970697"
lat: "38.98369045"
,
name: "Leonardtown Housing1"
code: ""
number: "248"
lng: "-76.9334063277435"
lat: "38.98395015"
,
name: "Leonardtown Housing10"
code: ""
number: "239"
lng: "-76.9328295569528"
lat: "38.98259145"
,
name: "Leonardtown Housing12"
code: ""
number: "244"
lng: "-76.9337842335381"
lat: "38.9848148"
,
name: "Leonardtown Housing2"
code: ""
number: "242"
lng: "-76.932084393545"
lat: "38.9826975"
,
name: "Leonardtown Housing3"
code: ""
number: "249"
lng: "-76.93322662199752"
lat: "38.98369225"
,
name: "Leonardtown Housing4"
code: ""
number: "247"
lng: "-76.9330952978623"
lat: "38.9842544"
,
name: "Leonardtown Housing5"
code: ""
number: "246"
lng: "-76.9335417977186"
lat: "38.98432445"
,
name: "Leonardtown Housing6"
code: ""
number: "245"
lng: "-76.9333959355818"
lat: "38.9846063"
,
name: "Leonardtown Housing7"
code: ""
number: "243"
lng: "-76.9320609074038"
lat: "38.98295505"
,
name: "Leonardtown Housing8"
code: ""
number: "241"
lng: "-76.9322669026445"
lat: "38.98256985"
,
name: "Leonardtown Housing9"
code: ""
number: "240"
lng: "-76.932593405223"
lat: "38.98269485"
,
name: "Leonardtown Office Building"
code: ""
number: "201"
lng: "-76.9324305637608"
lat: "38.9837051"
,
name: "Lot"
code: ""
number: "p117"
lng: "-76.9466368038263"
lat: "38.99133375"
,
name: "Main Administration Building"
code: ""
number: "077"
lng: "-76.93980935"
lat: "38.9860173"
,
name: "Manufacturing Building"
code: ""
number: "148"
lng: "-76.9394357215756"
lat: "38.99286495"
,
name: "Marie Mount Hall"
code: "MMH"
number: "046"
lng: "-76.94075799224"
lat: "38.9850013"
,
name: "Martin Hall"
code: "EGR"
number: "088"
lng: "-76.9379744096244"
lat: "38.9888688"
,
name: "Mathematics Building"
code: "MTH"
number: "084"
lng: "-76.9390688130666"
lat: "38.98862265"
,
name: "McKeldin Library"
code: "MCK"
number: "035"
lng: "-76.9451004712142"
lat: "38.98598155"
,
name: "McKeldin Mall"
code: "MKM"
number: "d106"
lng: "-76.9422685216793"
lat: "38.9859886"
,
name: "Memorial Chapel"
code: ""
number: "009"
lng: "-76.9408661301378"
lat: "38.98415015"
,
name: "Metro Parking Garage"
code: ""
number: "p255"
lng: "-76.92753801615109"
lat: "38.97901515"
,
name: "MFRI Drill Tower"
code: ""
number: "195"
lng: "-76.9299067182804"
lat: "38.983559"
,
name: "MFRI Fire Extinguisher Training Facility"
code: ""
number: "194"
lng: "-76.9287741861655"
lat: "38.98401155"
,
name: "MFRI Office/Classroom Building"
code: ""
number: "199"
lng: "-76.9288594688342"
lat: "38.98332815"
,
name: "MFRI Structural Firefighting Building"
code: ""
number: "196"
lng: "-76.928304153809"
lat: "38.98365305"
,
name: "Microbiology Building"
code: "MCB"
number: "231"
lng: "-76.9434034376443"
lat: "38.98811525"
,
name: "Mitchell Building"
code: ""
number: "052"
lng: "-76.9395651325614"
lat: "38.98670195"
,
name: "Montgomery Hall (Residence Hall)"
code: "MNT"
number: "032"
lng: "-76.9396753369829"
lat: "38.9820505"
,
name: "Morrill Hall"
code: "MOR"
number: "040"
lng: "-76.944173786884"
lat: "38.9843126"
,
name: "Motor Transportation Facility"
code: ""
number: "011"
lng: "-76.9336288120943"
lat: "38.98618425"
,
name: "Motorcycle Storage Building"
code: ""
number: "020"
lng: "-76.9361307352417"
lat: "38.98564745"
,
name: "Mowatt Lane Parking Garage"
code: ""
number: "404"
lng: "-76.9455747840072"
lat: "38.98182585"
,
name: "Mowatt Lane Substation"
code: ""
number: "400"
lng: "-76.9477371"
lat: "38.98371275"
,
name: "Neutral Buoyancy Research Facility"
code: ""
number: "382"
lng: "-76.9389810221139"
lat: "38.99288895"
,
name: "NOAA Center for Weather and Climate Prediction"
code: ""
number: "F04"
lng: "-76.9247140879834"
lat: "38.97199165"
,
name: "Nyumburu Cultural Center"
code: "NCC"
number: "232"
lng: "-76.9438057866404"
lat: "38.9881339"
,
name: "Oakland Hall (Residence Hall)"
code: ""
number: "419"
lng: "-76.9492226797435"
lat: "38.9938547"
,
name: "Paint Branch Drive Visitors Lot"
code: ""
number: "p194"
lng: "-76.9376652129763"
lat: "38.99197045"
,
name: "Parking Lot 11b"
code: ""
number: "p100"
lng: "-76.9362203954441"
lat: "38.9937827"
,
name: "Parking Lot 11c"
code: ""
number: "p101"
lng: "-76.93793265456"
lat: "38.99389735"
,
name: "Parking Lot 11h"
code: ""
number: "p102"
lng: "-76.9353551969982"
lat: "38.98254495"
,
name: "Parking Lot 15"
code: ""
number: "p103"
lng: "-76.9357555594133"
lat: "38.9810048"
,
name: "Parking Lot 16a"
code: ""
number: "p104"
lng: "-76.93612975"
lat: "38.98352975"
,
name: "Parking Lot 16b"
code: ""
number: "p105"
lng: "-76.9364642095338"
lat: "38.9830317"
,
name: "Parking Lot 16c"
code: ""
number: "p106"
lng: "-76.9319854884635"
lat: "38.98323445"
,
name: "Parking Lot 16d"
code: ""
number: "p107"
lng: "-76.9323682"
lat: "38.98234055"
,
name: "Parking Lot 16e"
code: ""
number: "p108"
lng: "-76.933294157523"
lat: "38.982799"
,
name: "Parking lot 16f"
code: ""
number: "p109"
lng: "-76.9339050233523"
lat: "38.9831728"
,
name: "Parking Lot 16h"
code: ""
number: "p110"
lng: "-76.9315366103807"
lat: "38.98314145"
,
name: "Parking Lot 19"
code: ""
number: "p253"
lng: "-76.94504517113101"
lat: "38.98196015"
,
name: "Parking Lot 1b"
code: ""
number: "p111"
lng: "-76.9494408398075"
lat: "38.9870827"
,
name: "Parking Lot 1d"
code: ""
number: "p112"
lng: "-76.9506524287848"
lat: "38.9861006"
,
name: "Parking Lot 2a"
code: ""
number: "p113"
lng: "-76.9481586165606"
lat: "38.99420335"
,
name: "Parking Lot 2b"
code: ""
number: "p114"
lng: "-76.9505459201508"
lat: "38.9931883"
,
name: "Parking Lot 2c"
code: ""
number: "p115"
lng: "-76.9477756507565"
lat: "38.99230265"
,
name: "Parking Lot 2d"
code: ""
number: "p116"
lng: "-76.9455376145164"
lat: "38.99238785"
,
name: "Parking Lot 2f"
code: ""
number: "p118"
lng: "-76.9414993575919"
lat: "38.99257355"
,
name: "Parking Lot 2g"
code: ""
number: "p236"
lng: "-76.9472887836851"
lat: "38.99440375"
,
name: "Parking Lot 4b"
code: ""
number: "p121"
lng: "-76.9411816982757"
lat: "38.9970546"
,
name: "Parking Lot 4j"
code: ""
number: "p124"
lng: "-76.939228467483"
lat: "38.99721475"
,
name: "Parking Lot 4n"
code: ""
number: "p234"
lng: "-76.9432485836194"
lat: "38.9975986"
,
name: "Parking Lot 5"
code: ""
number: "p126"
lng: "-76.9409605"
lat: "38.9897447"
,
name: "Parking Lot 6"
code: ""
number: "p127"
lng: "-76.9441223999729"
lat: "38.9948837"
,
name: "Parking Lot 9b"
code: ""
number: "p128"
lng: "-76.9379480992313"
lat: "38.99443435"
,
name: "Parking Lot 9c"
code: ""
number: "p129"
lng: "-76.9392026889215"
lat: "38.994263"
,
name: "Parking Lot A"
code: ""
number: "p130"
lng: "-76.9445554411228"
lat: "38.98430415"
,
name: "Parking Lot A*1"
code: ""
number: "p131"
lng: "-76.94812275"
lat: "38.9862722"
,
name: "Parking Lot B"
code: ""
number: "p135"
lng: "-76.9409603"
lat: "38.98961455"
,
name: "Parking Lot BB"
code: ""
number: "p136"
lng: "-76.9437432752211"
lat: "38.98868945"
,
name: "Parking Lot C1"
code: ""
number: "p137"
lng: "-76.9373082470676"
lat: "38.98662605"
,
name: "Parking Lot C2"
code: ""
number: "p138"
lng: "-76.9386466960901"
lat: "38.98525575"
,
name: "Parking Lot CC1"
code: ""
number: "p140"
lng: "-76.9395347604447"
lat: "38.99236635"
,
name: "Parking Lot CC2"
code: ""
number: "p141"
lng: "-76.940687"
lat: "38.9916206"
,
name: "Parking Lot D"
code: ""
number: "p143"
lng: "-76.94503695"
lat: "38.98512635"
,
name: "Parking Lot D"
code: ""
number: "p257"
lng: "-76.9453188124738"
lat: "38.98552325"
,
name: "Parking Lot E"
code: ""
number: "p125"
lng: "-76.9371846"
lat: "38.98927645"
,
name: "Parking Lot E*1"
code: ""
number: "p146"
lng: "-76.93950635"
lat: "38.98895295"
,
name: "Parking Lot E*2"
code: ""
number: "p147"
lng: "-76.93874685"
lat: "38.98936235"
,
name: "Parking Lot EE"
code: ""
number: "p148"
lng: "-76.93777255"
lat: "38.98956665"
,
name: "Parking Lot F"
code: ""
number: "p149"
lng: "-76.9362193313411"
lat: "38.98280315"
,
name: "Parking Lot FF"
code: ""
number: "p150"
lng: "-76.9399114407625"
lat: "38.99431135"
,
name: "Parking Lot FF2"
code: ""
number: "p151"
lng: "-76.9392185247061"
lat: "38.9931298"
,
name: "Parking Lot G"
code: ""
number: "p258"
lng: "-76.9385438831386"
lat: "38.9919303"
,
name: "Parking Lot H1"
code: ""
number: "p152"
lng: "-76.9408901957625"
lat: "38.98714445"
,
name: "Parking Lot HH1"
code: ""
number: "p155"
lng: "-76.9437705364812"
lat: "38.9871964"
,
name: "Parking Lot HH2"
code: ""
number: "p156"
lng: "-76.9487217884698"
lat: "38.99179395"
,
name: "Parking Lot I*"
code: ""
number: "p158"
lng: "-76.9369211101377"
lat: "38.99243845"
,
name: "Parking Lot II1"
code: ""
number: "p239"
lng: "-76.93916665"
lat: "38.9896806"
,
name: "Parking Lot JJ1"
code: ""
number: "p159"
lng: "-76.9483331340347"
lat: "38.98539355"
,
name: "Parking Lot JJ2"
code: ""
number: "p160"
lng: "-76.9493049661468"
lat: "38.9850641"
,
name: "Parking Lot JJ3"
code: ""
number: "p241"
lng: "-76.9491042944466"
lat: "38.98614505"
,
name: "Parking Lot K*2"
code: ""
number: "p162"
lng: "-76.936078711566"
lat: "38.98664725"
,
name: "Parking Lot K*4"
code: ""
number: "p163"
lng: "-76.9348330271331"
lat: "38.986413"
,
name: "Parking Lot K*5"
code: ""
number: "p250"
lng: "-76.9349324800649"
lat: "38.98521105"
,
name: "Parking Lot K1"
code: ""
number: "p164"
lng: "-8564367.45346"
lat: "4719533.97406"
,
name: "Parking Lot K2"
code: ""
number: "p165"
lng: "-76.9346156087241"
lat: "38.98554405"
,
name: "Parking Lot K4"
code: ""
number: "p235"
lng: "-76.9429211998132"
lat: "38.99640555"
,
name: "Parking Lot K5"
code: ""
number: "p167"
lng: "-76.9363026926599"
lat: "38.98296465"
,
name: "Parking Lot K6"
code: ""
number: "p168"
lng: "-76.9548861803616"
lat: "38.99083205"
,
name: "Parking Lot KK"
code: ""
number: "p169"
lng: "-76.939126849749"
lat: "38.99074565"
,
name: "Parking Lot KK1"
code: ""
number: "p237"
lng: "-76.9390710660194"
lat: "38.99103585"
,
name: "Parking Lot L"
code: ""
number: "p170"
lng: "-76.9390440880309"
lat: "38.985601"
,
name: "Parking Lot L*"
code: ""
number: "p171"
lng: "-76.9395731763922"
lat: "38.98602365"
,
name: "Parking Lot ML*"
code: ""
number: "p254"
lng: "-76.94504546108689"
lat: "38.9820502"
,
name: "Parking Lot MM1"
code: ""
number: "p172"
lng: "-76.9442824224091"
lat: "38.99208515"
,
name: "Parking Lot MM2"
code: ""
number: "p173"
lng: "-76.9416584717757"
lat: "38.9924268"
,
name: "Parking Lot MM3"
code: ""
number: "p174"
lng: "-76.94307075"
lat: "38.99134275"
,
name: "Parking Lot N"
code: ""
number: "p178"
lng: "-76.9483259502209"
lat: "38.992758"
,
name: "Parking Lot N*"
code: ""
number: "p177"
lng: "-76.9496207605017"
lat: "38.99315285"
,
name: "Parking Lot N*"
code: ""
number: "p183"
lng: "-76.9488709100114"
lat: "38.99306485"
,
name: "Parking Lot N*1"
code: ""
number: "p246"
lng: "-76.9399408810552"
lat: "38.982763"
,
name: "Parking Lot N*2"
code: ""
number: "p175"
lng: "-76.9388435512149"
lat: "38.98243265"
,
name: "Parking Lot N*3"
code: ""
number: "p176"
lng: "-76.943423876098"
lat: "38.9826911"
,
name: "Parking Lot N3"
code: ""
number: "p179"
lng: "-76.9455457475625"
lat: "38.9927533"
,
name: "Parking Lot N4"
code: ""
number: "p180"
lng: "-76.94428505"
lat: "38.9925133"
,
name: "Parking Lot N5"
code: ""
number: "p181"
lng: "-76.9321558294979"
lat: "38.9835147"
,
name: "Parking Lot N7"
code: ""
number: "p182"
lng: "-76.9477738109564"
lat: "38.9925869"
,
name: "Parking Lot N9"
code: ""
number: "p184"
lng: "-76.946957493951"
lat: "38.99306985"
,
name: "Parking Lot O1"
code: ""
number: "p185"
lng: "-76.94791655"
lat: "38.98399525"
,
name: "Parking Lot O3"
code: ""
number: "p187"
lng: "-76.9483002446715"
lat: "38.98442185"
,
name: "Parking Lot O4"
code: ""
number: "p188"
lng: "-76.9462971999815"
lat: "38.9843053"
,
name: "Parking Lot O5"
code: ""
number: "p189"
lng: "-76.9477835"
lat: "38.98355155"
,
name: "Parking Lot P*"
code: ""
number: "p191"
lng: "-76.94270605"
lat: "38.9981942"
,
name: "Parking Lot P1"
code: ""
number: "p192"
lng: "-76.9431896439374"
lat: "38.998125"
,
name: "Parking Lot P2"
code: ""
number: "p193"
lng: "-76.9421021500106"
lat: "38.99898715"
,
name: "Parking Lot PP*"
code: ""
number: "p195"
lng: "-76.9403724850234"
lat: "38.99385655"
,
name: "Parking Lot PP1"
code: ""
number: "p196"
lng: "-76.9420853788895"
lat: "38.9933824"
,
name: "Parking Lot PP2"
code: ""
number: "p197"
lng: "-76.9406987273544"
lat: "38.99385655"
,
name: "Parking Lot Q"
code: ""
number: "p198"
lng: "-76.9438760840556"
lat: "38.9911695"
,
name: "Parking Lot Q1"
code: ""
number: "p262"
lng: "-76.9434800720619"
lat: "38.9911726"
,
name: "Parking Lot R*"
code: ""
number: "p199"
lng: "-76.9456938440546"
lat: "38.9893944"
,
name: "Parking Lot R3"
code: ""
number: "p200"
lng: "-76.9455308111322"
lat: "38.9902248"
,
name: "Parking Lot R4"
code: ""
number: "p201"
lng: "-76.95470385"
lat: "38.99062525"
,
name: "Parking Lot RR"
code: ""
number: "p203"
lng: "-76.9409602"
lat: "38.98948445"
,
name: "Parking Lot RR1"
code: ""
number: "p251"
lng: "-76.9344902879538"
lat: "38.9869681"
,
name: "Parking Lot RR2"
code: ""
number: "p265"
lng: "-76.9379107465909"
lat: "38.9948436"
,
name: "Parking Lot RR2"
code: ""
number: "p252"
lng: "-76.9343316115429"
lat: "38.9876128"
,
name: "Parking Lot S3"
code: ""
number: "p204"
lng: "-76.9430233373487"
lat: "38.9925145"
,
name: "Parking Lot S4"
code: ""
number: "p205"
lng: "-76.943739453674"
lat: "38.98246725"
,
name: "Parking Lot S5"
code: ""
number: "p206"
lng: "-76.9494324090596"
lat: "38.9933015"
,
name: "Parking Lot S7"
code: ""
number: "p207"
lng: "-76.9477725756751"
lat: "38.99283285"
,
name: "Parking Lot S8"
code: ""
number: "p208"
lng: "-76.9467035382421"
lat: "38.99288295"
,
name: "Parking Lot SD*"
code: ""
number: "p209"
lng: "-76.9489187705445"
lat: "38.9902444"
,
name: "Parking Lot SS1"
code: ""
number: "p211"
lng: "-76.9441549116248"
lat: "38.9937615"
,
name: "Parking Lot SS2"
code: ""
number: "p212"
lng: "-76.9463360969161"
lat: "38.99397245"
,
name: "Parking Lot SS3"
code: ""
number: "p213"
lng: "-76.94667609821"
lat: "38.9942645"
,
name: "Parking Lot T"
code: ""
number: "p215"
lng: "-76.9377298240488"
lat: "38.98986685"
,
name: "Parking Lot TT"
code: ""
number: "p217"
lng: "-76.94036895"
lat: "38.98852595"
,
name: "Parking Lot U1"
code: ""
number: "p218"
lng: "-76.9437394"
lat: "38.9826911"
,
name: "Parking Lot U6"
code: ""
number: "p260"
lng: "-76.9450241678599"
lat: "38.9828241"
,
name: "Parking Lot U6"
code: ""
number: "p247"
lng: "-76.9449759499812"
lat: "38.9828063"
,
name: "Parking Lot UU"
code: ""
number: "p224"
lng: "-76.9398296251258"
lat: "38.99054885"
,
name: "Parking Lot W"
code: ""
number: "p225"
lng: "-76.9413860371152"
lat: "38.98486265"
,
name: "Parking Lot W1"
code: ""
number: "p244"
lng: "-76.9424845941378"
lat: "38.98494595"
,
name: "Parking Lot XX1"
code: ""
number: "p226"
lng: "-76.935617290379"
lat: "38.9900403"
,
name: "Parking Lot XX2"
code: ""
number: "p227"
lng: "-76.9367968153632"
lat: "38.99147625"
,
name: "Parking Lot XX4"
code: ""
number: "p229"
lng: "-76.937826475598"
lat: "38.99316725"
,
name: "Parking Lot XX5"
code: ""
number: "p233"
lng: "-76.93692095"
lat: "38.9909658"
,
name: "Parking Lot Y"
code: ""
number: "p230"
lng: "-76.9419977518114"
lat: "38.9840286"
,
name: "Parking Lot YC"
code: ""
number: "p231"
lng: "-76.9480388560794"
lat: "38.993677"
,
name: "Parking Lot Z"
code: ""
number: "p240"
lng: "-76.9485514812047"
lat: "38.98813685"
,
name: "Parking Lot Z*"
code: ""
number: "p232"
lng: "-76.9467581"
lat: "38.9887473"
,
name: "Parking Lot Z1"
code: ""
number: "p238"
lng: "-76.9475152"
lat: "38.9890159"
,
name: "Patapsco Building"
code: ""
number: "805"
lng: "-76.9249429909109"
lat: "38.9767149"
,
name: "Patuxent Building"
code: ""
number: "010"
lng: "-76.943483756875"
lat: "38.98863025"
,
name: "Pest Control Trailer"
code: ""
number: "385"
lng: "-76.9335922924701"
lat: "38.98515375"
,
name: "Phi Kappa Tau"
code: ""
number: "130"
lng: "-76.93463845"
lat: "38.9845988"
,
name: "Phi Sigma Sigma Sorority"
code: ""
number: "171"
lng: "-76.9356071135738"
lat: "38.9813563"
,
name: "Physical Distribution Center (Terrapin Trader)"
code: ""
number: "383"
lng: "-76.9292279695603"
lat: "38.9819567"
,
name: "Physics Building"
code: "PHY"
number: "082"
lng: "-76.94007719774"
lat: "38.9886972"
,
name: "Pi Kappa Alpha Fraternity"
code: ""
number: "127"
lng: "-76.936200475"
lat: "38.9846297"
,
name: "Plant Operations and Maintenance"
code: ""
number: "217"
lng: "-76.9340028269637"
lat: "38.98600515"
,
name: "Plant Operations and Maintenance Shop Building"
code: ""
number: "055"
lng: "-76.9334591527876"
lat: "38.98503875"
,
name: "Plant Operations and Maintenance Shop2"
code: ""
number: "101"
lng: "-76.9344044678528"
lat: "38.98657335"
,
name: "Plant Operations and Maintenance Shop3"
code: ""
number: "212"
lng: "-76.9342274025325"
lat: "38.9867869"
,
name: "Plant Operations and Maintenance Shops"
code: ""
number: "006"
lng: "-76.93452175"
lat: "38.9858077"
,
name: "Plant Operations and Maintenance Warehouse"
code: ""
number: "012"
lng: "-76.9339846304955"
lat: "38.98636865"
,
name: "Plant Science Building"
code: "PLS"
number: "036"
lng: "-76.9413596237531"
lat: "38.98873715"
,
name: "Pocomoke Building"
code: ""
number: "007"
lng: "-76.9371601416079"
lat: "38.98295055"
,
name: "Police Impound"
code: ""
number: "p249"
lng: "-76.9312547643577"
lat: "38.98301005"
,
name: "Police Substation"
code: ""
number: "018"
lng: "-76.9355837584007"
lat: "38.9825402"
,
name: "Potomac Building"
code: ""
number: "092"
lng: "-76.93830195"
lat: "38.9903918"
,
name: "Preinkert Field House"
code: "PKT"
number: "054"
lng: "-76.9461320708538"
lat: "38.9844511"
,
name: "Presidents Residence"
code: ""
number: "164"
lng: "-76.9523499501432"
lat: "38.9885031"
,
name: "Prince Georges Hall"
code: "PGG"
number: "021"
lng: "-76.941826725"
lat: "38.9825857"
,
name: "Pump House"
code: ""
number: "200"
lng: "-76.9465122"
lat: "38.9886431"
,
name: "Queen Annes Hall"
code: "QAN"
number: "061"
lng: "-76.9460068545931"
lat: "38.98519025"
,
name: "Reckord Armory"
code: "ARM"
number: "078"
lng: "-76.9389662139866"
lat: "38.9860268"
,
name: "Recreation Artificial Turf Field"
code: ""
number: "d102"
lng: "-76.9391820716749"
lat: "38.9954899"
,
name: "Recycling Center"
code: ""
number: "107"
lng: "-76.9372141594106"
lat: "38.99566315"
,
name: "Regents Drive Parking Garage"
code: ""
number: "202"
lng: "-76.9414582777038"
lat: "38.9897313"
,
name: "Research Greenhouse"
code: ""
number: "398"
lng: "-76.942996197787"
lat: "38.997127"
,
name: "Ritchie Coliseum"
code: "RIT"
number: "004"
lng: "-76.936456447182"
lat: "38.98504805"
,
name: "Robert E. Taylor Softball Stadium"
code: ""
number: "409"
lng: "-76.9397458168949"
lat: "38.99629445"
,
name: "Rosenbloom Hillel Center for Jewish Life"
code: ""
number: "c105"
lng: "-76.9487503080679"
lat: "38.98270895"
,
name: "Rossborough Inn"
code: ""
number: "080"
lng: "-76.9376116836577"
lat: "38.9853423"
,
name: "Route One Annex"
code: ""
number: "e8400"
lng: "-76.9332763982874"
lat: "38.99373045"
,
name: "Samuel Riggs IV Alumni Center"
code: ""
number: "407"
lng: "-76.9490713634912"
lat: "38.9894604"
,
name: "Satellite Central Utilities Building 1"
code: "SCUB 1"
number: "019"
lng: "-76.9404483999786"
lat: "38.98202525"
,
name: "Satellite Central Utilities Building 2"
code: "SCUB 2"
number: "067"
lng: "-76.9446545340067"
lat: "38.9835376"
,
name: "Satellite Central Utilities Building 3"
code: "SCUB 3"
number: "392"
lng: "-76.9459512158262"
lat: "38.98909655"
,
name: "Satellite Central Utilities Building 4"
code: "SCUB 4"
number: "405"
lng: "-76.9383176"
lat: "38.98979345"
,
name: "School of Public Health"
code: "SPH"
number: "255"
lng: "-76.9431633838994"
lat: "38.9934922"
,
name: "Security Booth (Campus Drive)"
code: ""
number: "295"
lng: "-76.936510313403"
lat: "38.98857565"
,
name: "Security Booth (Stadium Drive)"
code: ""
number: "297"
lng: "-76.95045855"
lat: "38.99198405"
,
name: "Service Building (University of Maryland, Police)"
code: ""
number: "003"
lng: "-76.9361406358543"
lat: "38.98597645"
,
name: "Sheep Barn"
code: ""
number: "109"
lng: "-76.9414407668993"
lat: "38.99169565"
,
name: "Shipley Field House (Baseball)"
code: ""
number: "159"
lng: "-76.9441134376408"
lat: "38.9889419"
,
name: "Shoemaker Building"
code: "SHM"
number: "037"
lng: "-76.942745495777"
lat: "38.9839376"
,
name: "Shriver Laboratory"
code: "SHR"
number: "075"
lng: "-76.94191685"
lat: "38.98727505"
,
name: "Shuttle Bus Facility"
code: ""
number: "p123"
lng: "-76.9365504141656"
lat: "38.9955167"
,
name: "Shuttle Bus Facility"
code: ""
number: "013"
lng: "-76.9338891741985"
lat: "38.98692845"
,
name: "Shuttle Bus Trailer 2"
code: ""
number: "410"
lng: "-76.93407295"
lat: "38.9870785"
,
name: "Sigma Delta Tau Sorority"
code: ""
number: "174"
lng: "-76.93587565"
lat: "38.9806496"
,
name: "Sigma Kappa Sorority"
code: ""
number: "135"
lng: "-76.93514475"
lat: "38.9832453"
,
name: "Sigma Phi Epsilon Fraternity"
code: ""
number: "133"
lng: "-76.9341870726646"
lat: "38.98356725"
,
name: "Skinner Building"
code: "SKN"
number: "044"
lng: "-76.9418413304917"
lat: "38.98480955"
,
name: "Somerset Hall (Residence Hall)"
code: "SOM"
number: "063"
lng: "-76.9455473"
lat: "38.9850026"
,
name: "South Campus Commons 1"
code: ""
number: "996"
lng: "-76.9429384672295"
lat: "38.98211405"
,
name: "South Campus Commons 2"
code: ""
number: "997"
lng: "-76.942933891664"
lat: "38.9828397"
,
name: "South Campus Commons 3"
code: ""
number: "998"
lng: "-76.9397267748109"
lat: "38.98147685"
,
name: "South Campus Commons 4"
code: ""
number: "999"
lng: "-76.9414232588701"
lat: "38.98128715"
,
name: "South Campus Commons 5"
code: ""
number: "974"
lng: "-76.94470195"
lat: "38.98275075"
,
name: "South Campus Commons 6"
code: ""
number: "975"
lng: "-76.9446791864948"
lat: "38.98216555"
,
name: "South Campus Commons 7"
code: ""
number: "281"
lng: "-76.9445591280214"
lat: "38.9815547"
,
name: "South Campus Dining Hall"
code: "SDH"
number: "026"
lng: "-76.9436837393588"
lat: "38.983048"
,
name: "South Gate (Regents Dr. and Rt. 1)"
code: ""
number: "401"
lng: "-76.938869"
lat: "38.9823717"
,
name: "Special Education Trailer One"
code: ""
number: "104"
lng: "-76.94795265"
lat: "38.9869185"
,
name: "Special Education Trailer Two"
code: ""
number: "105"
lng: "-76.94795265"
lat: "38.9867484"
,
name: "Special Services Office Building"
code: ""
number: "100"
lng: "-76.93443985"
lat: "38.98651785"
,
name: "St. Mary's Hall (Residence Hall)"
code: "STM"
number: "062"
lng: "-76.9455983683182"
lat: "38.9870071"
,
name: "Stadium Drive Parking Garage"
code: "SDG"
number: "218"
lng: "-76.9489997913519"
lat: "38.9910407"
,
name: "Susquehanna Hall"
code: "SQH"
number: "233"
lng: "-76.943749743933"
lat: "38.9820745"
,
name: "Symons Hall"
code: "SYM"
number: "076"
lng: "-76.9406455474374"
lat: "38.9870811"
,
name: "Talbot Hall (Residence Hall)"
code: "TAL"
number: "030"
lng: "-76.9422561"
lat: "38.9833387"
,
name: "Taliaferro Hall"
code: "TLF"
number: "043"
lng: "-76.9430948392412"
lat: "38.98484635"
,
name: "Tau Kappa Epsilon Fraternity"
code: ""
number: "128"
lng: "-76.93563395"
lat: "38.9846469"
,
name: "Tawes Fine Arts Building"
code: "TWS"
number: "141"
lng: "-76.9483238494655"
lat: "38.98598645"
,
name: "Technology Advancement Building"
code: ""
number: "387"
lng: "-76.938615024697"
lat: "38.99251055"
,
name: "Technology Ventures Building"
code: ""
number: "806"
lng: "-76.9255078851059"
lat: "38.97964495"
,
name: "Temporary Building"
code: ""
number: "208"
lng: "-76.9337296097556"
lat: "38.9852606"
,
name: "Temporary Building1"
code: ""
number: "207"
lng: "-76.9327854891442"
lat: "38.9836276"
,
name: "Temporary Building2"
code: ""
number: "204"
lng: "-76.9328960630658"
lat: "38.983738"
,
name: "Terrapin Trail Parking Garage"
code: ""
number: "403"
lng: "-76.9433528356638"
lat: "38.99497"
,
name: "The Diner"
code: ""
number: "257"
lng: "-76.9466253202392"
lat: "38.9925679"
,
name: "The Domain"
code: ""
number: "a004"
lng: "-76.9493582912198"
lat: "38.98418695"
,
name: "The Varsity"
code: ""
number: "a003"
lng: "-76.9344594915205"
lat: "38.99146725"
,
name: "Track/Soccer Ticket Booth"
code: ""
number: "389"
lng: "-76.9498700750045"
lat: "38.9874593"
,
name: "Turner Hall (Dairy/Visitor Center)"
code: "TUR"
number: "079"
lng: "-76.9373371743758"
lat: "38.98614065"
,
name: "Tydings Hall"
code: "TYD"
number: "042"
lng: "-76.9440007655894"
lat: "38.9848516"
,
name: "Tyser Tower"
code: ""
number: "361"
lng: "-76.9478028839735"
lat: "38.98970955"
,
name: "UMUC Hotel"
code: ""
number: "348"
lng: "-76.9552482403172"
lat: "38.9860612"
,
name: "UMUC Inn and Conference Center (ICC)"
code: "ICC"
number: "345"
lng: "-76.9540770364453"
lat: "38.9860659"
,
name: "UMUC Student and Faculty Services Center"
code: "SFSC"
number: "346"
lng: "-76.9533396302397"
lat: "38.9866594"
,
name: "UMUC/University College Garage (PGUC)"
code: "PGUC"
number: "347"
lng: "-76.9544177389556"
lat: "38.98699405"
,
name: "Union Lane Parking Garage"
code: "ULG"
number: "179"
lng: "-76.9458968038597"
lat: "38.98840855"
,
name: "University Baptist Church"
code: ""
number: "c100"
lng: "-76.95374930076"
lat: "38.98425975"
,
name: "University Hills Apartments1"
code: ""
number: "288"
lng: "-76.9562455643617"
lat: "38.98514415"
,
name: "University Hills Apartments2"
code: ""
number: "287"
lng: "-76.9572075604853"
lat: "38.9850375"
,
name: "University Hills Apartments3"
code: ""
number: "286"
lng: "-76.9582383073659"
lat: "38.98494235"
,
name: "University Hills Apartments4"
code: ""
number: "292"
lng: "-76.9583539620815"
lat: "38.98442205"
,
name: "University Hills Apartments5"
code: ""
number: "291"
lng: "-76.9573552914719"
lat: "38.9844605"
,
name: "University Hills Apartments6"
code: ""
number: "290"
lng: "-76.9562790737231"
lat: "38.9843953"
,
name: "University Hills Apartments7"
code: ""
number: "289"
lng: "-76.9558676954921"
lat: "38.98447435"
,
name: "University United Methodist Church"
code: ""
number: "c101"
lng: "-76.9513539156316"
lat: "38.98421265"
,
name: "University View"
code: ""
number: "a001"
lng: "-76.9344298975591"
lat: "38.99256455"
,
name: "University View II"
code: ""
number: "a002"
lng: "-76.9336210376858"
lat: "38.99251945"
,
name: "Univesity Research Center (North)"
code: ""
number: "F05"
lng: "-76.9222887214757"
lat: "38.9722927"
,
name: "Univesity Research Center (South)"
code: ""
number: "F06"
lng: "-76.9218988073354"
lat: "38.970879"
,
name: "Van Munching Hall"
code: "VMH"
number: "039"
lng: "-76.9470382916646"
lat: "38.9830467"
,
name: "Varsity Sports Teamhouse"
code: ""
number: "158"
lng: "-76.9451814170622"
lat: "38.99006865"
,
name: "Washington Hall (Residence Hall)"
code: "WSH"
number: "023"
lng: "-76.9413924749198"
lat: "38.9818076"
,
name: "Wicomico Hall (Residence Hall)"
code: "WIC"
number: "069"
lng: "-76.9458442795105"
lat: "38.9837427"
,
name: "Wind Tunnel Building"
code: "WTU"
number: "081"
lng: "-76.9368484609687"
lat: "38.9899002"
,
name: "Woods Hall"
code: "WDS"
number: "047"
lng: "-76.9418451233709"
lat: "38.9851106"
,
name: "Worchester Hall (Residence Hall)"
code: "WOR"
number: "051"
lng: "-76.9449993420465"
lat: "38.98467285"
,
name: "Zeta Beta Tau"
code: ""
number: "139"
lng: "-76.9371918048901"
lat: "38.9832052"
,
name: "Zeta Psi Fraternity"
code: ""
number: "138"
lng: "-76.9366895"
lat: "38.9832584"
,
name: "Zeta Tau Alpha"
code: ""
number: "137"
lng: "-76.9361831249379"
lat: "38.98325535"
]
| 121242 | mongoose = require('mongoose')
{Building, User, Course} = require './models'
# Seed the database with campus data. Upserts every entry of BUILDINGS
# (keyed on its unique building `number`, so re-running refreshes existing
# rows instead of duplicating them) and ensures the two bootstrap user
# accounts exist. Idempotent; safe to call on every server start.
# All writes are fire-and-forget: failures are logged, not raised.
exports.bootstrap = ->
  for b in BUILDINGS
    # `do (b) ->` captures the loop variable per iteration; CoffeeScript
    # `for` variables are function-scoped, so without it every async
    # callback below would report the number of the *last* building.
    do (b) ->
      Building.update {number: b.number}, b, upsert: true, (err) ->
        # Don't swallow upsert failures silently — surface them in the log.
        console.error "bootstrap: failed to upsert building #{b.number}", err if err?
  User.findOrCreate('zfogg')
  User.findOrCreate('zoodle')
exports.buildings = BUILDINGS = [
name: "251 North"
code: ""
number: "251"
lng: "-76.9496090325357"
lat: "38.99274005"
,
name: "94th Aero Squadron"
code: ""
number: "F08"
lng: "-76.9210122711411"
lat: "38.9781702"
,
name: "<NAME> Federal Building FDA"
code: "FDA"
number: "F01"
lng: "-76.9261965846492"
lat: "38.9770124"
,
name: "<NAME>"
code: "AVW"
number: "115"
lng: "-76.9363418604721"
lat: "38.99079905"
,
name: "<NAME> Student Union Buildings"
code: "SSU"
number: "163"
lng: "-76.9447218233957"
lat: "38.98816455"
,
name: "Agriculture Shed"
code: ""
number: "102"
lng: "-76.9407337234236"
lat: "38.99198175"
,
name: "Allegany Hall (Residence Hall)"
code: "AGY"
number: "024"
lng: "-76.9414146"
lat: "38.98157935"
,
name: "Alpha Chi Omega Sorority"
code: ""
number: "172"
lng: "-76.9360293156412"
lat: "38.9814223"
,
name: "Alpha Delta Pi Sorority"
code: ""
number: "170"
lng: "-76.9352079623363"
lat: "38.981239"
,
name: "Alpha Epsilon Phi Sorority"
code: ""
number: "136"
lng: "-76.93562535"
lat: "38.98324535"
,
name: "Alpha Epsilon Pi"
code: ""
number: "129"
lng: "-76.9351584"
lat: "38.9846571"
,
name: "Alpha Phi Sorority"
code: ""
number: "176"
lng: "-76.9352442035259"
lat: "38.9808974"
,
name: "Alpha Sigma Phi Fraternity"
code: ""
number: "134"
lng: "-76.9346571906316"
lat: "38.98329385"
,
name: "American Center for Physics"
code: ""
number: "F03"
lng: "-76.9299850652696"
lat: "38.97152715"
,
name: "Animal Science Service Building"
code: ""
number: "103"
lng: "-76.9410828738213"
lat: "38.99200365"
,
name: "Animal Science/Agricultural Engineering Building"
code: "ANS"
number: "142"
lng: "-76.9394592785606"
lat: "38.99164625"
,
name: "<NAME>"
code: ""
number: "008"
lng: "-76.9400675383842"
lat: "38.9822303"
,
name: "<NAME>"
code: "ANA"
number: "060"
lng: "-76.94673185"
lat: "38.98595025"
,
name: "<NAME>"
code: ""
number: "156"
lng: "-76.9487330028218"
lat: "38.99193245"
,
name: "Aquatics Center"
code: ""
number: "399"
lng: "-76.9464978414166"
lat: "38.9933884"
,
name: "Architecture Building"
code: "ARC"
number: "145"
lng: "-76.9475462198212"
lat: "38.98423555"
,
name: "Art-Sociology Building"
code: "ASY"
number: "146"
lng: "-76.9478947752967"
lat: "38.98528145"
,
name: "Baltimore Hall (Residence Hall)"
code: "BAL"
number: "016"
lng: "-76.9421925409139"
lat: "38.9822469"
,
name: "Bel Air Hall (Residence Hall)"
code: "BEL"
number: "099"
lng: "-76.9426116"
lat: "38.99281705"
,
name: "<NAME>"
code: "EDU"
number: "143"
lng: "-76.9474215"
lat: "38.9867095"
,
name: "Biology-Psychology Building"
code: "BPS"
number: "144"
lng: "-76.9425929312518"
lat: "38.98872265"
,
name: "Biomolecular Sciences Building"
code: ""
number: "296"
lng: "-76.9376944355845"
lat: "38.99285415"
,
name: "Biosciences Research Building"
code: "BRB"
number: "413"
lng: "-76.9428094130756"
lat: "38.98897595"
,
name: "<NAME>"
code: ""
number: "119"
lng: "-76.94089305"
lat: "38.9922976"
,
name: "Building Services Operations Building"
code: ""
number: "215"
lng: "-76.9346037302467"
lat: "38.98626085"
,
name: "<NAME> Stadium"
code: "BRD"
number: "364"
lng: "-76.9475181930938"
lat: "38.99033895"
,
name: "Byrd Stadium Maintenance Building"
code: ""
number: "369"
lng: "-76.9458711262378"
lat: "38.99025255"
,
name: "<NAME> (Residence Hall)"
code: "CAL"
number: "015"
lng: "-76.9423134911421"
lat: "38.98292125"
,
name: "Cambridge Community Center"
code: "CCC"
number: "097"
lng: "-76.9429981"
lat: "38.99217805"
,
name: "Cam<NAME> (Residence Hall)"
code: "CAM"
number: "096"
lng: "-76.9430174173239"
lat: "38.99172105"
,
name: "<NAME> (Residence Hall)"
code: "CAR"
number: "070"
lng: "-76.9457925"
lat: "38.9834966"
,
name: "<NAME> (Residence Hall)"
code: "CRL"
number: "065"
lng: "-76.9456253332123"
lat: "38.9839834"
,
name: "Cath<NAME>"
code: ""
number: "c104"
lng: "-76.9450898077953"
lat: "38.98056895"
,
name: "<NAME>"
code: ""
number: "110"
lng: "-76.9403740849109"
lat: "38.9923982"
,
name: "<NAME>"
code: "CEC"
number: "017"
lng: "-76.9416573737624"
lat: "38.9829463"
,
name: "Center for the Advanced Study of Language"
code: ""
number: "F02"
lng: "-76.9256352793971"
lat: "38.9741989"
,
name: "Center for Young Children"
code: ""
number: "381"
lng: "-76.9484830471325"
lat: "38.99352845"
,
name: "Central Animal Resources Facility"
code: ""
number: "087"
lng: "-76.9383409902712"
lat: "38.9917051"
,
name: "<NAME> (Residence Hall)"
code: "CEN"
number: "098"
lng: "-76.9421248952804"
lat: "38.992271"
,
name: "Chabad Jewish Student Center"
code: ""
number: "c102"
lng: "-76.9332465412442"
lat: "38.9803053"
,
name: "Challenge Course"
code: ""
number: "d108"
lng: "-76.9452623440046"
lat: "38.99415265"
,
name: "<NAME>"
code: ""
number: "d103"
lng: "-76.9389471052562"
lat: "38.9837497"
,
name: "<NAME> (Residence Hall)"
code: "CHL"
number: "025"
lng: "-76.9405193937813"
lat: "38.9816226"
,
name: "Chemical and Nuclear Engineering Building"
code: "CHE"
number: "090"
lng: "-76.9395374654513"
lat: "38.99060895"
,
name: "Chemistry Building"
code: "CHM"
number: "091"
lng: "-76.940171718657"
lat: "38.98959475"
,
name: "Chesapeake Building"
code: ""
number: "338"
lng: "-76.94215145"
lat: "38.99828345"
,
name: "Chestertown Hall (Residence Hall)"
code: "CHS"
number: "121"
lng: "-76.9434348"
lat: "38.99280355"
,
name: "Chincoteague Hall"
code: ""
number: "059"
lng: "-76.9445192324004"
lat: "38.9852322"
,
name: "<NAME> Performing Arts Center"
code: "PAC"
number: "386"
lng: "-76.9504434053224"
lat: "38.9906807"
,
name: "<NAME>"
code: ""
number: "d107"
lng: "-76.9458984242081"
lat: "38.99419435"
,
name: "Cole Student Activities Building"
code: "COL"
number: "162"
lng: "-76.9467662555511"
lat: "38.98797155"
,
name: "College Park Fire Station"
code: ""
number: "802"
lng: "-76.9336943536548"
lat: "38.9903857"
,
name: "College Park LDS Institute of Religion"
code: ""
number: "c106"
lng: "-76.9474186908036"
lat: "38.981888"
,
name: "College Park Metro Station"
code: ""
number: "p256"
lng: "-76.92800716569322"
lat: "38.97825115"
,
name: "Comcast Center"
code: ""
number: "360"
lng: "-76.9413366066757"
lat: "38.9956305"
,
name: "Computer and Space Sciences Building"
code: "CSS"
number: "224"
lng: "-76.9425669540912"
lat: "38.9909812"
,
name: "Computer Science Instructional Center"
code: "CSI"
number: "406"
lng: "-76.9361946"
lat: "38.98999465"
,
name: "Cumberland Hall (Residence Hall)"
code: "CBD"
number: "122"
lng: "-76.9439244117559"
lat: "38.9922614"
,
name: "<NAME>"
code: ""
number: "131"
lng: "-76.9341748748757"
lat: "38.98432725"
,
name: "<NAME>"
code: ""
number: "175"
lng: "-76.9354323628479"
lat: "38.9805908"
,
name: "<NAME> <NAME>"
code: ""
number: "173"
lng: "-76.9363517104313"
lat: "38.9807542"
,
name: "D<NAME> Hall (Residence Hall)"
code: "DEN"
number: "252"
lng: "-76.9499786106729"
lat: "38.99223865"
,
name: "D<NAME>ster Hall (Residence Hall)"
code: "DOR"
number: "064"
lng: "-76.9461812019092"
lat: "38.9867742"
,
name: "<NAME>"
code: ""
number: "p248"
lng: "-76.931625967854"
lat: "38.98291445"
,
name: "<NAME>on Hall (Residence Hall)"
code: "EAS"
number: "253"
lng: "-76.9502564099265"
lat: "38.9930193"
,
name: "<NAME> Hall (Residence Hall)"
code: "ELK"
number: "254"
lng: "-76.9489576649778"
lat: "38.9925108"
,
name: "<NAME> Hall (Residence Hall)"
code: "ELL"
number: "256"
lng: "-76.9466586443134"
lat: "38.9918228"
,
name: "Energy Plant"
code: ""
number: "001"
lng: "-76.935683449548"
lat: "38.98585305"
,
name: "Energy Research Facility"
code: ""
number: "223"
lng: "-76.936862109123"
lat: "38.99192655"
,
name: "Engineering Annex"
code: "EAB"
number: "093"
lng: "-76.9370872500394"
lat: "38.99073865"
,
name: "Engineering Fields"
code: ""
number: "d104"
lng: "-76.9375298581359"
lat: "38.98753725"
,
name: "Engineering Laboratory Building"
code: "EGL"
number: "089"
lng: "-76.9379467111811"
lat: "38.98926235"
,
name: "Environmental Services Facility"
code: ""
number: "344"
lng: "-76.9437334181758"
lat: "38.9965269"
,
name: "Episcopal Student Center"
code: ""
number: "c107"
lng: "-76.9358864728133"
lat: "38.9820118"
,
name: "Eppley Campus Recreation Center"
code: "CRC"
number: "068"
lng: "-76.9452703132821"
lat: "38.99358165"
,
name: "Field Hockey and Lacrosse Complex"
code: ""
number: "414"
lng: "-76.9368614977832"
lat: "38.99472745"
,
name: "<NAME>"
code: "KEY"
number: "048"
lng: "-76.9430892753028"
lat: "38.98506175"
,
name: "Fraternity Fields"
code: ""
number: "d105"
lng: "-76.9358798758218"
lat: "38.9839311"
,
name: "<NAME> (Residence Hall)"
code: "FRD"
number: "029"
lng: "-76.9407467785094"
lat: "38.98204525"
,
name: "<NAME> (Residence Hall)"
code: "GRT"
number: "031"
lng: "-76.9426804"
lat: "38.98323575"
,
name: "Gate House"
code: ""
number: "299"
lng: "-76.9357643250823"
lat: "38.98864785"
,
name: "Geology Building"
code: "GEO"
number: "237"
lng: "-76.9409099576973"
lat: "38.9881304"
,
name: "Golf Course Clubhouse"
code: "GLF"
number: "166"
lng: "-76.9547070820405"
lat: "38.99113325"
,
name: "Golf Course Maintenance"
code: ""
number: "314"
lng: "-76.9531787893985"
lat: "38.9973127"
,
name: "Golub Property"
code: ""
number: "808"
lng: "-76.9301602208332"
lat: "38.96954285"
,
name: "Gossett Football Team House"
code: ""
number: "379"
lng: "-76.9462999828512"
lat: "38.98978335"
,
name: "Graduate Garden Apartment Complex"
code: ""
number: "260"
lng: "-76.9573161915849"
lat: "38.9845594"
,
name: "Ground Herbicide/Pesticide Storage Building"
code: ""
number: "327"
lng: "-76.94380855"
lat: "38.9956043"
,
name: "Grounds Material and Equipment Building"
code: ""
number: "328"
lng: "-76.94330185"
lat: "38.99561845"
,
name: "Grounds Office Building"
code: ""
number: "050"
lng: "-76.9437121"
lat: "38.99586895"
,
name: "Grounds Operations and Maintenance Facility"
code: ""
number: "124"
lng: "-76.9428899"
lat: "38.9955292"
,
name: "<NAME>"
code: "HJP"
number: "073"
lng: "-76.9432766035148"
lat: "38.98708535"
,
name: "H<NAME>stown Hall (Residence Hall)"
code: "HAG"
number: "258"
lng: "-76.9474136961276"
lat: "38.9924384"
,
name: "<NAME> Hall (Residence Hall)"
code: "HRF"
number: "014"
lng: "-76.9408308448032"
lat: "38.9824913"
,
name: "<NAME>"
code: "HAR"
number: "002"
lng: "-76.9356386027744"
lat: "38.98703195"
,
name: "Health Center"
code: ""
number: "140"
lng: "-76.9447460619769"
lat: "38.9872158"
,
name: "Heavy Equipment Building"
code: ""
number: "216"
lng: "-76.9338248720072"
lat: "38.98591735"
,
name: "<NAME>"
code: ""
number: "F07"
lng: "-76.923644"
lat: "38.97594655"
,
name: "<NAME> (Horticulture)"
code: "HZF"
number: "074"
lng: "-76.9418893729789"
lat: "38.98687755"
,
name: "<NAME>"
code: ""
number: "c103"
lng: "-76.9440416856427"
lat: "38.98023825"
,
name: "<NAME>"
code: "HBK"
number: "147"
lng: "-76.9415740921482"
lat: "38.9881767"
,
name: "<NAME>"
code: ""
number: "108"
lng: "-76.9408036631471"
lat: "38.9916839"
,
name: "<NAME> (Residence Hall)"
code: "HOW"
number: "028"
lng: "-76.9419727992265"
lat: "38.98196065"
,
name: "HVAC Building"
code: ""
number: "056"
lng: "-76.9437313"
lat: "38.9883195"
,
name: "Indoor Practice Facility"
code: ""
number: "309"
lng: "-76.9543808895367"
lat: "38.9896622"
,
name: "Institute for Physical Science and Technology"
code: "IPT"
number: "085"
lng: "-76.94097235"
lat: "38.9909191"
,
name: "Institute for Physical Science and Technology Storage"
code: "IPT"
number: "086"
lng: "-76.941282950122"
lat: "38.9908276"
,
name: "Instructional Television Facility"
code: "ITV"
number: "045"
lng: "-76.9383093000325"
lat: "38.98957525"
,
name: "<NAME> and <NAME> Football Practice Complex"
code: ""
number: "d100"
lng: "-76.9441988"
lat: "38.9902169"
,
name: "<NAME> and <NAME>"
code: ""
number: "388"
lng: "-76.9505711919375"
lat: "38.9878761"
,
name: "<NAME>"
code: "JMP"
number: "083"
lng: "-76.940295409728"
lat: "38.9905177"
,
name: "<NAME> Building"
code: "KEB"
number: "225"
lng: "-76.938025589911"
lat: "38.9909099"
,
name: "<NAME>"
code: "JMZ"
number: "034"
lng: "-76.94455005"
lat: "38.9867867"
,
name: "<NAME>"
code: "JUL"
number: "227"
lng: "-76.9435848"
lat: "38.99087705"
,
name: "<NAME>"
code: ""
number: "126"
lng: "-76.936671546034"
lat: "38.98468565"
,
name: "<NAME> (Residence Hall)"
code: "KNT"
number: "022"
lng: "-76.9418433747954"
lat: "38.98324525"
,
name: "<NAME>"
code: "KNI"
number: "417"
lng: "-76.9484189"
lat: "38.98677255"
,
name: "Laboratory for Physical Sciences"
code: ""
number: "F09"
lng: "-76.9439779039198"
lat: "39.00369935"
,
name: "Laboratory for Telecommunications Sciences"
code: ""
number: "F10"
lng: "-76.943618302473"
lat: "39.00494075"
,
name: "<NAME>"
code: ""
number: "132"
lng: "-76.9340275582429"
lat: "38.98394255"
,
name: "<NAME>"
code: ""
number: "d101"
lng: "-76.9449878688894"
lat: "38.9925556"
,
name: "<NAME> (Residence Hall)"
code: "LPA"
number: "259"
lng: "-76.9458907104711"
lat: "38.9924497"
,
name: "<NAME>"
code: ""
number: "F11"
lng: "-76.9413782929899"
lat: "38.9996436"
,
name: "<NAME>"
code: ""
number: "071"
lng: "-76.9395264"
lat: "38.9853629"
,
name: "<NAME>"
code: "LEF"
number: "038"
lng: "-76.9436347921034"
lat: "38.9836849"
,
name: "<NAME> Community Center"
code: ""
number: "250"
lng: "-76.9334623293913"
lat: "38.9833564"
,
name: "Leonardtown Housing"
code: ""
number: "238"
lng: "-76.9329720567893"
lat: "38.98288495"
,
name: "Leonardtown Housing 12"
code: ""
number: "249"
lng: "-76.933234970697"
lat: "38.98369045"
,
name: "Leonardtown Housing1"
code: ""
number: "248"
lng: "-76.9334063277435"
lat: "38.98395015"
,
name: "Leonardtown Housing10"
code: ""
number: "239"
lng: "-76.9328295569528"
lat: "38.98259145"
,
name: "Le<NAME>ardtown Housing12"
code: ""
number: "244"
lng: "-76.9337842335381"
lat: "38.9848148"
,
name: "Le<NAME>ardtown Housing2"
code: ""
number: "242"
lng: "-76.932084393545"
lat: "38.9826975"
,
name: "<NAME>town Housing3"
code: ""
number: "249"
lng: "-76.93322662199752"
lat: "38.98369225"
,
name: "<NAME>town Housing4"
code: ""
number: "247"
lng: "-76.9330952978623"
lat: "38.9842544"
,
name: "Leonardtown Housing5"
code: ""
number: "246"
lng: "-76.9335417977186"
lat: "38.98432445"
,
name: "<NAME>"
code: ""
number: "245"
lng: "-76.9333959355818"
lat: "38.9846063"
,
name: "<NAME>"
code: ""
number: "243"
lng: "-76.9320609074038"
lat: "38.98295505"
,
name: "<NAME>"
code: ""
number: "241"
lng: "-76.9322669026445"
lat: "38.98256985"
,
name: "<NAME>9"
code: ""
number: "240"
lng: "-76.932593405223"
lat: "38.98269485"
,
name: "<NAME> Office Building"
code: ""
number: "201"
lng: "-76.9324305637608"
lat: "38.9837051"
,
name: "<NAME>"
code: ""
number: "p117"
lng: "-76.9466368038263"
lat: "38.99133375"
,
name: "Main Administration Building"
code: ""
number: "077"
lng: "-76.93980935"
lat: "38.9860173"
,
name: "Manufacturing Building"
code: ""
number: "148"
lng: "-76.9394357215756"
lat: "38.99286495"
,
name: "<NAME>"
code: "MMH"
number: "046"
lng: "-76.94075799224"
lat: "38.9850013"
,
name: "<NAME>"
code: "EGR"
number: "088"
lng: "-76.9379744096244"
lat: "38.9888688"
,
name: "Mathematics Building"
code: "MTH"
number: "084"
lng: "-76.9390688130666"
lat: "38.98862265"
,
name: "<NAME>"
code: "MCK"
number: "035"
lng: "-76.9451004712142"
lat: "38.98598155"
,
name: "<NAME>"
code: "MKM"
number: "d106"
lng: "-76.9422685216793"
lat: "38.9859886"
,
name: "<NAME>"
code: ""
number: "009"
lng: "-76.9408661301378"
lat: "38.98415015"
,
name: "<NAME> Park<NAME> Gar<NAME>"
code: ""
number: "p255"
lng: "-76.92753801615109"
lat: "38.97901515"
,
name: "<NAME>FRI Drill Tower"
code: ""
number: "195"
lng: "-76.9299067182804"
lat: "38.983559"
,
name: "MFRI Fire Extinguisher Training Facility"
code: ""
number: "194"
lng: "-76.9287741861655"
lat: "38.98401155"
,
name: "MFRI Office/Classroom Building"
code: ""
number: "199"
lng: "-76.9288594688342"
lat: "38.98332815"
,
name: "MFRI Structural Firefighting Building"
code: ""
number: "196"
lng: "-76.928304153809"
lat: "38.98365305"
,
name: "Microbiology Building"
code: "MCB"
number: "231"
lng: "-76.9434034376443"
lat: "38.98811525"
,
name: "<NAME>"
code: ""
number: "052"
lng: "-76.9395651325614"
lat: "38.98670195"
,
name: "<NAME> (Residence Hall)"
code: "MNT"
number: "032"
lng: "-76.9396753369829"
lat: "38.9820505"
,
name: "<NAME>"
code: "MOR"
number: "040"
lng: "-76.944173786884"
lat: "38.9843126"
,
name: "Motor Transportation Facility"
code: ""
number: "011"
lng: "-76.9336288120943"
lat: "38.98618425"
,
name: "Motorcycle Storage Building"
code: ""
number: "020"
lng: "-76.9361307352417"
lat: "38.98564745"
,
name: "M<NAME>att Lane Parking Garage"
code: ""
number: "404"
lng: "-76.9455747840072"
lat: "38.98182585"
,
name: "Mowatt Lane Substation"
code: ""
number: "400"
lng: "-76.9477371"
lat: "38.98371275"
,
name: "Neutral Buoyancy Research Facility"
code: ""
number: "382"
lng: "-76.9389810221139"
lat: "38.99288895"
,
name: "NOAA Center for Weather and Climate Prediction"
code: ""
number: "F04"
lng: "-76.9247140879834"
lat: "38.97199165"
,
name: "Nyumburu Cultural Center"
code: "NCC"
number: "232"
lng: "-76.9438057866404"
lat: "38.9881339"
,
name: "Oakland Hall (Residence Hall)"
code: ""
number: "419"
lng: "-76.9492226797435"
lat: "38.9938547"
,
name: "Paint Branch Drive Visitors Lot"
code: ""
number: "p194"
lng: "-76.9376652129763"
lat: "38.99197045"
,
name: "Parking Lot 11b"
code: ""
number: "p100"
lng: "-76.9362203954441"
lat: "38.9937827"
,
name: "Parking Lot 11c"
code: ""
number: "p101"
lng: "-76.93793265456"
lat: "38.99389735"
,
name: "Parking Lot 11h"
code: ""
number: "p102"
lng: "-76.9353551969982"
lat: "38.98254495"
,
name: "Parking Lot 15"
code: ""
number: "p103"
lng: "-76.9357555594133"
lat: "38.9810048"
,
name: "Parking Lot 16a"
code: ""
number: "p104"
lng: "-76.93612975"
lat: "38.98352975"
,
name: "Parking Lot 16b"
code: ""
number: "p105"
lng: "-76.9364642095338"
lat: "38.9830317"
,
name: "Parking Lot 16c"
code: ""
number: "p106"
lng: "-76.9319854884635"
lat: "38.98323445"
,
name: "Parking Lot 16d"
code: ""
number: "p107"
lng: "-76.9323682"
lat: "38.98234055"
,
name: "Parking Lot 16e"
code: ""
number: "p108"
lng: "-76.933294157523"
lat: "38.982799"
,
name: "Parking lot 16f"
code: ""
number: "p109"
lng: "-76.9339050233523"
lat: "38.9831728"
,
name: "Parking Lot 16h"
code: ""
number: "p110"
lng: "-76.9315366103807"
lat: "38.98314145"
,
name: "Parking Lot 19"
code: ""
number: "p253"
lng: "-76.94504517113101"
lat: "38.98196015"
,
name: "Parking Lot 1b"
code: ""
number: "p111"
lng: "-76.9494408398075"
lat: "38.9870827"
,
name: "Parking Lot 1d"
code: ""
number: "p112"
lng: "-76.9506524287848"
lat: "38.9861006"
,
name: "Parking Lot 2a"
code: ""
number: "p113"
lng: "-76.9481586165606"
lat: "38.99420335"
,
name: "Parking Lot 2b"
code: ""
number: "p114"
lng: "-76.9505459201508"
lat: "38.9931883"
,
name: "Parking Lot 2c"
code: ""
number: "p115"
lng: "-76.9477756507565"
lat: "38.99230265"
,
name: "Parking Lot 2d"
code: ""
number: "p116"
lng: "-76.9455376145164"
lat: "38.99238785"
,
name: "Parking Lot 2f"
code: ""
number: "p118"
lng: "-76.9414993575919"
lat: "38.99257355"
,
name: "Parking Lot 2g"
code: ""
number: "p236"
lng: "-76.9472887836851"
lat: "38.99440375"
,
name: "Parking Lot 4b"
code: ""
number: "p121"
lng: "-76.9411816982757"
lat: "38.9970546"
,
name: "Parking Lot 4j"
code: ""
number: "p124"
lng: "-76.939228467483"
lat: "38.99721475"
,
name: "Parking Lot 4n"
code: ""
number: "p234"
lng: "-76.9432485836194"
lat: "38.9975986"
,
name: "Parking Lot 5"
code: ""
number: "p126"
lng: "-76.9409605"
lat: "38.9897447"
,
name: "Parking Lot 6"
code: ""
number: "p127"
lng: "-76.9441223999729"
lat: "38.9948837"
,
name: "Parking Lot 9b"
code: ""
number: "p128"
lng: "-76.9379480992313"
lat: "38.99443435"
,
name: "Parking Lot 9c"
code: ""
number: "p129"
lng: "-76.9392026889215"
lat: "38.994263"
,
name: "Parking Lot A"
code: ""
number: "p130"
lng: "-76.9445554411228"
lat: "38.98430415"
,
name: "Parking Lot A*1"
code: ""
number: "p131"
lng: "-76.94812275"
lat: "38.9862722"
,
name: "Parking Lot B"
code: ""
number: "p135"
lng: "-76.9409603"
lat: "38.98961455"
,
name: "Parking Lot BB"
code: ""
number: "p136"
lng: "-76.9437432752211"
lat: "38.98868945"
,
name: "Parking Lot C1"
code: ""
number: "p137"
lng: "-76.9373082470676"
lat: "38.98662605"
,
name: "Parking Lot C2"
code: ""
number: "p138"
lng: "-76.9386466960901"
lat: "38.98525575"
,
name: "Parking Lot CC1"
code: ""
number: "p140"
lng: "-76.9395347604447"
lat: "38.99236635"
,
name: "Parking Lot CC2"
code: ""
number: "p141"
lng: "-76.940687"
lat: "38.9916206"
,
name: "Parking Lot D"
code: ""
number: "p143"
lng: "-76.94503695"
lat: "38.98512635"
,
name: "Parking Lot D"
code: ""
number: "p257"
lng: "-76.9453188124738"
lat: "38.98552325"
,
name: "Parking Lot E"
code: ""
number: "p125"
lng: "-76.9371846"
lat: "38.98927645"
,
name: "Parking Lot E*1"
code: ""
number: "p146"
lng: "-76.93950635"
lat: "38.98895295"
,
name: "Parking Lot E*2"
code: ""
number: "p147"
lng: "-76.93874685"
lat: "38.98936235"
,
name: "Parking Lot EE"
code: ""
number: "p148"
lng: "-76.93777255"
lat: "38.98956665"
,
name: "Parking Lot F"
code: ""
number: "p149"
lng: "-76.9362193313411"
lat: "38.98280315"
,
name: "Parking Lot FF"
code: ""
number: "p150"
lng: "-76.9399114407625"
lat: "38.99431135"
,
name: "Parking Lot FF2"
code: ""
number: "p151"
lng: "-76.9392185247061"
lat: "38.9931298"
,
name: "Parking Lot G"
code: ""
number: "p258"
lng: "-76.9385438831386"
lat: "38.9919303"
,
name: "Parking Lot H1"
code: ""
number: "p152"
lng: "-76.9408901957625"
lat: "38.98714445"
,
name: "Parking Lot HH1"
code: ""
number: "p155"
lng: "-76.9437705364812"
lat: "38.9871964"
,
name: "Parking Lot HH2"
code: ""
number: "p156"
lng: "-76.9487217884698"
lat: "38.99179395"
,
name: "Parking Lot I*"
code: ""
number: "p158"
lng: "-76.9369211101377"
lat: "38.99243845"
,
name: "Parking Lot II1"
code: ""
number: "p239"
lng: "-76.93916665"
lat: "38.9896806"
,
name: "Parking Lot JJ1"
code: ""
number: "p159"
lng: "-76.9483331340347"
lat: "38.98539355"
,
name: "Parking Lot JJ2"
code: ""
number: "p160"
lng: "-76.9493049661468"
lat: "38.9850641"
,
name: "Parking Lot JJ3"
code: ""
number: "p241"
lng: "-76.9491042944466"
lat: "38.98614505"
,
name: "Parking Lot K*2"
code: ""
number: "p162"
lng: "-76.936078711566"
lat: "38.98664725"
,
name: "Parking Lot K*4"
code: ""
number: "p163"
lng: "-76.9348330271331"
lat: "38.986413"
,
name: "Parking Lot K*5"
code: ""
number: "p250"
lng: "-76.9349324800649"
lat: "38.98521105"
,
name: "Parking Lot K1"
code: ""
number: "p164"
lng: "-8564367.45346"
lat: "4719533.97406"
,
name: "Parking Lot K2"
code: ""
number: "p165"
lng: "-76.9346156087241"
lat: "38.98554405"
,
name: "Parking Lot K4"
code: ""
number: "p235"
lng: "-76.9429211998132"
lat: "38.99640555"
,
name: "Parking Lot K5"
code: ""
number: "p167"
lng: "-76.9363026926599"
lat: "38.98296465"
,
name: "Parking Lot K6"
code: ""
number: "p168"
lng: "-76.9548861803616"
lat: "38.99083205"
,
name: "Parking Lot KK"
code: ""
number: "p169"
lng: "-76.939126849749"
lat: "38.99074565"
,
name: "Parking Lot KK1"
code: ""
number: "p237"
lng: "-76.9390710660194"
lat: "38.99103585"
,
name: "Parking Lot L"
code: ""
number: "p170"
lng: "-76.9390440880309"
lat: "38.985601"
,
name: "Parking Lot L*"
code: ""
number: "p171"
lng: "-76.9395731763922"
lat: "38.98602365"
,
name: "Parking Lot ML*"
code: ""
number: "p254"
lng: "-76.94504546108689"
lat: "38.9820502"
,
name: "Parking Lot MM1"
code: ""
number: "p172"
lng: "-76.9442824224091"
lat: "38.99208515"
,
name: "Parking Lot MM2"
code: ""
number: "p173"
lng: "-76.9416584717757"
lat: "38.9924268"
,
name: "Parking Lot MM3"
code: ""
number: "p174"
lng: "-76.94307075"
lat: "38.99134275"
,
name: "Parking Lot N"
code: ""
number: "p178"
lng: "-76.9483259502209"
lat: "38.992758"
,
name: "Parking Lot N*"
code: ""
number: "p177"
lng: "-76.9496207605017"
lat: "38.99315285"
,
name: "Parking Lot N*"
code: ""
number: "p183"
lng: "-76.9488709100114"
lat: "38.99306485"
,
name: "Parking Lot N*1"
code: ""
number: "p246"
lng: "-76.9399408810552"
lat: "38.982763"
,
name: "Parking Lot N*2"
code: ""
number: "p175"
lng: "-76.9388435512149"
lat: "38.98243265"
,
name: "Parking Lot N*3"
code: ""
number: "p176"
lng: "-76.943423876098"
lat: "38.9826911"
,
name: "Parking Lot N3"
code: ""
number: "p179"
lng: "-76.9455457475625"
lat: "38.9927533"
,
name: "Parking Lot N4"
code: ""
number: "p180"
lng: "-76.94428505"
lat: "38.9925133"
,
name: "Parking Lot N5"
code: ""
number: "p181"
lng: "-76.9321558294979"
lat: "38.9835147"
,
name: "Parking Lot N7"
code: ""
number: "p182"
lng: "-76.9477738109564"
lat: "38.9925869"
,
name: "Parking Lot N9"
code: ""
number: "p184"
lng: "-76.946957493951"
lat: "38.99306985"
,
name: "Parking Lot O1"
code: ""
number: "p185"
lng: "-76.94791655"
lat: "38.98399525"
,
name: "Parking Lot O3"
code: ""
number: "p187"
lng: "-76.9483002446715"
lat: "38.98442185"
,
name: "Parking Lot O4"
code: ""
number: "p188"
lng: "-76.9462971999815"
lat: "38.9843053"
,
name: "Parking Lot O5"
code: ""
number: "p189"
lng: "-76.9477835"
lat: "38.98355155"
,
name: "Parking Lot P*"
code: ""
number: "p191"
lng: "-76.94270605"
lat: "38.9981942"
,
name: "Parking Lot P1"
code: ""
number: "p192"
lng: "-76.9431896439374"
lat: "38.998125"
,
name: "Parking Lot P2"
code: ""
number: "p193"
lng: "-76.9421021500106"
lat: "38.99898715"
,
name: "Parking Lot PP*"
code: ""
number: "p195"
lng: "-76.9403724850234"
lat: "38.99385655"
,
name: "Parking Lot PP1"
code: ""
number: "p196"
lng: "-76.9420853788895"
lat: "38.9933824"
,
name: "Parking Lot PP2"
code: ""
number: "p197"
lng: "-76.9406987273544"
lat: "38.99385655"
,
name: "Parking Lot Q"
code: ""
number: "p198"
lng: "-76.9438760840556"
lat: "38.9911695"
,
name: "Parking Lot Q1"
code: ""
number: "p262"
lng: "-76.9434800720619"
lat: "38.9911726"
,
name: "Parking Lot R*"
code: ""
number: "p199"
lng: "-76.9456938440546"
lat: "38.9893944"
,
name: "Parking Lot R3"
code: ""
number: "p200"
lng: "-76.9455308111322"
lat: "38.9902248"
,
name: "Parking Lot R4"
code: ""
number: "p201"
lng: "-76.95470385"
lat: "38.99062525"
,
name: "Parking Lot RR"
code: ""
number: "p203"
lng: "-76.9409602"
lat: "38.98948445"
,
name: "Parking Lot RR1"
code: ""
number: "p251"
lng: "-76.9344902879538"
lat: "38.9869681"
,
name: "Parking Lot RR2"
code: ""
number: "p265"
lng: "-76.9379107465909"
lat: "38.9948436"
,
name: "Parking Lot RR2"
code: ""
number: "p252"
lng: "-76.9343316115429"
lat: "38.9876128"
,
name: "Parking Lot S3"
code: ""
number: "p204"
lng: "-76.9430233373487"
lat: "38.9925145"
,
name: "Parking Lot S4"
code: ""
number: "p205"
lng: "-76.943739453674"
lat: "38.98246725"
,
name: "Parking Lot S5"
code: ""
number: "p206"
lng: "-76.9494324090596"
lat: "38.9933015"
,
name: "Parking Lot S7"
code: ""
number: "p207"
lng: "-76.9477725756751"
lat: "38.99283285"
,
name: "Parking Lot S8"
code: ""
number: "p208"
lng: "-76.9467035382421"
lat: "38.99288295"
,
name: "Parking Lot SD*"
code: ""
number: "p209"
lng: "-76.9489187705445"
lat: "38.9902444"
,
name: "Parking Lot SS1"
code: ""
number: "p211"
lng: "-76.9441549116248"
lat: "38.9937615"
,
name: "Parking Lot SS2"
code: ""
number: "p212"
lng: "-76.9463360969161"
lat: "38.99397245"
,
name: "Parking Lot SS3"
code: ""
number: "p213"
lng: "-76.94667609821"
lat: "38.9942645"
,
name: "Parking Lot T"
code: ""
number: "p215"
lng: "-76.9377298240488"
lat: "38.98986685"
,
name: "Parking Lot TT"
code: ""
number: "p217"
lng: "-76.94036895"
lat: "38.98852595"
,
name: "Parking Lot U1"
code: ""
number: "p218"
lng: "-76.9437394"
lat: "38.9826911"
,
name: "Parking Lot U6"
code: ""
number: "p260"
lng: "-76.9450241678599"
lat: "38.9828241"
,
name: "Parking Lot U6"
code: ""
number: "p247"
lng: "-76.9449759499812"
lat: "38.9828063"
,
name: "Parking Lot UU"
code: ""
number: "p224"
lng: "-76.9398296251258"
lat: "38.99054885"
,
name: "Parking Lot W"
code: ""
number: "p225"
lng: "-76.9413860371152"
lat: "38.98486265"
,
name: "Parking Lot W1"
code: ""
number: "p244"
lng: "-76.9424845941378"
lat: "38.98494595"
,
name: "Parking Lot XX1"
code: ""
number: "p226"
lng: "-76.935617290379"
lat: "38.9900403"
,
name: "Parking Lot XX2"
code: ""
number: "p227"
lng: "-76.9367968153632"
lat: "38.99147625"
,
name: "Parking Lot XX4"
code: ""
number: "p229"
lng: "-76.937826475598"
lat: "38.99316725"
,
name: "Parking Lot XX5"
code: ""
number: "p233"
lng: "-76.93692095"
lat: "38.9909658"
,
name: "Parking Lot Y"
code: ""
number: "p230"
lng: "-76.9419977518114"
lat: "38.9840286"
,
name: "Parking Lot YC"
code: ""
number: "p231"
lng: "-76.9480388560794"
lat: "38.993677"
,
name: "Parking Lot Z"
code: ""
number: "p240"
lng: "-76.9485514812047"
lat: "38.98813685"
,
name: "Parking Lot Z*"
code: ""
number: "p232"
lng: "-76.9467581"
lat: "38.9887473"
,
name: "Parking Lot Z1"
code: ""
number: "p238"
lng: "-76.9475152"
lat: "38.9890159"
,
name: "Patapsco Building"
code: ""
number: "805"
lng: "-76.9249429909109"
lat: "38.9767149"
,
name: "Patuxent Building"
code: ""
number: "010"
lng: "-76.943483756875"
lat: "38.98863025"
,
name: "Pest Control Trailer"
code: ""
number: "385"
lng: "-76.9335922924701"
lat: "38.98515375"
,
name: "<NAME>"
code: ""
number: "130"
lng: "-76.93463845"
lat: "38.9845988"
,
name: "<NAME> Sigma Sorority"
code: ""
number: "171"
lng: "-76.9356071135738"
lat: "38.9813563"
,
name: "Physical Distribution Center (Terrapin Trader)"
code: ""
number: "383"
lng: "-76.9292279695603"
lat: "38.9819567"
,
name: "Physics Building"
code: "PHY"
number: "082"
lng: "-76.94007719774"
lat: "38.9886972"
,
name: "<NAME>"
code: ""
number: "127"
lng: "-76.936200475"
lat: "38.9846297"
,
name: "Plant Operations and Maintenance"
code: ""
number: "217"
lng: "-76.9340028269637"
lat: "38.98600515"
,
name: "Plant Operations and Maintenance Shop Building"
code: ""
number: "055"
lng: "-76.9334591527876"
lat: "38.98503875"
,
name: "Plant Operations and Maintenance Shop2"
code: ""
number: "101"
lng: "-76.9344044678528"
lat: "38.98657335"
,
name: "Plant Operations and Maintenance Shop3"
code: ""
number: "212"
lng: "-76.9342274025325"
lat: "38.9867869"
,
name: "Plant Operations and Maintenance Shops"
code: ""
number: "006"
lng: "-76.93452175"
lat: "38.9858077"
,
name: "Plant Operations and Maintenance Warehouse"
code: ""
number: "012"
lng: "-76.9339846304955"
lat: "38.98636865"
,
name: "Plant Science Building"
code: "PLS"
number: "036"
lng: "-76.9413596237531"
lat: "38.98873715"
,
name: "Pocomoke Building"
code: ""
number: "007"
lng: "-76.9371601416079"
lat: "38.98295055"
,
name: "Police Impound"
code: ""
number: "p249"
lng: "-76.9312547643577"
lat: "38.98301005"
,
name: "Police Substation"
code: ""
number: "018"
lng: "-76.9355837584007"
lat: "38.9825402"
,
name: "Potomac Building"
code: ""
number: "092"
lng: "-76.93830195"
lat: "38.9903918"
,
name: "Preink<NAME> Field House"
code: "PKT"
number: "054"
lng: "-76.9461320708538"
lat: "38.9844511"
,
name: "Presidents Residence"
code: ""
number: "164"
lng: "-76.9523499501432"
lat: "38.9885031"
,
name: "<NAME>"
code: "PGG"
number: "021"
lng: "-76.941826725"
lat: "38.9825857"
,
name: "Pump <NAME>"
code: ""
number: "200"
lng: "-76.9465122"
lat: "38.9886431"
,
name: "<NAME>"
code: "QAN"
number: "061"
lng: "-76.9460068545931"
lat: "38.98519025"
,
name: "<NAME>"
code: "ARM"
number: "078"
lng: "-76.9389662139866"
lat: "38.9860268"
,
name: "Recreation Artificial Turf Field"
code: ""
number: "d102"
lng: "-76.9391820716749"
lat: "38.9954899"
,
name: "Recycling Center"
code: ""
number: "107"
lng: "-76.9372141594106"
lat: "38.99566315"
,
name: "Regents Drive Parking Garage"
code: ""
number: "202"
lng: "-76.9414582777038"
lat: "38.9897313"
,
name: "<NAME>"
code: ""
number: "398"
lng: "-76.942996197787"
lat: "38.997127"
,
name: "<NAME>"
code: "RIT"
number: "004"
lng: "-76.936456447182"
lat: "38.98504805"
,
name: "<NAME> Stadium"
code: ""
number: "409"
lng: "-76.9397458168949"
lat: "38.99629445"
,
name: "<NAME>l Center for Jewish Life"
code: ""
number: "c105"
lng: "-76.9487503080679"
lat: "38.98270895"
,
name: "<NAME>"
code: ""
number: "080"
lng: "-76.9376116836577"
lat: "38.9853423"
,
name: "Route One Annex"
code: ""
number: "e8400"
lng: "-76.9332763982874"
lat: "38.99373045"
,
name: "<NAME> IV Alumni Center"
code: ""
number: "407"
lng: "-76.9490713634912"
lat: "38.9894604"
,
name: "Satellite Central Utilities Building 1"
code: "SCUB 1"
number: "019"
lng: "-76.9404483999786"
lat: "38.98202525"
,
name: "Satellite Central Utilities Building 2"
code: "SCUB 2"
number: "067"
lng: "-76.9446545340067"
lat: "38.9835376"
,
name: "Satellite Central Utilities Building 3"
code: "SCUB 3"
number: "392"
lng: "-76.9459512158262"
lat: "38.98909655"
,
name: "Satellite Central Utilities Building 4"
code: "SCUB 4"
number: "405"
lng: "-76.9383176"
lat: "38.98979345"
,
name: "School of Public Health"
code: "SPH"
number: "255"
lng: "-76.9431633838994"
lat: "38.9934922"
,
name: "Security Booth (Campus Drive)"
code: ""
number: "295"
lng: "-76.936510313403"
lat: "38.98857565"
,
name: "Security Booth (Stadium Drive)"
code: ""
number: "297"
lng: "-76.95045855"
lat: "38.99198405"
,
name: "Service Building (University of Maryland, Police)"
code: ""
number: "003"
lng: "-76.9361406358543"
lat: "38.98597645"
,
name: "<NAME>"
code: ""
number: "109"
lng: "-76.9414407668993"
lat: "38.99169565"
,
name: "Ship<NAME> Field House (Baseball)"
code: ""
number: "159"
lng: "-76.9441134376408"
lat: "38.9889419"
,
name: "Shoemaker Building"
code: "SHM"
number: "037"
lng: "-76.942745495777"
lat: "38.9839376"
,
name: "Sh<NAME> Laboratory"
code: "SHR"
number: "075"
lng: "-76.94191685"
lat: "38.98727505"
,
name: "Shuttle Bus Facility"
code: ""
number: "p123"
lng: "-76.9365504141656"
lat: "38.9955167"
,
name: "Shuttle Bus Facility"
code: ""
number: "013"
lng: "-76.9338891741985"
lat: "38.98692845"
,
name: "Shuttle Bus Trailer 2"
code: ""
number: "410"
lng: "-76.93407295"
lat: "38.9870785"
,
name: "Sigma Delta Tau Sorority"
code: ""
number: "174"
lng: "-76.93587565"
lat: "38.9806496"
,
name: "<NAME>"
code: ""
number: "135"
lng: "-76.93514475"
lat: "38.9832453"
,
name: "<NAME>"
code: ""
number: "133"
lng: "-76.9341870726646"
lat: "38.98356725"
,
name: "Skinner Building"
code: "SKN"
number: "044"
lng: "-76.9418413304917"
lat: "38.98480955"
,
name: "Somerset Hall (Residence Hall)"
code: "SOM"
number: "063"
lng: "-76.9455473"
lat: "38.9850026"
,
name: "South Campus Commons 1"
code: ""
number: "996"
lng: "-76.9429384672295"
lat: "38.98211405"
,
name: "South Campus Commons 2"
code: ""
number: "997"
lng: "-76.942933891664"
lat: "38.9828397"
,
name: "South Campus Commons 3"
code: ""
number: "998"
lng: "-76.9397267748109"
lat: "38.98147685"
,
name: "South Campus Commons 4"
code: ""
number: "999"
lng: "-76.9414232588701"
lat: "38.98128715"
,
name: "South Campus Commons 5"
code: ""
number: "974"
lng: "-76.94470195"
lat: "38.98275075"
,
name: "South Campus Commons 6"
code: ""
number: "975"
lng: "-76.9446791864948"
lat: "38.98216555"
,
name: "South Campus Commons 7"
code: ""
number: "281"
lng: "-76.9445591280214"
lat: "38.9815547"
,
name: "South Campus Dining Hall"
code: "SDH"
number: "026"
lng: "-76.9436837393588"
lat: "38.983048"
,
name: "South Gate (Regents Dr. and Rt. 1)"
code: ""
number: "401"
lng: "-76.938869"
lat: "38.9823717"
,
name: "Special Education Trailer One"
code: ""
number: "104"
lng: "-76.94795265"
lat: "38.9869185"
,
name: "Special Education Trailer Two"
code: ""
number: "105"
lng: "-76.94795265"
lat: "38.9867484"
,
name: "Special Services Office Building"
code: ""
number: "100"
lng: "-76.93443985"
lat: "38.98651785"
,
name: "St. Mary's Hall (Residence Hall)"
code: "STM"
number: "062"
lng: "-76.9455983683182"
lat: "38.9870071"
,
name: "Stadium Drive Parking Garage"
code: "SDG"
number: "218"
lng: "-76.9489997913519"
lat: "38.9910407"
,
name: "<NAME>"
code: "SQH"
number: "233"
lng: "-76.943749743933"
lat: "38.9820745"
,
name: "<NAME>"
code: "SYM"
number: "076"
lng: "-76.9406455474374"
lat: "38.9870811"
,
name: "<NAME> (Residence Hall)"
code: "TAL"
number: "030"
lng: "-76.9422561"
lat: "38.9833387"
,
name: "<NAME>"
code: "TLF"
number: "043"
lng: "-76.9430948392412"
lat: "38.98484635"
,
name: "<NAME>"
code: ""
number: "128"
lng: "-76.93563395"
lat: "38.9846469"
,
name: "Tawes Fine Arts Building"
code: "TWS"
number: "141"
lng: "-76.9483238494655"
lat: "38.98598645"
,
name: "Technology Advancement Building"
code: ""
number: "387"
lng: "-76.938615024697"
lat: "38.99251055"
,
name: "Technology Ventures Building"
code: ""
number: "806"
lng: "-76.9255078851059"
lat: "38.97964495"
,
name: "Temporary Building"
code: ""
number: "208"
lng: "-76.9337296097556"
lat: "38.9852606"
,
name: "Temporary Building1"
code: ""
number: "207"
lng: "-76.9327854891442"
lat: "38.9836276"
,
name: "Temporary Building2"
code: ""
number: "204"
lng: "-76.9328960630658"
lat: "38.983738"
,
name: "Terrapin Trail Parking Garage"
code: ""
number: "403"
lng: "-76.9433528356638"
lat: "38.99497"
,
name: "The Diner"
code: ""
number: "257"
lng: "-76.9466253202392"
lat: "38.9925679"
,
name: "The Domain"
code: ""
number: "a004"
lng: "-76.9493582912198"
lat: "38.98418695"
,
name: "The Varsity"
code: ""
number: "a003"
lng: "-76.9344594915205"
lat: "38.99146725"
,
name: "Track/Soccer Ticket Booth"
code: ""
number: "389"
lng: "-76.9498700750045"
lat: "38.9874593"
,
name: "<NAME> (Dairy/Visitor Center)"
code: "TUR"
number: "079"
lng: "-76.9373371743758"
lat: "38.98614065"
,
name: "<NAME>"
code: "TYD"
number: "042"
lng: "-76.9440007655894"
lat: "38.9848516"
,
name: "<NAME>"
code: ""
number: "361"
lng: "-76.9478028839735"
lat: "38.98970955"
,
name: "UMUC Hotel"
code: ""
number: "348"
lng: "-76.9552482403172"
lat: "38.9860612"
,
name: "UMUC Inn and Conference Center (ICC)"
code: "ICC"
number: "345"
lng: "-76.9540770364453"
lat: "38.9860659"
,
name: "UMUC Student and Faculty Services Center"
code: "SFSC"
number: "346"
lng: "-76.9533396302397"
lat: "38.9866594"
,
name: "UMUC/University College Garage (PGUC)"
code: "PGUC"
number: "347"
lng: "-76.9544177389556"
lat: "38.98699405"
,
name: "Union Lane Parking Garage"
code: "ULG"
number: "179"
lng: "-76.9458968038597"
lat: "38.98840855"
,
name: "University Baptist Church"
code: ""
number: "c100"
lng: "-76.95374930076"
lat: "38.98425975"
,
name: "University Hills Apartments1"
code: ""
number: "288"
lng: "-76.9562455643617"
lat: "38.98514415"
,
name: "University Hills Apartments2"
code: ""
number: "287"
lng: "-76.9572075604853"
lat: "38.9850375"
,
name: "University Hills Apartments3"
code: ""
number: "286"
lng: "-76.9582383073659"
lat: "38.98494235"
,
name: "University Hills Apartments4"
code: ""
number: "292"
lng: "-76.9583539620815"
lat: "38.98442205"
,
name: "University Hills Apartments5"
code: ""
number: "291"
lng: "-76.9573552914719"
lat: "38.9844605"
,
name: "University Hills Apartments6"
code: ""
number: "290"
lng: "-76.9562790737231"
lat: "38.9843953"
,
name: "University Hills Apartments7"
code: ""
number: "289"
lng: "-76.9558676954921"
lat: "38.98447435"
,
name: "University United Methodist Church"
code: ""
number: "c101"
lng: "-76.9513539156316"
lat: "38.98421265"
,
name: "University View"
code: ""
number: "a001"
lng: "-76.9344298975591"
lat: "38.99256455"
,
name: "University View II"
code: ""
number: "a002"
lng: "-76.9336210376858"
lat: "38.99251945"
,
name: "Univesity Research Center (North)"
code: ""
number: "F05"
lng: "-76.9222887214757"
lat: "38.9722927"
,
name: "Univesity Research Center (South)"
code: ""
number: "F06"
lng: "-76.9218988073354"
lat: "38.970879"
,
name: "<NAME>"
code: "VMH"
number: "039"
lng: "-76.9470382916646"
lat: "38.9830467"
,
name: "Varsity Sports Teamhouse"
code: ""
number: "158"
lng: "-76.9451814170622"
lat: "38.99006865"
,
name: "Washington Hall (Residence Hall)"
code: "WSH"
number: "023"
lng: "-76.9413924749198"
lat: "38.9818076"
,
name: "W<NAME>ico Hall (Residence Hall)"
code: "WIC"
number: "069"
lng: "-76.9458442795105"
lat: "38.9837427"
,
name: "Wind Tunnel Building"
code: "WTU"
number: "081"
lng: "-76.9368484609687"
lat: "38.9899002"
,
name: "W<NAME>s Hall"
code: "WDS"
number: "047"
lng: "-76.9418451233709"
lat: "38.9851106"
,
name: "W<NAME>ster Hall (Residence Hall)"
code: "WOR"
number: "051"
lng: "-76.9449993420465"
lat: "38.98467285"
,
name: "<NAME>"
code: ""
number: "139"
lng: "-76.9371918048901"
lat: "38.9832052"
,
name: "<NAME>"
code: ""
number: "138"
lng: "-76.9366895"
lat: "38.9832584"
,
name: "<NAME>"
code: ""
number: "137"
lng: "-76.9361831249379"
lat: "38.98325535"
]
mongoose = require('mongoose')
{Building, User, Course} = require './models'
# Seed the database: upsert every campus building and ensure default users exist.
#
# Each BUILDINGS entry is keyed by its `number` field, so running bootstrap
# repeatedly updates records in place (upsert: true) instead of duplicating them.
# The upserts are fire-and-forget, but failures are now logged instead of being
# silently dropped by an empty callback.
exports.bootstrap = ->
  for b in BUILDINGS
    # `do (b)` pins the current record for the async callback's error message;
    # a plain CoffeeScript `for` would let the callback see only the last `b`.
    do (b) ->
      Building.update {number: b.number}, b, upsert: true, (err) ->
        console.error "building upsert failed (#{b.number}):", err if err?
  # NOTE(review): hard-coded seed users — presumably developer accounts; confirm.
  User.findOrCreate('zfogg')
  User.findOrCreate('zoodle')
exports.buildings = BUILDINGS = [
name: "251 North"
code: ""
number: "251"
lng: "-76.9496090325357"
lat: "38.99274005"
,
name: "94th Aero Squadron"
code: ""
number: "F08"
lng: "-76.9210122711411"
lat: "38.9781702"
,
name: "PI:NAME:<NAME>END_PI Federal Building FDA"
code: "FDA"
number: "F01"
lng: "-76.9261965846492"
lat: "38.9770124"
,
name: "PI:NAME:<NAME>END_PI"
code: "AVW"
number: "115"
lng: "-76.9363418604721"
lat: "38.99079905"
,
name: "PI:NAME:<NAME>END_PI Student Union Buildings"
code: "SSU"
number: "163"
lng: "-76.9447218233957"
lat: "38.98816455"
,
name: "Agriculture Shed"
code: ""
number: "102"
lng: "-76.9407337234236"
lat: "38.99198175"
,
name: "Allegany Hall (Residence Hall)"
code: "AGY"
number: "024"
lng: "-76.9414146"
lat: "38.98157935"
,
name: "Alpha Chi Omega Sorority"
code: ""
number: "172"
lng: "-76.9360293156412"
lat: "38.9814223"
,
name: "Alpha Delta Pi Sorority"
code: ""
number: "170"
lng: "-76.9352079623363"
lat: "38.981239"
,
name: "Alpha Epsilon Phi Sorority"
code: ""
number: "136"
lng: "-76.93562535"
lat: "38.98324535"
,
name: "Alpha Epsilon Pi"
code: ""
number: "129"
lng: "-76.9351584"
lat: "38.9846571"
,
name: "Alpha Phi Sorority"
code: ""
number: "176"
lng: "-76.9352442035259"
lat: "38.9808974"
,
name: "Alpha Sigma Phi Fraternity"
code: ""
number: "134"
lng: "-76.9346571906316"
lat: "38.98329385"
,
name: "American Center for Physics"
code: ""
number: "F03"
lng: "-76.9299850652696"
lat: "38.97152715"
,
name: "Animal Science Service Building"
code: ""
number: "103"
lng: "-76.9410828738213"
lat: "38.99200365"
,
name: "Animal Science/Agricultural Engineering Building"
code: "ANS"
number: "142"
lng: "-76.9394592785606"
lat: "38.99164625"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "008"
lng: "-76.9400675383842"
lat: "38.9822303"
,
name: "PI:NAME:<NAME>END_PI"
code: "ANA"
number: "060"
lng: "-76.94673185"
lat: "38.98595025"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "156"
lng: "-76.9487330028218"
lat: "38.99193245"
,
name: "Aquatics Center"
code: ""
number: "399"
lng: "-76.9464978414166"
lat: "38.9933884"
,
name: "Architecture Building"
code: "ARC"
number: "145"
lng: "-76.9475462198212"
lat: "38.98423555"
,
name: "Art-Sociology Building"
code: "ASY"
number: "146"
lng: "-76.9478947752967"
lat: "38.98528145"
,
name: "Baltimore Hall (Residence Hall)"
code: "BAL"
number: "016"
lng: "-76.9421925409139"
lat: "38.9822469"
,
name: "Bel Air Hall (Residence Hall)"
code: "BEL"
number: "099"
lng: "-76.9426116"
lat: "38.99281705"
,
name: "PI:NAME:<NAME>END_PI"
code: "EDU"
number: "143"
lng: "-76.9474215"
lat: "38.9867095"
,
name: "Biology-Psychology Building"
code: "BPS"
number: "144"
lng: "-76.9425929312518"
lat: "38.98872265"
,
name: "Biomolecular Sciences Building"
code: ""
number: "296"
lng: "-76.9376944355845"
lat: "38.99285415"
,
name: "Biosciences Research Building"
code: "BRB"
number: "413"
lng: "-76.9428094130756"
lat: "38.98897595"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "119"
lng: "-76.94089305"
lat: "38.9922976"
,
name: "Building Services Operations Building"
code: ""
number: "215"
lng: "-76.9346037302467"
lat: "38.98626085"
,
name: "PI:NAME:<NAME>END_PI Stadium"
code: "BRD"
number: "364"
lng: "-76.9475181930938"
lat: "38.99033895"
,
name: "Byrd Stadium Maintenance Building"
code: ""
number: "369"
lng: "-76.9458711262378"
lat: "38.99025255"
,
name: "PI:NAME:<NAME>END_PI (Residence Hall)"
code: "CAL"
number: "015"
lng: "-76.9423134911421"
lat: "38.98292125"
,
name: "Cambridge Community Center"
code: "CCC"
number: "097"
lng: "-76.9429981"
lat: "38.99217805"
,
name: "CamPI:NAME:<NAME>END_PI (Residence Hall)"
code: "CAM"
number: "096"
lng: "-76.9430174173239"
lat: "38.99172105"
,
name: "PI:NAME:<NAME>END_PI (Residence Hall)"
code: "CAR"
number: "070"
lng: "-76.9457925"
lat: "38.9834966"
,
name: "PI:NAME:<NAME>END_PI (Residence Hall)"
code: "CRL"
number: "065"
lng: "-76.9456253332123"
lat: "38.9839834"
,
name: "CathPI:NAME:<NAME>END_PI"
code: ""
number: "c104"
lng: "-76.9450898077953"
lat: "38.98056895"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "110"
lng: "-76.9403740849109"
lat: "38.9923982"
,
name: "PI:NAME:<NAME>END_PI"
code: "CEC"
number: "017"
lng: "-76.9416573737624"
lat: "38.9829463"
,
name: "Center for the Advanced Study of Language"
code: ""
number: "F02"
lng: "-76.9256352793971"
lat: "38.9741989"
,
name: "Center for Young Children"
code: ""
number: "381"
lng: "-76.9484830471325"
lat: "38.99352845"
,
name: "Central Animal Resources Facility"
code: ""
number: "087"
lng: "-76.9383409902712"
lat: "38.9917051"
,
name: "PI:NAME:<NAME>END_PI (Residence Hall)"
code: "CEN"
number: "098"
lng: "-76.9421248952804"
lat: "38.992271"
,
name: "Chabad Jewish Student Center"
code: ""
number: "c102"
lng: "-76.9332465412442"
lat: "38.9803053"
,
name: "Challenge Course"
code: ""
number: "d108"
lng: "-76.9452623440046"
lat: "38.99415265"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "d103"
lng: "-76.9389471052562"
lat: "38.9837497"
,
name: "PI:NAME:<NAME>END_PI (Residence Hall)"
code: "CHL"
number: "025"
lng: "-76.9405193937813"
lat: "38.9816226"
,
name: "Chemical and Nuclear Engineering Building"
code: "CHE"
number: "090"
lng: "-76.9395374654513"
lat: "38.99060895"
,
name: "Chemistry Building"
code: "CHM"
number: "091"
lng: "-76.940171718657"
lat: "38.98959475"
,
name: "Chesapeake Building"
code: ""
number: "338"
lng: "-76.94215145"
lat: "38.99828345"
,
name: "Chestertown Hall (Residence Hall)"
code: "CHS"
number: "121"
lng: "-76.9434348"
lat: "38.99280355"
,
name: "Chincoteague Hall"
code: ""
number: "059"
lng: "-76.9445192324004"
lat: "38.9852322"
,
name: "PI:NAME:<NAME>END_PI Performing Arts Center"
code: "PAC"
number: "386"
lng: "-76.9504434053224"
lat: "38.9906807"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "d107"
lng: "-76.9458984242081"
lat: "38.99419435"
,
name: "Cole Student Activities Building"
code: "COL"
number: "162"
lng: "-76.9467662555511"
lat: "38.98797155"
,
name: "College Park Fire Station"
code: ""
number: "802"
lng: "-76.9336943536548"
lat: "38.9903857"
,
name: "College Park LDS Institute of Religion"
code: ""
number: "c106"
lng: "-76.9474186908036"
lat: "38.981888"
,
name: "College Park Metro Station"
code: ""
number: "p256"
lng: "-76.92800716569322"
lat: "38.97825115"
,
name: "Comcast Center"
code: ""
number: "360"
lng: "-76.9413366066757"
lat: "38.9956305"
,
name: "Computer and Space Sciences Building"
code: "CSS"
number: "224"
lng: "-76.9425669540912"
lat: "38.9909812"
,
name: "Computer Science Instructional Center"
code: "CSI"
number: "406"
lng: "-76.9361946"
lat: "38.98999465"
,
name: "Cumberland Hall (Residence Hall)"
code: "CBD"
number: "122"
lng: "-76.9439244117559"
lat: "38.9922614"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "131"
lng: "-76.9341748748757"
lat: "38.98432725"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "175"
lng: "-76.9354323628479"
lat: "38.9805908"
,
name: "PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI"
code: ""
number: "173"
lng: "-76.9363517104313"
lat: "38.9807542"
,
name: "DPI:NAME:<NAME>END_PI Hall (Residence Hall)"
code: "DEN"
number: "252"
lng: "-76.9499786106729"
lat: "38.99223865"
,
name: "DPI:NAME:<NAME>END_PIster Hall (Residence Hall)"
code: "DOR"
number: "064"
lng: "-76.9461812019092"
lat: "38.9867742"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "p248"
lng: "-76.931625967854"
lat: "38.98291445"
,
name: "PI:NAME:<NAME>END_PIon Hall (Residence Hall)"
code: "EAS"
number: "253"
lng: "-76.9502564099265"
lat: "38.9930193"
,
name: "PI:NAME:<NAME>END_PI Hall (Residence Hall)"
code: "ELK"
number: "254"
lng: "-76.9489576649778"
lat: "38.9925108"
,
name: "PI:NAME:<NAME>END_PI Hall (Residence Hall)"
code: "ELL"
number: "256"
lng: "-76.9466586443134"
lat: "38.9918228"
,
name: "Energy Plant"
code: ""
number: "001"
lng: "-76.935683449548"
lat: "38.98585305"
,
name: "Energy Research Facility"
code: ""
number: "223"
lng: "-76.936862109123"
lat: "38.99192655"
,
name: "Engineering Annex"
code: "EAB"
number: "093"
lng: "-76.9370872500394"
lat: "38.99073865"
,
name: "Engineering Fields"
code: ""
number: "d104"
lng: "-76.9375298581359"
lat: "38.98753725"
,
name: "Engineering Laboratory Building"
code: "EGL"
number: "089"
lng: "-76.9379467111811"
lat: "38.98926235"
,
name: "Environmental Services Facility"
code: ""
number: "344"
lng: "-76.9437334181758"
lat: "38.9965269"
,
name: "Episcopal Student Center"
code: ""
number: "c107"
lng: "-76.9358864728133"
lat: "38.9820118"
,
name: "Eppley Campus Recreation Center"
code: "CRC"
number: "068"
lng: "-76.9452703132821"
lat: "38.99358165"
,
name: "Field Hockey and Lacrosse Complex"
code: ""
number: "414"
lng: "-76.9368614977832"
lat: "38.99472745"
,
name: "PI:NAME:<NAME>END_PI"
code: "KEY"
number: "048"
lng: "-76.9430892753028"
lat: "38.98506175"
,
name: "Fraternity Fields"
code: ""
number: "d105"
lng: "-76.9358798758218"
lat: "38.9839311"
,
name: "PI:NAME:<NAME>END_PI (Residence Hall)"
code: "FRD"
number: "029"
lng: "-76.9407467785094"
lat: "38.98204525"
,
name: "PI:NAME:<NAME>END_PI (Residence Hall)"
code: "GRT"
number: "031"
lng: "-76.9426804"
lat: "38.98323575"
,
name: "Gate House"
code: ""
number: "299"
lng: "-76.9357643250823"
lat: "38.98864785"
,
name: "Geology Building"
code: "GEO"
number: "237"
lng: "-76.9409099576973"
lat: "38.9881304"
,
name: "Golf Course Clubhouse"
code: "GLF"
number: "166"
lng: "-76.9547070820405"
lat: "38.99113325"
,
name: "Golf Course Maintenance"
code: ""
number: "314"
lng: "-76.9531787893985"
lat: "38.9973127"
,
name: "Golub Property"
code: ""
number: "808"
lng: "-76.9301602208332"
lat: "38.96954285"
,
name: "Gossett Football Team House"
code: ""
number: "379"
lng: "-76.9462999828512"
lat: "38.98978335"
,
name: "Graduate Garden Apartment Complex"
code: ""
number: "260"
lng: "-76.9573161915849"
lat: "38.9845594"
,
name: "Ground Herbicide/Pesticide Storage Building"
code: ""
number: "327"
lng: "-76.94380855"
lat: "38.9956043"
,
name: "Grounds Material and Equipment Building"
code: ""
number: "328"
lng: "-76.94330185"
lat: "38.99561845"
,
name: "Grounds Office Building"
code: ""
number: "050"
lng: "-76.9437121"
lat: "38.99586895"
,
name: "Grounds Operations and Maintenance Facility"
code: ""
number: "124"
lng: "-76.9428899"
lat: "38.9955292"
,
name: "PI:NAME:<NAME>END_PI"
code: "HJP"
number: "073"
lng: "-76.9432766035148"
lat: "38.98708535"
,
name: "HPI:NAME:<NAME>END_PIstown Hall (Residence Hall)"
code: "HAG"
number: "258"
lng: "-76.9474136961276"
lat: "38.9924384"
,
name: "PI:NAME:<NAME>END_PI Hall (Residence Hall)"
code: "HRF"
number: "014"
lng: "-76.9408308448032"
lat: "38.9824913"
,
name: "PI:NAME:<NAME>END_PI"
code: "HAR"
number: "002"
lng: "-76.9356386027744"
lat: "38.98703195"
,
name: "Health Center"
code: ""
number: "140"
lng: "-76.9447460619769"
lat: "38.9872158"
,
name: "Heavy Equipment Building"
code: ""
number: "216"
lng: "-76.9338248720072"
lat: "38.98591735"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "F07"
lng: "-76.923644"
lat: "38.97594655"
,
name: "PI:NAME:<NAME>END_PI (Horticulture)"
code: "HZF"
number: "074"
lng: "-76.9418893729789"
lat: "38.98687755"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "c103"
lng: "-76.9440416856427"
lat: "38.98023825"
,
name: "PI:NAME:<NAME>END_PI"
code: "HBK"
number: "147"
lng: "-76.9415740921482"
lat: "38.9881767"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "108"
lng: "-76.9408036631471"
lat: "38.9916839"
,
name: "PI:NAME:<NAME>END_PI (Residence Hall)"
code: "HOW"
number: "028"
lng: "-76.9419727992265"
lat: "38.98196065"
,
name: "HVAC Building"
code: ""
number: "056"
lng: "-76.9437313"
lat: "38.9883195"
,
name: "Indoor Practice Facility"
code: ""
number: "309"
lng: "-76.9543808895367"
lat: "38.9896622"
,
name: "Institute for Physical Science and Technology"
code: "IPT"
number: "085"
lng: "-76.94097235"
lat: "38.9909191"
,
name: "Institute for Physical Science and Technology Storage"
code: "IPT"
number: "086"
lng: "-76.941282950122"
lat: "38.9908276"
,
name: "Instructional Television Facility"
code: "ITV"
number: "045"
lng: "-76.9383093000325"
lat: "38.98957525"
,
name: "PI:NAME:<NAME>END_PI and PI:NAME:<NAME>END_PI Football Practice Complex"
code: ""
number: "d100"
lng: "-76.9441988"
lat: "38.9902169"
,
name: "PI:NAME:<NAME>END_PI and PI:NAME:<NAME>END_PI"
code: ""
number: "388"
lng: "-76.9505711919375"
lat: "38.9878761"
,
name: "PI:NAME:<NAME>END_PI"
code: "JMP"
number: "083"
lng: "-76.940295409728"
lat: "38.9905177"
,
name: "PI:NAME:<NAME>END_PI Building"
code: "KEB"
number: "225"
lng: "-76.938025589911"
lat: "38.9909099"
,
name: "PI:NAME:<NAME>END_PI"
code: "JMZ"
number: "034"
lng: "-76.94455005"
lat: "38.9867867"
,
name: "PI:NAME:<NAME>END_PI"
code: "JUL"
number: "227"
lng: "-76.9435848"
lat: "38.99087705"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "126"
lng: "-76.936671546034"
lat: "38.98468565"
,
name: "PI:NAME:<NAME>END_PI (Residence Hall)"
code: "KNT"
number: "022"
lng: "-76.9418433747954"
lat: "38.98324525"
,
name: "PI:NAME:<NAME>END_PI"
code: "KNI"
number: "417"
lng: "-76.9484189"
lat: "38.98677255"
,
name: "Laboratory for Physical Sciences"
code: ""
number: "F09"
lng: "-76.9439779039198"
lat: "39.00369935"
,
name: "Laboratory for Telecommunications Sciences"
code: ""
number: "F10"
lng: "-76.943618302473"
lat: "39.00494075"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "132"
lng: "-76.9340275582429"
lat: "38.98394255"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "d101"
lng: "-76.9449878688894"
lat: "38.9925556"
,
name: "PI:NAME:<NAME>END_PI (Residence Hall)"
code: "LPA"
number: "259"
lng: "-76.9458907104711"
lat: "38.9924497"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "F11"
lng: "-76.9413782929899"
lat: "38.9996436"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "071"
lng: "-76.9395264"
lat: "38.9853629"
,
name: "PI:NAME:<NAME>END_PI"
code: "LEF"
number: "038"
lng: "-76.9436347921034"
lat: "38.9836849"
,
name: "PI:NAME:<NAME>END_PI Community Center"
code: ""
number: "250"
lng: "-76.9334623293913"
lat: "38.9833564"
,
name: "Leonardtown Housing"
code: ""
number: "238"
lng: "-76.9329720567893"
lat: "38.98288495"
,
name: "Leonardtown Housing 12"
code: ""
number: "249"
lng: "-76.933234970697"
lat: "38.98369045"
,
name: "Leonardtown Housing1"
code: ""
number: "248"
lng: "-76.9334063277435"
lat: "38.98395015"
,
name: "Leonardtown Housing10"
code: ""
number: "239"
lng: "-76.9328295569528"
lat: "38.98259145"
,
name: "LePI:NAME:<NAME>END_PIardtown Housing12"
code: ""
number: "244"
lng: "-76.9337842335381"
lat: "38.9848148"
,
name: "LePI:NAME:<NAME>END_PIardtown Housing2"
code: ""
number: "242"
lng: "-76.932084393545"
lat: "38.9826975"
,
name: "PI:NAME:<NAME>END_PItown Housing3"
code: ""
number: "249"
lng: "-76.93322662199752"
lat: "38.98369225"
,
name: "PI:NAME:<NAME>END_PItown Housing4"
code: ""
number: "247"
lng: "-76.9330952978623"
lat: "38.9842544"
,
name: "Leonardtown Housing5"
code: ""
number: "246"
lng: "-76.9335417977186"
lat: "38.98432445"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "245"
lng: "-76.9333959355818"
lat: "38.9846063"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "243"
lng: "-76.9320609074038"
lat: "38.98295505"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "241"
lng: "-76.9322669026445"
lat: "38.98256985"
,
name: "PI:NAME:<NAME>END_PI9"
code: ""
number: "240"
lng: "-76.932593405223"
lat: "38.98269485"
,
name: "PI:NAME:<NAME>END_PI Office Building"
code: ""
number: "201"
lng: "-76.9324305637608"
lat: "38.9837051"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "p117"
lng: "-76.9466368038263"
lat: "38.99133375"
,
name: "Main Administration Building"
code: ""
number: "077"
lng: "-76.93980935"
lat: "38.9860173"
,
name: "Manufacturing Building"
code: ""
number: "148"
lng: "-76.9394357215756"
lat: "38.99286495"
,
name: "PI:NAME:<NAME>END_PI"
code: "MMH"
number: "046"
lng: "-76.94075799224"
lat: "38.9850013"
,
name: "PI:NAME:<NAME>END_PI"
code: "EGR"
number: "088"
lng: "-76.9379744096244"
lat: "38.9888688"
,
name: "Mathematics Building"
code: "MTH"
number: "084"
lng: "-76.9390688130666"
lat: "38.98862265"
,
name: "PI:NAME:<NAME>END_PI"
code: "MCK"
number: "035"
lng: "-76.9451004712142"
lat: "38.98598155"
,
name: "PI:NAME:<NAME>END_PI"
code: "MKM"
number: "d106"
lng: "-76.9422685216793"
lat: "38.9859886"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "009"
lng: "-76.9408661301378"
lat: "38.98415015"
,
name: "PI:NAME:<NAME>END_PI ParkPI:NAME:<NAME>END_PI GarPI:NAME:<NAME>END_PI"
code: ""
number: "p255"
lng: "-76.92753801615109"
lat: "38.97901515"
,
name: "PI:NAME:<NAME>END_PIFRI Drill Tower"
code: ""
number: "195"
lng: "-76.9299067182804"
lat: "38.983559"
,
name: "MFRI Fire Extinguisher Training Facility"
code: ""
number: "194"
lng: "-76.9287741861655"
lat: "38.98401155"
,
name: "MFRI Office/Classroom Building"
code: ""
number: "199"
lng: "-76.9288594688342"
lat: "38.98332815"
,
name: "MFRI Structural Firefighting Building"
code: ""
number: "196"
lng: "-76.928304153809"
lat: "38.98365305"
,
name: "Microbiology Building"
code: "MCB"
number: "231"
lng: "-76.9434034376443"
lat: "38.98811525"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "052"
lng: "-76.9395651325614"
lat: "38.98670195"
,
name: "PI:NAME:<NAME>END_PI (Residence Hall)"
code: "MNT"
number: "032"
lng: "-76.9396753369829"
lat: "38.9820505"
,
name: "PI:NAME:<NAME>END_PI"
code: "MOR"
number: "040"
lng: "-76.944173786884"
lat: "38.9843126"
,
name: "Motor Transportation Facility"
code: ""
number: "011"
lng: "-76.9336288120943"
lat: "38.98618425"
,
name: "Motorcycle Storage Building"
code: ""
number: "020"
lng: "-76.9361307352417"
lat: "38.98564745"
,
name: "MPI:NAME:<NAME>END_PIatt Lane Parking Garage"
code: ""
number: "404"
lng: "-76.9455747840072"
lat: "38.98182585"
,
name: "Mowatt Lane Substation"
code: ""
number: "400"
lng: "-76.9477371"
lat: "38.98371275"
,
name: "Neutral Buoyancy Research Facility"
code: ""
number: "382"
lng: "-76.9389810221139"
lat: "38.99288895"
,
name: "NOAA Center for Weather and Climate Prediction"
code: ""
number: "F04"
lng: "-76.9247140879834"
lat: "38.97199165"
,
name: "Nyumburu Cultural Center"
code: "NCC"
number: "232"
lng: "-76.9438057866404"
lat: "38.9881339"
,
name: "Oakland Hall (Residence Hall)"
code: ""
number: "419"
lng: "-76.9492226797435"
lat: "38.9938547"
,
name: "Paint Branch Drive Visitors Lot"
code: ""
number: "p194"
lng: "-76.9376652129763"
lat: "38.99197045"
,
name: "Parking Lot 11b"
code: ""
number: "p100"
lng: "-76.9362203954441"
lat: "38.9937827"
,
name: "Parking Lot 11c"
code: ""
number: "p101"
lng: "-76.93793265456"
lat: "38.99389735"
,
name: "Parking Lot 11h"
code: ""
number: "p102"
lng: "-76.9353551969982"
lat: "38.98254495"
,
name: "Parking Lot 15"
code: ""
number: "p103"
lng: "-76.9357555594133"
lat: "38.9810048"
,
name: "Parking Lot 16a"
code: ""
number: "p104"
lng: "-76.93612975"
lat: "38.98352975"
,
name: "Parking Lot 16b"
code: ""
number: "p105"
lng: "-76.9364642095338"
lat: "38.9830317"
,
name: "Parking Lot 16c"
code: ""
number: "p106"
lng: "-76.9319854884635"
lat: "38.98323445"
,
name: "Parking Lot 16d"
code: ""
number: "p107"
lng: "-76.9323682"
lat: "38.98234055"
,
name: "Parking Lot 16e"
code: ""
number: "p108"
lng: "-76.933294157523"
lat: "38.982799"
,
name: "Parking lot 16f"
code: ""
number: "p109"
lng: "-76.9339050233523"
lat: "38.9831728"
,
name: "Parking Lot 16h"
code: ""
number: "p110"
lng: "-76.9315366103807"
lat: "38.98314145"
,
name: "Parking Lot 19"
code: ""
number: "p253"
lng: "-76.94504517113101"
lat: "38.98196015"
,
name: "Parking Lot 1b"
code: ""
number: "p111"
lng: "-76.9494408398075"
lat: "38.9870827"
,
name: "Parking Lot 1d"
code: ""
number: "p112"
lng: "-76.9506524287848"
lat: "38.9861006"
,
name: "Parking Lot 2a"
code: ""
number: "p113"
lng: "-76.9481586165606"
lat: "38.99420335"
,
name: "Parking Lot 2b"
code: ""
number: "p114"
lng: "-76.9505459201508"
lat: "38.9931883"
,
name: "Parking Lot 2c"
code: ""
number: "p115"
lng: "-76.9477756507565"
lat: "38.99230265"
,
name: "Parking Lot 2d"
code: ""
number: "p116"
lng: "-76.9455376145164"
lat: "38.99238785"
,
name: "Parking Lot 2f"
code: ""
number: "p118"
lng: "-76.9414993575919"
lat: "38.99257355"
,
name: "Parking Lot 2g"
code: ""
number: "p236"
lng: "-76.9472887836851"
lat: "38.99440375"
,
name: "Parking Lot 4b"
code: ""
number: "p121"
lng: "-76.9411816982757"
lat: "38.9970546"
,
name: "Parking Lot 4j"
code: ""
number: "p124"
lng: "-76.939228467483"
lat: "38.99721475"
,
name: "Parking Lot 4n"
code: ""
number: "p234"
lng: "-76.9432485836194"
lat: "38.9975986"
,
name: "Parking Lot 5"
code: ""
number: "p126"
lng: "-76.9409605"
lat: "38.9897447"
,
name: "Parking Lot 6"
code: ""
number: "p127"
lng: "-76.9441223999729"
lat: "38.9948837"
,
name: "Parking Lot 9b"
code: ""
number: "p128"
lng: "-76.9379480992313"
lat: "38.99443435"
,
name: "Parking Lot 9c"
code: ""
number: "p129"
lng: "-76.9392026889215"
lat: "38.994263"
,
name: "Parking Lot A"
code: ""
number: "p130"
lng: "-76.9445554411228"
lat: "38.98430415"
,
name: "Parking Lot A*1"
code: ""
number: "p131"
lng: "-76.94812275"
lat: "38.9862722"
,
name: "Parking Lot B"
code: ""
number: "p135"
lng: "-76.9409603"
lat: "38.98961455"
,
name: "Parking Lot BB"
code: ""
number: "p136"
lng: "-76.9437432752211"
lat: "38.98868945"
,
name: "Parking Lot C1"
code: ""
number: "p137"
lng: "-76.9373082470676"
lat: "38.98662605"
,
name: "Parking Lot C2"
code: ""
number: "p138"
lng: "-76.9386466960901"
lat: "38.98525575"
,
name: "Parking Lot CC1"
code: ""
number: "p140"
lng: "-76.9395347604447"
lat: "38.99236635"
,
name: "Parking Lot CC2"
code: ""
number: "p141"
lng: "-76.940687"
lat: "38.9916206"
,
name: "Parking Lot D"
code: ""
number: "p143"
lng: "-76.94503695"
lat: "38.98512635"
,
name: "Parking Lot D"
code: ""
number: "p257"
lng: "-76.9453188124738"
lat: "38.98552325"
,
name: "Parking Lot E"
code: ""
number: "p125"
lng: "-76.9371846"
lat: "38.98927645"
,
name: "Parking Lot E*1"
code: ""
number: "p146"
lng: "-76.93950635"
lat: "38.98895295"
,
name: "Parking Lot E*2"
code: ""
number: "p147"
lng: "-76.93874685"
lat: "38.98936235"
,
name: "Parking Lot EE"
code: ""
number: "p148"
lng: "-76.93777255"
lat: "38.98956665"
,
name: "Parking Lot F"
code: ""
number: "p149"
lng: "-76.9362193313411"
lat: "38.98280315"
,
name: "Parking Lot FF"
code: ""
number: "p150"
lng: "-76.9399114407625"
lat: "38.99431135"
,
name: "Parking Lot FF2"
code: ""
number: "p151"
lng: "-76.9392185247061"
lat: "38.9931298"
,
name: "Parking Lot G"
code: ""
number: "p258"
lng: "-76.9385438831386"
lat: "38.9919303"
,
name: "Parking Lot H1"
code: ""
number: "p152"
lng: "-76.9408901957625"
lat: "38.98714445"
,
name: "Parking Lot HH1"
code: ""
number: "p155"
lng: "-76.9437705364812"
lat: "38.9871964"
,
name: "Parking Lot HH2"
code: ""
number: "p156"
lng: "-76.9487217884698"
lat: "38.99179395"
,
name: "Parking Lot I*"
code: ""
number: "p158"
lng: "-76.9369211101377"
lat: "38.99243845"
,
name: "Parking Lot II1"
code: ""
number: "p239"
lng: "-76.93916665"
lat: "38.9896806"
,
name: "Parking Lot JJ1"
code: ""
number: "p159"
lng: "-76.9483331340347"
lat: "38.98539355"
,
name: "Parking Lot JJ2"
code: ""
number: "p160"
lng: "-76.9493049661468"
lat: "38.9850641"
,
name: "Parking Lot JJ3"
code: ""
number: "p241"
lng: "-76.9491042944466"
lat: "38.98614505"
,
name: "Parking Lot K*2"
code: ""
number: "p162"
lng: "-76.936078711566"
lat: "38.98664725"
,
name: "Parking Lot K*4"
code: ""
number: "p163"
lng: "-76.9348330271331"
lat: "38.986413"
,
name: "Parking Lot K*5"
code: ""
number: "p250"
lng: "-76.9349324800649"
lat: "38.98521105"
,
name: "Parking Lot K1"
code: ""
number: "p164"
lng: "-8564367.45346"
lat: "4719533.97406"
,
name: "Parking Lot K2"
code: ""
number: "p165"
lng: "-76.9346156087241"
lat: "38.98554405"
,
name: "Parking Lot K4"
code: ""
number: "p235"
lng: "-76.9429211998132"
lat: "38.99640555"
,
name: "Parking Lot K5"
code: ""
number: "p167"
lng: "-76.9363026926599"
lat: "38.98296465"
,
name: "Parking Lot K6"
code: ""
number: "p168"
lng: "-76.9548861803616"
lat: "38.99083205"
,
name: "Parking Lot KK"
code: ""
number: "p169"
lng: "-76.939126849749"
lat: "38.99074565"
,
name: "Parking Lot KK1"
code: ""
number: "p237"
lng: "-76.9390710660194"
lat: "38.99103585"
,
name: "Parking Lot L"
code: ""
number: "p170"
lng: "-76.9390440880309"
lat: "38.985601"
,
name: "Parking Lot L*"
code: ""
number: "p171"
lng: "-76.9395731763922"
lat: "38.98602365"
,
name: "Parking Lot ML*"
code: ""
number: "p254"
lng: "-76.94504546108689"
lat: "38.9820502"
,
name: "Parking Lot MM1"
code: ""
number: "p172"
lng: "-76.9442824224091"
lat: "38.99208515"
,
name: "Parking Lot MM2"
code: ""
number: "p173"
lng: "-76.9416584717757"
lat: "38.9924268"
,
name: "Parking Lot MM3"
code: ""
number: "p174"
lng: "-76.94307075"
lat: "38.99134275"
,
name: "Parking Lot N"
code: ""
number: "p178"
lng: "-76.9483259502209"
lat: "38.992758"
,
name: "Parking Lot N*"
code: ""
number: "p177"
lng: "-76.9496207605017"
lat: "38.99315285"
,
name: "Parking Lot N*"
code: ""
number: "p183"
lng: "-76.9488709100114"
lat: "38.99306485"
,
name: "Parking Lot N*1"
code: ""
number: "p246"
lng: "-76.9399408810552"
lat: "38.982763"
,
name: "Parking Lot N*2"
code: ""
number: "p175"
lng: "-76.9388435512149"
lat: "38.98243265"
,
name: "Parking Lot N*3"
code: ""
number: "p176"
lng: "-76.943423876098"
lat: "38.9826911"
,
name: "Parking Lot N3"
code: ""
number: "p179"
lng: "-76.9455457475625"
lat: "38.9927533"
,
name: "Parking Lot N4"
code: ""
number: "p180"
lng: "-76.94428505"
lat: "38.9925133"
,
name: "Parking Lot N5"
code: ""
number: "p181"
lng: "-76.9321558294979"
lat: "38.9835147"
,
name: "Parking Lot N7"
code: ""
number: "p182"
lng: "-76.9477738109564"
lat: "38.9925869"
,
name: "Parking Lot N9"
code: ""
number: "p184"
lng: "-76.946957493951"
lat: "38.99306985"
,
name: "Parking Lot O1"
code: ""
number: "p185"
lng: "-76.94791655"
lat: "38.98399525"
,
name: "Parking Lot O3"
code: ""
number: "p187"
lng: "-76.9483002446715"
lat: "38.98442185"
,
name: "Parking Lot O4"
code: ""
number: "p188"
lng: "-76.9462971999815"
lat: "38.9843053"
,
name: "Parking Lot O5"
code: ""
number: "p189"
lng: "-76.9477835"
lat: "38.98355155"
,
name: "Parking Lot P*"
code: ""
number: "p191"
lng: "-76.94270605"
lat: "38.9981942"
,
name: "Parking Lot P1"
code: ""
number: "p192"
lng: "-76.9431896439374"
lat: "38.998125"
,
name: "Parking Lot P2"
code: ""
number: "p193"
lng: "-76.9421021500106"
lat: "38.99898715"
,
name: "Parking Lot PP*"
code: ""
number: "p195"
lng: "-76.9403724850234"
lat: "38.99385655"
,
name: "Parking Lot PP1"
code: ""
number: "p196"
lng: "-76.9420853788895"
lat: "38.9933824"
,
name: "Parking Lot PP2"
code: ""
number: "p197"
lng: "-76.9406987273544"
lat: "38.99385655"
,
name: "Parking Lot Q"
code: ""
number: "p198"
lng: "-76.9438760840556"
lat: "38.9911695"
,
name: "Parking Lot Q1"
code: ""
number: "p262"
lng: "-76.9434800720619"
lat: "38.9911726"
,
name: "Parking Lot R*"
code: ""
number: "p199"
lng: "-76.9456938440546"
lat: "38.9893944"
,
name: "Parking Lot R3"
code: ""
number: "p200"
lng: "-76.9455308111322"
lat: "38.9902248"
,
name: "Parking Lot R4"
code: ""
number: "p201"
lng: "-76.95470385"
lat: "38.99062525"
,
name: "Parking Lot RR"
code: ""
number: "p203"
lng: "-76.9409602"
lat: "38.98948445"
,
name: "Parking Lot RR1"
code: ""
number: "p251"
lng: "-76.9344902879538"
lat: "38.9869681"
,
name: "Parking Lot RR2"
code: ""
number: "p265"
lng: "-76.9379107465909"
lat: "38.9948436"
,
name: "Parking Lot RR2"
code: ""
number: "p252"
lng: "-76.9343316115429"
lat: "38.9876128"
,
name: "Parking Lot S3"
code: ""
number: "p204"
lng: "-76.9430233373487"
lat: "38.9925145"
,
name: "Parking Lot S4"
code: ""
number: "p205"
lng: "-76.943739453674"
lat: "38.98246725"
,
name: "Parking Lot S5"
code: ""
number: "p206"
lng: "-76.9494324090596"
lat: "38.9933015"
,
name: "Parking Lot S7"
code: ""
number: "p207"
lng: "-76.9477725756751"
lat: "38.99283285"
,
name: "Parking Lot S8"
code: ""
number: "p208"
lng: "-76.9467035382421"
lat: "38.99288295"
,
name: "Parking Lot SD*"
code: ""
number: "p209"
lng: "-76.9489187705445"
lat: "38.9902444"
,
name: "Parking Lot SS1"
code: ""
number: "p211"
lng: "-76.9441549116248"
lat: "38.9937615"
,
name: "Parking Lot SS2"
code: ""
number: "p212"
lng: "-76.9463360969161"
lat: "38.99397245"
,
name: "Parking Lot SS3"
code: ""
number: "p213"
lng: "-76.94667609821"
lat: "38.9942645"
,
name: "Parking Lot T"
code: ""
number: "p215"
lng: "-76.9377298240488"
lat: "38.98986685"
,
name: "Parking Lot TT"
code: ""
number: "p217"
lng: "-76.94036895"
lat: "38.98852595"
,
name: "Parking Lot U1"
code: ""
number: "p218"
lng: "-76.9437394"
lat: "38.9826911"
,
name: "Parking Lot U6"
code: ""
number: "p260"
lng: "-76.9450241678599"
lat: "38.9828241"
,
name: "Parking Lot U6"
code: ""
number: "p247"
lng: "-76.9449759499812"
lat: "38.9828063"
,
name: "Parking Lot UU"
code: ""
number: "p224"
lng: "-76.9398296251258"
lat: "38.99054885"
,
name: "Parking Lot W"
code: ""
number: "p225"
lng: "-76.9413860371152"
lat: "38.98486265"
,
name: "Parking Lot W1"
code: ""
number: "p244"
lng: "-76.9424845941378"
lat: "38.98494595"
,
name: "Parking Lot XX1"
code: ""
number: "p226"
lng: "-76.935617290379"
lat: "38.9900403"
,
name: "Parking Lot XX2"
code: ""
number: "p227"
lng: "-76.9367968153632"
lat: "38.99147625"
,
name: "Parking Lot XX4"
code: ""
number: "p229"
lng: "-76.937826475598"
lat: "38.99316725"
,
name: "Parking Lot XX5"
code: ""
number: "p233"
lng: "-76.93692095"
lat: "38.9909658"
,
name: "Parking Lot Y"
code: ""
number: "p230"
lng: "-76.9419977518114"
lat: "38.9840286"
,
name: "Parking Lot YC"
code: ""
number: "p231"
lng: "-76.9480388560794"
lat: "38.993677"
,
name: "Parking Lot Z"
code: ""
number: "p240"
lng: "-76.9485514812047"
lat: "38.98813685"
,
name: "Parking Lot Z*"
code: ""
number: "p232"
lng: "-76.9467581"
lat: "38.9887473"
,
name: "Parking Lot Z1"
code: ""
number: "p238"
lng: "-76.9475152"
lat: "38.9890159"
,
name: "Patapsco Building"
code: ""
number: "805"
lng: "-76.9249429909109"
lat: "38.9767149"
,
name: "Patuxent Building"
code: ""
number: "010"
lng: "-76.943483756875"
lat: "38.98863025"
,
name: "Pest Control Trailer"
code: ""
number: "385"
lng: "-76.9335922924701"
lat: "38.98515375"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "130"
lng: "-76.93463845"
lat: "38.9845988"
,
name: "PI:NAME:<NAME>END_PI Sigma Sorority"
code: ""
number: "171"
lng: "-76.9356071135738"
lat: "38.9813563"
,
name: "Physical Distribution Center (Terrapin Trader)"
code: ""
number: "383"
lng: "-76.9292279695603"
lat: "38.9819567"
,
name: "Physics Building"
code: "PHY"
number: "082"
lng: "-76.94007719774"
lat: "38.9886972"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "127"
lng: "-76.936200475"
lat: "38.9846297"
,
name: "Plant Operations and Maintenance"
code: ""
number: "217"
lng: "-76.9340028269637"
lat: "38.98600515"
,
name: "Plant Operations and Maintenance Shop Building"
code: ""
number: "055"
lng: "-76.9334591527876"
lat: "38.98503875"
,
name: "Plant Operations and Maintenance Shop2"
code: ""
number: "101"
lng: "-76.9344044678528"
lat: "38.98657335"
,
name: "Plant Operations and Maintenance Shop3"
code: ""
number: "212"
lng: "-76.9342274025325"
lat: "38.9867869"
,
name: "Plant Operations and Maintenance Shops"
code: ""
number: "006"
lng: "-76.93452175"
lat: "38.9858077"
,
name: "Plant Operations and Maintenance Warehouse"
code: ""
number: "012"
lng: "-76.9339846304955"
lat: "38.98636865"
,
name: "Plant Science Building"
code: "PLS"
number: "036"
lng: "-76.9413596237531"
lat: "38.98873715"
,
name: "Pocomoke Building"
code: ""
number: "007"
lng: "-76.9371601416079"
lat: "38.98295055"
,
name: "Police Impound"
code: ""
number: "p249"
lng: "-76.9312547643577"
lat: "38.98301005"
,
name: "Police Substation"
code: ""
number: "018"
lng: "-76.9355837584007"
lat: "38.9825402"
,
name: "Potomac Building"
code: ""
number: "092"
lng: "-76.93830195"
lat: "38.9903918"
,
name: "PreinkPI:NAME:<NAME>END_PI Field House"
code: "PKT"
number: "054"
lng: "-76.9461320708538"
lat: "38.9844511"
,
name: "Presidents Residence"
code: ""
number: "164"
lng: "-76.9523499501432"
lat: "38.9885031"
,
name: "PI:NAME:<NAME>END_PI"
code: "PGG"
number: "021"
lng: "-76.941826725"
lat: "38.9825857"
,
name: "Pump PI:NAME:<NAME>END_PI"
code: ""
number: "200"
lng: "-76.9465122"
lat: "38.9886431"
,
name: "PI:NAME:<NAME>END_PI"
code: "QAN"
number: "061"
lng: "-76.9460068545931"
lat: "38.98519025"
,
name: "PI:NAME:<NAME>END_PI"
code: "ARM"
number: "078"
lng: "-76.9389662139866"
lat: "38.9860268"
,
name: "Recreation Artificial Turf Field"
code: ""
number: "d102"
lng: "-76.9391820716749"
lat: "38.9954899"
,
name: "Recycling Center"
code: ""
number: "107"
lng: "-76.9372141594106"
lat: "38.99566315"
,
name: "Regents Drive Parking Garage"
code: ""
number: "202"
lng: "-76.9414582777038"
lat: "38.9897313"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "398"
lng: "-76.942996197787"
lat: "38.997127"
,
name: "PI:NAME:<NAME>END_PI"
code: "RIT"
number: "004"
lng: "-76.936456447182"
lat: "38.98504805"
,
name: "PI:NAME:<NAME>END_PI Stadium"
code: ""
number: "409"
lng: "-76.9397458168949"
lat: "38.99629445"
,
name: "PI:NAME:<NAME>END_PIl Center for Jewish Life"
code: ""
number: "c105"
lng: "-76.9487503080679"
lat: "38.98270895"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "080"
lng: "-76.9376116836577"
lat: "38.9853423"
,
name: "Route One Annex"
code: ""
number: "e8400"
lng: "-76.9332763982874"
lat: "38.99373045"
,
name: "PI:NAME:<NAME>END_PI IV Alumni Center"
code: ""
number: "407"
lng: "-76.9490713634912"
lat: "38.9894604"
,
name: "Satellite Central Utilities Building 1"
code: "SCUB 1"
number: "019"
lng: "-76.9404483999786"
lat: "38.98202525"
,
name: "Satellite Central Utilities Building 2"
code: "SCUB 2"
number: "067"
lng: "-76.9446545340067"
lat: "38.9835376"
,
name: "Satellite Central Utilities Building 3"
code: "SCUB 3"
number: "392"
lng: "-76.9459512158262"
lat: "38.98909655"
,
name: "Satellite Central Utilities Building 4"
code: "SCUB 4"
number: "405"
lng: "-76.9383176"
lat: "38.98979345"
,
name: "School of Public Health"
code: "SPH"
number: "255"
lng: "-76.9431633838994"
lat: "38.9934922"
,
name: "Security Booth (Campus Drive)"
code: ""
number: "295"
lng: "-76.936510313403"
lat: "38.98857565"
,
name: "Security Booth (Stadium Drive)"
code: ""
number: "297"
lng: "-76.95045855"
lat: "38.99198405"
,
name: "Service Building (University of Maryland, Police)"
code: ""
number: "003"
lng: "-76.9361406358543"
lat: "38.98597645"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "109"
lng: "-76.9414407668993"
lat: "38.99169565"
,
name: "ShipPI:NAME:<NAME>END_PI Field House (Baseball)"
code: ""
number: "159"
lng: "-76.9441134376408"
lat: "38.9889419"
,
name: "Shoemaker Building"
code: "SHM"
number: "037"
lng: "-76.942745495777"
lat: "38.9839376"
,
name: "ShPI:NAME:<NAME>END_PI Laboratory"
code: "SHR"
number: "075"
lng: "-76.94191685"
lat: "38.98727505"
,
name: "Shuttle Bus Facility"
code: ""
number: "p123"
lng: "-76.9365504141656"
lat: "38.9955167"
,
name: "Shuttle Bus Facility"
code: ""
number: "013"
lng: "-76.9338891741985"
lat: "38.98692845"
,
name: "Shuttle Bus Trailer 2"
code: ""
number: "410"
lng: "-76.93407295"
lat: "38.9870785"
,
name: "Sigma Delta Tau Sorority"
code: ""
number: "174"
lng: "-76.93587565"
lat: "38.9806496"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "135"
lng: "-76.93514475"
lat: "38.9832453"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "133"
lng: "-76.9341870726646"
lat: "38.98356725"
,
name: "Skinner Building"
code: "SKN"
number: "044"
lng: "-76.9418413304917"
lat: "38.98480955"
,
name: "Somerset Hall (Residence Hall)"
code: "SOM"
number: "063"
lng: "-76.9455473"
lat: "38.9850026"
,
name: "South Campus Commons 1"
code: ""
number: "996"
lng: "-76.9429384672295"
lat: "38.98211405"
,
name: "South Campus Commons 2"
code: ""
number: "997"
lng: "-76.942933891664"
lat: "38.9828397"
,
name: "South Campus Commons 3"
code: ""
number: "998"
lng: "-76.9397267748109"
lat: "38.98147685"
,
name: "South Campus Commons 4"
code: ""
number: "999"
lng: "-76.9414232588701"
lat: "38.98128715"
,
name: "South Campus Commons 5"
code: ""
number: "974"
lng: "-76.94470195"
lat: "38.98275075"
,
name: "South Campus Commons 6"
code: ""
number: "975"
lng: "-76.9446791864948"
lat: "38.98216555"
,
name: "South Campus Commons 7"
code: ""
number: "281"
lng: "-76.9445591280214"
lat: "38.9815547"
,
name: "South Campus Dining Hall"
code: "SDH"
number: "026"
lng: "-76.9436837393588"
lat: "38.983048"
,
name: "South Gate (Regents Dr. and Rt. 1)"
code: ""
number: "401"
lng: "-76.938869"
lat: "38.9823717"
,
name: "Special Education Trailer One"
code: ""
number: "104"
lng: "-76.94795265"
lat: "38.9869185"
,
name: "Special Education Trailer Two"
code: ""
number: "105"
lng: "-76.94795265"
lat: "38.9867484"
,
name: "Special Services Office Building"
code: ""
number: "100"
lng: "-76.93443985"
lat: "38.98651785"
,
name: "St. Mary's Hall (Residence Hall)"
code: "STM"
number: "062"
lng: "-76.9455983683182"
lat: "38.9870071"
,
name: "Stadium Drive Parking Garage"
code: "SDG"
number: "218"
lng: "-76.9489997913519"
lat: "38.9910407"
,
name: "PI:NAME:<NAME>END_PI"
code: "SQH"
number: "233"
lng: "-76.943749743933"
lat: "38.9820745"
,
name: "PI:NAME:<NAME>END_PI"
code: "SYM"
number: "076"
lng: "-76.9406455474374"
lat: "38.9870811"
,
name: "PI:NAME:<NAME>END_PI (Residence Hall)"
code: "TAL"
number: "030"
lng: "-76.9422561"
lat: "38.9833387"
,
name: "PI:NAME:<NAME>END_PI"
code: "TLF"
number: "043"
lng: "-76.9430948392412"
lat: "38.98484635"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "128"
lng: "-76.93563395"
lat: "38.9846469"
,
name: "Tawes Fine Arts Building"
code: "TWS"
number: "141"
lng: "-76.9483238494655"
lat: "38.98598645"
,
name: "Technology Advancement Building"
code: ""
number: "387"
lng: "-76.938615024697"
lat: "38.99251055"
,
name: "Technology Ventures Building"
code: ""
number: "806"
lng: "-76.9255078851059"
lat: "38.97964495"
,
name: "Temporary Building"
code: ""
number: "208"
lng: "-76.9337296097556"
lat: "38.9852606"
,
name: "Temporary Building1"
code: ""
number: "207"
lng: "-76.9327854891442"
lat: "38.9836276"
,
name: "Temporary Building2"
code: ""
number: "204"
lng: "-76.9328960630658"
lat: "38.983738"
,
name: "Terrapin Trail Parking Garage"
code: ""
number: "403"
lng: "-76.9433528356638"
lat: "38.99497"
,
name: "The Diner"
code: ""
number: "257"
lng: "-76.9466253202392"
lat: "38.9925679"
,
name: "The Domain"
code: ""
number: "a004"
lng: "-76.9493582912198"
lat: "38.98418695"
,
name: "The Varsity"
code: ""
number: "a003"
lng: "-76.9344594915205"
lat: "38.99146725"
,
name: "Track/Soccer Ticket Booth"
code: ""
number: "389"
lng: "-76.9498700750045"
lat: "38.9874593"
,
name: "PI:NAME:<NAME>END_PI (Dairy/Visitor Center)"
code: "TUR"
number: "079"
lng: "-76.9373371743758"
lat: "38.98614065"
,
name: "PI:NAME:<NAME>END_PI"
code: "TYD"
number: "042"
lng: "-76.9440007655894"
lat: "38.9848516"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "361"
lng: "-76.9478028839735"
lat: "38.98970955"
,
name: "UMUC Hotel"
code: ""
number: "348"
lng: "-76.9552482403172"
lat: "38.9860612"
,
name: "UMUC Inn and Conference Center (ICC)"
code: "ICC"
number: "345"
lng: "-76.9540770364453"
lat: "38.9860659"
,
name: "UMUC Student and Faculty Services Center"
code: "SFSC"
number: "346"
lng: "-76.9533396302397"
lat: "38.9866594"
,
name: "UMUC/University College Garage (PGUC)"
code: "PGUC"
number: "347"
lng: "-76.9544177389556"
lat: "38.98699405"
,
name: "Union Lane Parking Garage"
code: "ULG"
number: "179"
lng: "-76.9458968038597"
lat: "38.98840855"
,
name: "University Baptist Church"
code: ""
number: "c100"
lng: "-76.95374930076"
lat: "38.98425975"
,
name: "University Hills Apartments1"
code: ""
number: "288"
lng: "-76.9562455643617"
lat: "38.98514415"
,
name: "University Hills Apartments2"
code: ""
number: "287"
lng: "-76.9572075604853"
lat: "38.9850375"
,
name: "University Hills Apartments3"
code: ""
number: "286"
lng: "-76.9582383073659"
lat: "38.98494235"
,
name: "University Hills Apartments4"
code: ""
number: "292"
lng: "-76.9583539620815"
lat: "38.98442205"
,
name: "University Hills Apartments5"
code: ""
number: "291"
lng: "-76.9573552914719"
lat: "38.9844605"
,
name: "University Hills Apartments6"
code: ""
number: "290"
lng: "-76.9562790737231"
lat: "38.9843953"
,
name: "University Hills Apartments7"
code: ""
number: "289"
lng: "-76.9558676954921"
lat: "38.98447435"
,
name: "University United Methodist Church"
code: ""
number: "c101"
lng: "-76.9513539156316"
lat: "38.98421265"
,
name: "University View"
code: ""
number: "a001"
lng: "-76.9344298975591"
lat: "38.99256455"
,
name: "University View II"
code: ""
number: "a002"
lng: "-76.9336210376858"
lat: "38.99251945"
,
name: "Univesity Research Center (North)"
code: ""
number: "F05"
lng: "-76.9222887214757"
lat: "38.9722927"
,
name: "Univesity Research Center (South)"
code: ""
number: "F06"
lng: "-76.9218988073354"
lat: "38.970879"
,
name: "PI:NAME:<NAME>END_PI"
code: "VMH"
number: "039"
lng: "-76.9470382916646"
lat: "38.9830467"
,
name: "Varsity Sports Teamhouse"
code: ""
number: "158"
lng: "-76.9451814170622"
lat: "38.99006865"
,
name: "Washington Hall (Residence Hall)"
code: "WSH"
number: "023"
lng: "-76.9413924749198"
lat: "38.9818076"
,
name: "WPI:NAME:<NAME>END_PIico Hall (Residence Hall)"
code: "WIC"
number: "069"
lng: "-76.9458442795105"
lat: "38.9837427"
,
name: "Wind Tunnel Building"
code: "WTU"
number: "081"
lng: "-76.9368484609687"
lat: "38.9899002"
,
name: "WPI:NAME:<NAME>END_PIs Hall"
code: "WDS"
number: "047"
lng: "-76.9418451233709"
lat: "38.9851106"
,
name: "WPI:NAME:<NAME>END_PIster Hall (Residence Hall)"
code: "WOR"
number: "051"
lng: "-76.9449993420465"
lat: "38.98467285"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "139"
lng: "-76.9371918048901"
lat: "38.9832052"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "138"
lng: "-76.9366895"
lat: "38.9832584"
,
name: "PI:NAME:<NAME>END_PI"
code: ""
number: "137"
lng: "-76.9361831249379"
lat: "38.98325535"
]
|
[
{
"context": "y is not required when using this method.\r\n@author Nathan Klick\r\n@copyright QRef 2012\r\n###\r\nclass AuthorizeAppleP",
"end": 231,
"score": 0.9997896552085876,
"start": 219,
"tag": "NAME",
"value": "Nathan Klick"
}
] | Workspace/QRef/NodeServer/src/specification/request/rpc/AuthorizeAppleProductRequest.coffee | qrefdev/qref | 0 | RpcRequest = require('../../../serialization/RpcRequest')
###
Object sent as the body of an HTTP POST request to perform user authentication.
@note The token property is not required when using this method.
@author Nathan Klick
@copyright QRef 2012
###
class AuthorizeAppleProductRequest extends RpcRequest
###
@property [ObjectId] Required - The ID of the product for which to authorize a purchase.
###
product: null
###
@property [String] Required - The base64 encoded receipt block from apple IAP libraries.
###
receipt: null
###
@property [String] Optional - The tailNumber to assign to the newly created checklist.
###
tailNumber: null
module.exports = exports = AuthorizeAppleProductRequest | 153132 | RpcRequest = require('../../../serialization/RpcRequest')
###
Object sent as the body of an HTTP POST request to perform user authentication.
@note The token property is not required when using this method.
@author <NAME>
@copyright QRef 2012
###
class AuthorizeAppleProductRequest extends RpcRequest
###
@property [ObjectId] Required - The ID of the product for which to authorize a purchase.
###
product: null
###
@property [String] Required - The base64 encoded receipt block from apple IAP libraries.
###
receipt: null
###
@property [String] Optional - The tailNumber to assign to the newly created checklist.
###
tailNumber: null
module.exports = exports = AuthorizeAppleProductRequest | true | RpcRequest = require('../../../serialization/RpcRequest')
###
Object sent as the body of an HTTP POST request to perform user authentication.
@note The token property is not required when using this method.
@author PI:NAME:<NAME>END_PI
@copyright QRef 2012
###
class AuthorizeAppleProductRequest extends RpcRequest
###
@property [ObjectId] Required - The ID of the product for which to authorize a purchase.
###
product: null
###
@property [String] Required - The base64 encoded receipt block from apple IAP libraries.
###
receipt: null
###
@property [String] Optional - The tailNumber to assign to the newly created checklist.
###
tailNumber: null
module.exports = exports = AuthorizeAppleProductRequest |
[
{
"context": "s\",\"Prov\",\"Eccl\",\"Song\",\"Isa\",\"Jer\",\"Lam\",\"Ezek\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"",
"end": 505,
"score": 0.7975887656211853,
"start": 502,
"tag": "NAME",
"value": "Dan"
},
{
"context": "ov\",\"Eccl\",\"Song\",\"Isa\"... | lib/bible-tools/lib/Bible-Passage-Reference-Parser/src/nl/spec.coffee | saiba-mais/bible-lessons | 149 | bcv_parser = require("../../js/nl_bcv_parser.js").bcv_parser
describe "Parsing", ->
p = {}
beforeEach ->
p = new bcv_parser
p.options.osis_compaction_strategy = "b"
p.options.sequence_combination_strategy = "combine"
it "should round-trip OSIS references", ->
p.set_options osis_compaction_strategy: "bc"
books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
for book in books
bc = book + ".1"
bcv = bc + ".1"
bcv_range = bcv + "-" + bc + ".2"
expect(p.parse(bc).osis()).toEqual bc
expect(p.parse(bcv).osis()).toEqual bcv
expect(p.parse(bcv_range).osis()).toEqual bcv_range
it "should round-trip OSIS Apocrypha references", ->
p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
p.include_apocrypha true
books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
for book in books
bc = book + ".1"
bcv = bc + ".1"
bcv_range = bcv + "-" + bc + ".2"
expect(p.parse(bc).osis()).toEqual bc
expect(p.parse(bcv).osis()).toEqual bcv
expect(p.parse(bcv_range).osis()).toEqual bcv_range
p.set_options ps151_strategy: "bc"
expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
p.include_apocrypha false
for book in books
bc = book + ".1"
expect(p.parse(bc).osis()).toEqual ""
it "should handle a preceding character", ->
expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
expect(p.parse("1Ps 1").osis()).toEqual ""
expect(p.parse("11Sam 1").osis()).toEqual ""
describe "Localized book Gen (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gen (nl)", ->
`
expect(p.parse("Eerste Mozes 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1e. Mozes 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Beresjiet 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1. Mozes 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1e Mozes 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I. Mozes 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 Mozes 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Genesis 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I Mozes 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Gn 1:1").osis()).toEqual("Gen.1.1")
p.include_apocrypha(false)
expect(p.parse("EERSTE MOZES 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1E. MOZES 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("BERESJIET 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1. MOZES 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1E MOZES 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I. MOZES 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 MOZES 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("GENESIS 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I MOZES 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("GN 1:1").osis()).toEqual("Gen.1.1")
`
true
describe "Localized book Exod (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Exod (nl)", ->
`
expect(p.parse("Tweede Mozes 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2e. Mozes 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II. Mozes 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2. Mozes 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2e Mozes 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II Mozes 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 Mozes 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Exodus 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Sjemot 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Ex 1:1").osis()).toEqual("Exod.1.1")
p.include_apocrypha(false)
expect(p.parse("TWEEDE MOZES 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2E. MOZES 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II. MOZES 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2. MOZES 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2E MOZES 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II MOZES 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 MOZES 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EXODUS 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("SJEMOT 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EX 1:1").osis()).toEqual("Exod.1.1")
`
true
describe "Localized book Bel (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bel (nl)", ->
`
expect(p.parse("Bel en de draak 1:1").osis()).toEqual("Bel.1.1")
expect(p.parse("Bel 1:1").osis()).toEqual("Bel.1.1")
`
true
describe "Localized book Lev (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lev (nl)", ->
`
expect(p.parse("Derde Mozes 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III. Mozes 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3e. Mozes 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III Mozes 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Leviticus 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3. Mozes 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3e Mozes 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 Mozes 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Vajikra 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Wajikra 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Lv 1:1").osis()).toEqual("Lev.1.1")
p.include_apocrypha(false)
expect(p.parse("DERDE MOZES 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III. MOZES 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3E. MOZES 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III MOZES 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEVITICUS 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3. MOZES 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3E MOZES 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 MOZES 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("VAJIKRA 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("WAJIKRA 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LV 1:1").osis()).toEqual("Lev.1.1")
`
true
describe "Localized book Num (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Num (nl)", ->
`
expect(p.parse("Vierde Mozes 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV. Mozes 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4. Mozes 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Bamidbar 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Bemidbar 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV Mozes 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 Mozes 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Numberi 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Numeri 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Nu 1:1").osis()).toEqual("Num.1.1")
p.include_apocrypha(false)
expect(p.parse("VIERDE MOZES 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV. MOZES 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4. MOZES 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("BAMIDBAR 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("BEMIDBAR 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV MOZES 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 MOZES 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NUMBERI 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NUMERI 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NU 1:1").osis()).toEqual("Num.1.1")
`
true
describe "Localized book Sir (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sir (nl)", ->
`
expect(p.parse("Wijsheid van Jozua Ben Sirach 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Wijsheid van Jezus Sirach 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Wijsheid van Ben Sirach 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Ecclesiasticus 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Jezus Sirach 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Sirach 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Sir 1:1").osis()).toEqual("Sir.1.1")
`
true
describe "Localized book Wis (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Wis (nl)", ->
`
expect(p.parse("De wijsheid van Salomo 1:1").osis()).toEqual("Wis.1.1")
expect(p.parse("Het boek der wijsheid 1:1").osis()).toEqual("Wis.1.1")
expect(p.parse("Wijsheid van Salomo 1:1").osis()).toEqual("Wis.1.1")
expect(p.parse("Wijsheid 1:1").osis()).toEqual("Wis.1.1")
expect(p.parse("Wis 1:1").osis()).toEqual("Wis.1.1")
`
true
describe "Localized book Lam (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lam (nl)", ->
`
expect(p.parse("Klaagliederen 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Klaagl 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Kl 1:1").osis()).toEqual("Lam.1.1")
p.include_apocrypha(false)
expect(p.parse("KLAAGLIEDEREN 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("KLAAGL 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("KL 1:1").osis()).toEqual("Lam.1.1")
`
true
describe "Localized book EpJer (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: EpJer (nl)", ->
`
expect(p.parse("Brief van Jeremia 1:1").osis()).toEqual("EpJer.1.1")
expect(p.parse("EpJer 1:1").osis()).toEqual("EpJer.1.1")
`
true
describe "Localized book Rev (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Rev (nl)", ->
`
expect(p.parse("Openbaring van Johannes 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Openbaringen 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Openbaring 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Apocalyps 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Openb 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Apc 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Apk 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Op 1:1").osis()).toEqual("Rev.1.1")
p.include_apocrypha(false)
expect(p.parse("OPENBARING VAN JOHANNES 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("OPENBARINGEN 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("OPENBARING 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("APOCALYPS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("OPENB 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("APC 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("APK 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("OP 1:1").osis()).toEqual("Rev.1.1")
`
true
describe "Localized book PrMan (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PrMan (nl)", ->
`
expect(p.parse("Manasse 1:1").osis()).toEqual("PrMan.1.1")
expect(p.parse("PrMan 1:1").osis()).toEqual("PrMan.1.1")
expect(p.parse("Man 1:1").osis()).toEqual("PrMan.1.1")
`
true
describe "Localized book Deut (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Deut (nl)", ->
`
expect(p.parse("Deuteronomium 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Vijfde Mozes 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. Mozes 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Dewariem 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V. Mozes 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 Mozes 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V Mozes 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Dt 1:1").osis()).toEqual("Deut.1.1")
p.include_apocrypha(false)
expect(p.parse("DEUTERONOMIUM 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("VIJFDE MOZES 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. MOZES 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DEWARIEM 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V. MOZES 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 MOZES 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V MOZES 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DT 1:1").osis()).toEqual("Deut.1.1")
`
true
describe "Localized book Josh (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Josh (nl)", ->
`
expect(p.parse("Jozua 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("Josh 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("Joz 1:1").osis()).toEqual("Josh.1.1")
p.include_apocrypha(false)
expect(p.parse("JOZUA 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOZ 1:1").osis()).toEqual("Josh.1.1")
`
true
describe "Localized book Judg (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Judg (nl)", ->
`
expect(p.parse("Richteren 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Rechters 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Richtere 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Recht 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Richt 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Re 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Ri 1:1").osis()).toEqual("Judg.1.1")
p.include_apocrypha(false)
expect(p.parse("RICHTEREN 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("RECHTERS 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("RICHTERE 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("RECHT 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("RICHT 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("RE 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("RI 1:1").osis()).toEqual("Judg.1.1")
`
true
describe "Localized book Ruth (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ruth (nl)", ->
`
expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("Rt 1:1").osis()).toEqual("Ruth.1.1")
p.include_apocrypha(false)
expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("RT 1:1").osis()).toEqual("Ruth.1.1")
`
true
describe "Localized book 1Esd (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Esd (nl)", ->
`
expect(p.parse("Eerste Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("Derde Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("Eerste Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("III. Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1e. Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3e. Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("Derde Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("III Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1. Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1e Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3. Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3e Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("I. Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("III. Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1 Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1e. Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3 Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3e. Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("I Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("III Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1. Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1e Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3. Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3e Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("I. Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1 Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3 Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("I Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1Esd 1:1").osis()).toEqual("1Esd.1.1")
`
true
# 2Esd: every Dutch alias/abbreviation below must resolve to OSIS "2Esd".
# NOTE(review): this file appears machine-generated; the backtick region embeds
# raw JavaScript (presumably for compile speed), and the trailing `true`
# presumably gives the spec a cheap return value — confirm before hand-editing.
describe "Localized book 2Esd (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Esd (nl)", ->
		`
		expect(p.parse("Tweede Esdras 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("Vierde Esdras 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("Tweede Ezra 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("Vierde Ezra 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("2e. Esdras 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("II. Esdras 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("IV. Esdras 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("2. Esdras 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("2e Esdras 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("4. Esdras 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("II Esdras 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("IV Esdras 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("2 Esdras 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("2e. Ezra 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("4 Esdras 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("II. Ezra 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("IV. Ezra 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("2. Ezra 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("2e Ezra 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("4. Ezra 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("II Ezra 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("IV Ezra 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("2 Ezra 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("4 Ezra 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("2Esd 1:1").osis()).toEqual("2Esd.1.1")
		`
		true
# Isa: Dutch aliases for Isaiah must resolve to OSIS "Isa".
describe "Localized book Isa (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Isa (nl)", ->
		`
		expect(p.parse("Jesaja 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Jes 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Js 1:1").osis()).toEqual("Isa.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("JESAJA 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("JES 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("JS 1:1").osis()).toEqual("Isa.1.1")
		`
		true
# 2Sam: Dutch aliases (incl. "Samuël" with diaeresis) must resolve to OSIS "2Sam".
describe "Localized book 2Sam (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Sam (nl)", ->
		`
		expect(p.parse("Tweede Samuel 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("Tweede Samuël 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2e. Samuel 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2e. Samuël 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("II. Samuel 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("II. Samuël 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("Tweede Sam 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. Samuel 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. Samuël 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2e Samuel 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2e Samuël 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("II Samuel 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("II Samuël 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("Samuel II 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 Samuel 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 Samuël 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2e. Sam 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("II. Sam 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. Sam 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2e Sam 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("II Sam 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 Sam 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2Sam 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 S 1:1").osis()).toEqual("2Sam.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("TWEEDE SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("TWEEDE SAMUËL 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2E. SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2E. SAMUËL 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("II. SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("II. SAMUËL 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("TWEEDE SAM 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. SAMUËL 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2E SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2E SAMUËL 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("II SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("II SAMUËL 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("SAMUEL II 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAMUËL 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2E. SAM 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("II. SAM 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. SAM 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2E SAM 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("II SAM 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAM 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2SAM 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 S 1:1").osis()).toEqual("2Sam.1.1")
		`
		true
# 1Sam: Dutch aliases (incl. "Samuël" with diaeresis) must resolve to OSIS "1Sam".
describe "Localized book 1Sam (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Sam (nl)", ->
		`
		expect(p.parse("Eerste Samuel 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("Eerste Samuël 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1e. Samuel 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1e. Samuël 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("Eerste Sam 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. Samuel 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. Samuël 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1e Samuel 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1e Samuël 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("I. Samuel 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("I. Samuël 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 Samuel 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 Samuël 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("I Samuel 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("I Samuël 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("Samuel I 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1e. Sam 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. Sam 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1e Sam 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("I. Sam 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 Sam 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("I Sam 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1Sam 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 S 1:1").osis()).toEqual("1Sam.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("EERSTE SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("EERSTE SAMUËL 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1E. SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1E. SAMUËL 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("EERSTE SAM 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. SAMUËL 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1E SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1E SAMUËL 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("I. SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("I. SAMUËL 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 SAMUËL 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("I SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("I SAMUËL 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("SAMUEL I 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1E. SAM 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. SAM 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1E SAM 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("I. SAM 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 SAM 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("I SAM 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1SAM 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 S 1:1").osis()).toEqual("1Sam.1.1")
		`
		true
# 2Kgs: Dutch aliases ("Koningen", "Kon", "Ko") must resolve to OSIS "2Kgs".
describe "Localized book 2Kgs (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Kgs (nl)", ->
		`
		expect(p.parse("Tweede Koningen 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2e. Koningen 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("II. Koningen 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. Koningen 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2e Koningen 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("II Koningen 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 Koningen 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("Tweede Kon 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("Tweede Ko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2e. Kon 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("II. Kon 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. Kon 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2e Kon 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2e. Ko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("II Kon 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("II. Ko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 Kon 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. Ko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2e Ko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("II Ko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 Ko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2Kgs 1:1").osis()).toEqual("2Kgs.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("TWEEDE KONINGEN 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2E. KONINGEN 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("II. KONINGEN 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. KONINGEN 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2E KONINGEN 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("II KONINGEN 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 KONINGEN 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("TWEEDE KON 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("TWEEDE KO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2E. KON 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("II. KON 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. KON 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2E KON 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2E. KO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("II KON 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("II. KO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 KON 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. KO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2E KO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("II KO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 KO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2KGS 1:1").osis()).toEqual("2Kgs.1.1")
		`
		true
# 1Kgs: Dutch aliases ("Koningen", "Kon", "Ko") must resolve to OSIS "1Kgs".
describe "Localized book 1Kgs (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Kgs (nl)", ->
		`
		expect(p.parse("Eerste Koningen 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1e. Koningen 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. Koningen 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1e Koningen 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("I. Koningen 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 Koningen 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("Eerste Kon 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("I Koningen 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("Eerste Ko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1e. Kon 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. Kon 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1e Kon 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1e. Ko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("I. Kon 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 Kon 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. Ko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1e Ko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("I Kon 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("I. Ko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 Ko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1Kgs 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("I Ko 1:1").osis()).toEqual("1Kgs.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("EERSTE KONINGEN 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1E. KONINGEN 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. KONINGEN 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1E KONINGEN 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("I. KONINGEN 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 KONINGEN 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("EERSTE KON 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("I KONINGEN 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("EERSTE KO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1E. KON 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. KON 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1E KON 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1E. KO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("I. KON 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 KON 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. KO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1E KO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("I KON 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("I. KO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 KO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1KGS 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("I KO 1:1").osis()).toEqual("1Kgs.1.1")
		`
		true
# 2Chr: Dutch aliases ("Kronieken", "Kron", "Kr") must resolve to OSIS "2Chr".
describe "Localized book 2Chr (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Chr (nl)", ->
		`
		expect(p.parse("Tweede Kronieken 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2e. Kronieken 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("II. Kronieken 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2. Kronieken 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2e Kronieken 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("II Kronieken 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 Kronieken 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("Tweede Kron 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2e. Kron 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("II. Kron 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2. Kron 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2e Kron 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("II Kron 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 Kron 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 Kr 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2Chr 1:1").osis()).toEqual("2Chr.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("TWEEDE KRONIEKEN 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2E. KRONIEKEN 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("II. KRONIEKEN 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2. KRONIEKEN 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2E KRONIEKEN 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("II KRONIEKEN 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 KRONIEKEN 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("TWEEDE KRON 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2E. KRON 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("II. KRON 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2. KRON 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2E KRON 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("II KRON 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 KRON 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 KR 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2CHR 1:1").osis()).toEqual("2Chr.1.1")
		`
		true
# 1Chr: Dutch aliases ("Kronieken", "Kron", "Kr") must resolve to OSIS "1Chr".
describe "Localized book 1Chr (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Chr (nl)", ->
		`
		expect(p.parse("Eerste Kronieken 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1e. Kronieken 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1. Kronieken 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1e Kronieken 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("I. Kronieken 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 Kronieken 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("Eerste Kron 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("I Kronieken 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1e. Kron 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1. Kron 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1e Kron 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("I. Kron 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 Kron 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("I Kron 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 Kr 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1Chr 1:1").osis()).toEqual("1Chr.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("EERSTE KRONIEKEN 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1E. KRONIEKEN 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1. KRONIEKEN 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1E KRONIEKEN 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("I. KRONIEKEN 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 KRONIEKEN 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("EERSTE KRON 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("I KRONIEKEN 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1E. KRON 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1. KRON 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1E KRON 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("I. KRON 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 KRON 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("I KRON 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 KR 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1CHR 1:1").osis()).toEqual("1Chr.1.1")
		`
		true
# Ezra: Dutch aliases must resolve to OSIS "Ezra".
describe "Localized book Ezra (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezra (nl)", ->
		`
		expect(p.parse("Ezra 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("Ezr 1:1").osis()).toEqual("Ezra.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("EZRA 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("EZR 1:1").osis()).toEqual("Ezra.1.1")
		`
		true
# Neh: Dutch aliases must resolve to OSIS "Neh".
describe "Localized book Neh (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Neh (nl)", ->
		`
		expect(p.parse("Nehemia 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("Neh 1:1").osis()).toEqual("Neh.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("NEHEMIA 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("NEH 1:1").osis()).toEqual("Neh.1.1")
		`
		true
# GkEsth (Greek Esther, apocryphal): Dutch aliases must resolve to OSIS "GkEsth".
describe "Localized book GkEsth (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: GkEsth (nl)", ->
		`
		// NOTE(review): "\(" in a JS string literal is just "(", so this line is
		// runtime-identical to the next — presumably a generator artifact.
		expect(p.parse("Ester \(Grieks\) 1:1").osis()).toEqual("GkEsth.1.1")
		expect(p.parse("Ester (Grieks) 1:1").osis()).toEqual("GkEsth.1.1")
		expect(p.parse("Ester (Gr.) 1:1").osis()).toEqual("GkEsth.1.1")
		expect(p.parse("Ester (Gr) 1:1").osis()).toEqual("GkEsth.1.1")
		expect(p.parse("Est gr 1:1").osis()).toEqual("GkEsth.1.1")
		expect(p.parse("GkEsth 1:1").osis()).toEqual("GkEsth.1.1")
		`
		true
# Esth: Dutch aliases must resolve to OSIS "Esth".
describe "Localized book Esth (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Esth (nl)", ->
		`
		expect(p.parse("Esther 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("Ester 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("Esth 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("Est 1:1").osis()).toEqual("Esth.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("ESTHER 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("ESTER 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("ESTH 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("EST 1:1").osis()).toEqual("Esth.1.1")
		`
		true
# Job: Dutch alias must resolve to OSIS "Job".
describe "Localized book Job (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Job (nl)", ->
		`
		expect(p.parse("Job 1:1").osis()).toEqual("Job.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("JOB 1:1").osis()).toEqual("Job.1.1")
		`
		true
# Ps: Dutch aliases must resolve to OSIS "Ps".
describe "Localized book Ps (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ps (nl)", ->
		`
		expect(p.parse("Psalmen 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("Psalm 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("Ps 1:1").osis()).toEqual("Ps.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("PSALMEN 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("PSALM 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("PS 1:1").osis()).toEqual("Ps.1.1")
		`
		true
# PrAzar (Prayer of Azariah, apocryphal): Dutch alias must resolve to OSIS "PrAzar".
describe "Localized book PrAzar (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: PrAzar (nl)", ->
		`
		expect(p.parse("Gebed van Azarja 1:1").osis()).toEqual("PrAzar.1.1")
		expect(p.parse("PrAzar 1:1").osis()).toEqual("PrAzar.1.1")
		`
		true
# Prov: Dutch aliases must resolve to OSIS "Prov".
describe "Localized book Prov (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Prov (nl)", ->
		`
		expect(p.parse("Spreuken 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("Prov 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("Spr 1:1").osis()).toEqual("Prov.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("SPREUKEN 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("PROV 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("SPR 1:1").osis()).toEqual("Prov.1.1")
		`
		true
# Eccl: Dutch aliases (incl. Hebrew-derived "Koheleth"/"Qoheleth") must resolve to OSIS "Eccl".
describe "Localized book Eccl (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Eccl (nl)", ->
		`
		expect(p.parse("Koheleth 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("Prediker 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("Qoheleth 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("Kohelet 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("Qohelet 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("Eccl 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("Pred 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("Pr 1:1").osis()).toEqual("Eccl.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("KOHELETH 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("PREDIKER 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("QOHELETH 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("KOHELET 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("QOHELET 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("ECCL 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("PRED 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("PR 1:1").osis()).toEqual("Eccl.1.1")
		`
		true
# SgThree (Song of the Three Young Men, apocryphal): Dutch aliases must resolve to OSIS "SgThree".
describe "Localized book SgThree (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: SgThree (nl)", ->
		`
		expect(p.parse("Gezang der drie mannen in het vuur 1:1").osis()).toEqual("SgThree.1.1")
		expect(p.parse("Lied van de drie jongemannen 1:1").osis()).toEqual("SgThree.1.1")
		expect(p.parse("SgThree 1:1").osis()).toEqual("SgThree.1.1")
		`
		true
# Song: Dutch aliases ("Hooglied") and the Latin title must resolve to OSIS "Song".
describe "Localized book Song (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Song (nl)", ->
		`
		expect(p.parse("Canticum canticorum 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("Hooglied 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("Hoogl 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("Song 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("Hl 1:1").osis()).toEqual("Song.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("CANTICUM CANTICORUM 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("HOOGLIED 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("HOOGL 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SONG 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("HL 1:1").osis()).toEqual("Song.1.1")
		`
		true
# Jer: Dutch aliases must resolve to OSIS "Jer".
describe "Localized book Jer (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jer (nl)", ->
		`
		expect(p.parse("Jeremia 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("Jer 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("Jr 1:1").osis()).toEqual("Jer.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("JEREMIA 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("JER 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("JR 1:1").osis()).toEqual("Jer.1.1")
		`
		true
# Ezek: Dutch aliases (incl. "Ezechiël" with diaeresis) must resolve to OSIS "Ezek".
describe "Localized book Ezek (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezek (nl)", ->
		`
		expect(p.parse("Ezechiel 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("Ezechiël 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("Ezech 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("Ezek 1:1").osis()).toEqual("Ezek.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("EZECHIEL 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("EZECHIËL 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("EZECH 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("EZEK 1:1").osis()).toEqual("Ezek.1.1")
		`
		true
# Dan: Dutch aliases (incl. "Daniël" with diaeresis) must resolve to OSIS "Dan".
describe "Localized book Dan (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Dan (nl)", ->
		`
		expect(p.parse("Daniel 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("Daniël 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("Dan 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("Da 1:1").osis()).toEqual("Dan.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("DANIEL 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("DANIËL 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("DAN 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("DA 1:1").osis()).toEqual("Dan.1.1")
		`
		true
# Hos: Dutch aliases must resolve to OSIS "Hos".
describe "Localized book Hos (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hos (nl)", ->
		`
		expect(p.parse("Hosea 1:1").osis()).toEqual("Hos.1.1")
		expect(p.parse("Hos 1:1").osis()).toEqual("Hos.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("HOSEA 1:1").osis()).toEqual("Hos.1.1")
		expect(p.parse("HOS 1:1").osis()).toEqual("Hos.1.1")
		`
		true
# Joel: Dutch aliases (incl. "Joël" with diaeresis) must resolve to OSIS "Joel".
describe "Localized book Joel (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Joel (nl)", ->
		`
		expect(p.parse("Joel 1:1").osis()).toEqual("Joel.1.1")
		expect(p.parse("Joël 1:1").osis()).toEqual("Joel.1.1")
		expect(p.parse("Jl 1:1").osis()).toEqual("Joel.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("JOEL 1:1").osis()).toEqual("Joel.1.1")
		expect(p.parse("JOËL 1:1").osis()).toEqual("Joel.1.1")
		expect(p.parse("JL 1:1").osis()).toEqual("Joel.1.1")
		`
		true
# Amos: Dutch aliases must resolve to OSIS "Amos".
describe "Localized book Amos (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Amos (nl)", ->
		`
		expect(p.parse("Amos 1:1").osis()).toEqual("Amos.1.1")
		expect(p.parse("Am 1:1").osis()).toEqual("Amos.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("AMOS 1:1").osis()).toEqual("Amos.1.1")
		expect(p.parse("AM 1:1").osis()).toEqual("Amos.1.1")
		`
		true
# Obad: Dutch aliases must resolve to OSIS "Obad".
describe "Localized book Obad (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Obad (nl)", ->
		`
		expect(p.parse("Obadja 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("Obad 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("Ob 1:1").osis()).toEqual("Obad.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("OBADJA 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("OBAD 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("OB 1:1").osis()).toEqual("Obad.1.1")
		`
		true
# Jonah: Dutch aliases must resolve to OSIS "Jonah".
describe "Localized book Jonah (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jonah (nl)", ->
		`
		expect(p.parse("Jonah 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("Jona 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("Jon 1:1").osis()).toEqual("Jonah.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("JONAH 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("JONA 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("JON 1:1").osis()).toEqual("Jonah.1.1")
		`
		true
# Mic: Dutch aliases must resolve to OSIS "Mic".
describe "Localized book Mic (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mic (nl)", ->
		`
		expect(p.parse("Micha 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("Mica 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("Mic 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("Mi 1:1").osis()).toEqual("Mic.1.1")
		// Apocrypha disabled from here on: all-uppercase aliases must still parse.
		p.include_apocrypha(false)
		expect(p.parse("MICHA 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MICA 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MIC 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MI 1:1").osis()).toEqual("Mic.1.1")
		`
		true
# Auto-generated localization spec: Dutch (nl) spellings of Nahum must all
# resolve to OSIS "Nah.1.1", in mixed case and all-uppercase.
describe "Localized book Nah (nl)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strategies isolate book-name matching only.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Nah (nl)", ->
		`
		expect(p.parse("Nahum 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("Nah 1:1").osis()).toEqual("Nah.1.1")
		// Uppercase variants checked with the apocrypha flag toggled off.
		p.include_apocrypha(false)
		expect(p.parse("NAHUM 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("NAH 1:1").osis()).toEqual("Nah.1.1")
		`
		true
# Auto-generated localization spec: Dutch (nl) spellings of Habakkuk must
# all resolve to OSIS "Hab.1.1", in mixed case and all-uppercase.
describe "Localized book Hab (nl)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strategies isolate book-name matching only.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hab (nl)", ->
		`
		expect(p.parse("Habakuk 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("Hab 1:1").osis()).toEqual("Hab.1.1")
		// Uppercase variants checked with the apocrypha flag toggled off.
		p.include_apocrypha(false)
		expect(p.parse("HABAKUK 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("HAB 1:1").osis()).toEqual("Hab.1.1")
		`
		true
# Auto-generated localization spec: Dutch (nl) spellings of Zephaniah
# (both S- and Z- initial forms) must resolve to OSIS "Zeph.1.1".
describe "Localized book Zeph (nl)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strategies isolate book-name matching only.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zeph (nl)", ->
		`
		expect(p.parse("Sefanja 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("Zefanja 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("Zeph 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("Sef 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("Zef 1:1").osis()).toEqual("Zeph.1.1")
		// Uppercase variants checked with the apocrypha flag toggled off.
		p.include_apocrypha(false)
		expect(p.parse("SEFANJA 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("ZEFANJA 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("ZEPH 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("SEF 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("ZEF 1:1").osis()).toEqual("Zeph.1.1")
		`
		true
# Auto-generated localization spec: Dutch (nl) spellings of Haggai —
# including the diaeresis form "Haggaï" — must resolve to OSIS "Hag.1.1".
describe "Localized book Hag (nl)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strategies isolate book-name matching only.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hag (nl)", ->
		`
		expect(p.parse("Haggai 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("Haggaï 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("Hagg 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("Hag 1:1").osis()).toEqual("Hag.1.1")
		// Uppercase variants checked with the apocrypha flag toggled off.
		p.include_apocrypha(false)
		expect(p.parse("HAGGAI 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("HAGGAÏ 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("HAGG 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("HAG 1:1").osis()).toEqual("Hag.1.1")
		`
		true
# Auto-generated localization spec: Dutch (nl) spellings of Zechariah must
# all resolve to OSIS "Zech.1.1", in mixed case and all-uppercase.
describe "Localized book Zech (nl)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strategies isolate book-name matching only.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zech (nl)", ->
		`
		expect(p.parse("Zacharia 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("Zach 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("Zech 1:1").osis()).toEqual("Zech.1.1")
		// Uppercase variants checked with the apocrypha flag toggled off.
		p.include_apocrypha(false)
		expect(p.parse("ZACHARIA 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("ZACH 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("ZECH 1:1").osis()).toEqual("Zech.1.1")
		`
		true
# Auto-generated localization spec: Dutch (nl) spellings of Malachi must
# all resolve to OSIS "Mal.1.1", in mixed case and all-uppercase.
describe "Localized book Mal (nl)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strategies isolate book-name matching only.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mal (nl)", ->
		`
		expect(p.parse("Maleachi 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("Mal 1:1").osis()).toEqual("Mal.1.1")
		// Uppercase variants checked with the apocrypha flag toggled off.
		p.include_apocrypha(false)
		expect(p.parse("MALEACHI 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("MAL 1:1").osis()).toEqual("Mal.1.1")
		`
		true
# Auto-generated localization spec: Dutch (nl) spellings of Matthew —
# including long-form "Evangelie volgens …" titles and diacritic variants
# (é, ü) — must resolve to OSIS "Matt.1.1".
describe "Localized book Matt (nl)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strategies isolate book-name matching only.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Matt (nl)", ->
		`
		expect(p.parse("Evangelie volgens Matteus 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Evangelie volgens Matteüs 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Mattheus 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Mattheüs 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Matthéus 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Matthéüs 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Matteus 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Matteüs 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Matth 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Matt 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Mat 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Mt 1:1").osis()).toEqual("Matt.1.1")
		// Uppercase variants checked with the apocrypha flag toggled off.
		p.include_apocrypha(false)
		expect(p.parse("EVANGELIE VOLGENS MATTEUS 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("EVANGELIE VOLGENS MATTEÜS 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTHEUS 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTHEÜS 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTHÉUS 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTHÉÜS 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTEUS 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTEÜS 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTH 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATT 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MAT 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MT 1:1").osis()).toEqual("Matt.1.1")
		`
		true
# Auto-generated localization spec: Dutch (nl) spellings of Mark —
# including long-form "Evangelie volgens …" titles — must resolve to
# OSIS "Mark.1.1".
describe "Localized book Mark (nl)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strategies isolate book-name matching only.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mark (nl)", ->
		`
		expect(p.parse("Evangelie volgens Marcus 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("Evangelie volgens Markus 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("Marcus 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("Markus 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("Marc 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("Mark 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("Mc 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("Mk 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("Mr 1:1").osis()).toEqual("Mark.1.1")
		// Uppercase variants checked with the apocrypha flag toggled off.
		p.include_apocrypha(false)
		expect(p.parse("EVANGELIE VOLGENS MARCUS 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("EVANGELIE VOLGENS MARKUS 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARCUS 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARKUS 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARC 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARK 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MC 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MK 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MR 1:1").osis()).toEqual("Mark.1.1")
		`
		true
# Auto-generated localization spec: Dutch (nl) spellings of Luke —
# including long-form "Evangelie volgens …" titles — must resolve to
# OSIS "Luke.1.1".
describe "Localized book Luke (nl)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strategies isolate book-name matching only.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Luke (nl)", ->
		`
		expect(p.parse("Evangelie volgens Lucas 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Evangelie volgens Lukas 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Lucas 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Lukas 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Luke 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Luc 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Luk 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Lc 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Lk 1:1").osis()).toEqual("Luke.1.1")
		// Uppercase variants checked with the apocrypha flag toggled off.
		p.include_apocrypha(false)
		expect(p.parse("EVANGELIE VOLGENS LUCAS 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("EVANGELIE VOLGENS LUKAS 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUCAS 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKAS 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKE 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUC 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUK 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LC 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LK 1:1").osis()).toEqual("Luke.1.1")
		`
		true
# Auto-generated localization spec: Dutch (nl) spellings of 1 John —
# ordinal forms "Eerste", "1e.", "1.", "1e", "I.", "1", "I" — must resolve
# to OSIS "1John.1.1".
describe "Localized book 1John (nl)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strategies isolate book-name matching only.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1John (nl)", ->
		`
		expect(p.parse("Eerste Johannes 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1e. Johannes 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1. Johannes 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1e Johannes 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("I. Johannes 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 Johannes 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("Eerste Joh 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("I Johannes 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1e. Joh 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1. Joh 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1e Joh 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("I. Joh 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 Joh 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1John 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("I Joh 1:1").osis()).toEqual("1John.1.1")
		// Uppercase variants checked with the apocrypha flag toggled off.
		p.include_apocrypha(false)
		expect(p.parse("EERSTE JOHANNES 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1E. JOHANNES 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1. JOHANNES 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1E JOHANNES 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("I. JOHANNES 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 JOHANNES 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("EERSTE JOH 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("I JOHANNES 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1E. JOH 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1. JOH 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1E JOH 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("I. JOH 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 JOH 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1JOHN 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("I JOH 1:1").osis()).toEqual("1John.1.1")
		`
		true
# Auto-generated localization spec: Dutch (nl) spellings of 2 John —
# ordinal forms "Tweede", "2e.", "II.", "2.", "2e", "II", "2" — must
# resolve to OSIS "2John.1.1".
describe "Localized book 2John (nl)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strategies isolate book-name matching only.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2John (nl)", ->
		`
		expect(p.parse("Tweede Johannes 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2e. Johannes 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("II. Johannes 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2. Johannes 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2e Johannes 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("II Johannes 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 Johannes 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("Tweede Joh 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2e. Joh 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("II. Joh 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2. Joh 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2e Joh 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("II Joh 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 Joh 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2John 1:1").osis()).toEqual("2John.1.1")
		// Uppercase variants checked with the apocrypha flag toggled off.
		p.include_apocrypha(false)
		expect(p.parse("TWEEDE JOHANNES 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2E. JOHANNES 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("II. JOHANNES 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2. JOHANNES 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2E JOHANNES 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("II JOHANNES 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 JOHANNES 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("TWEEDE JOH 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2E. JOH 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("II. JOH 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2. JOH 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2E JOH 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("II JOH 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 JOH 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2JOHN 1:1").osis()).toEqual("2John.1.1")
		`
		true
# Auto-generated localization spec: Dutch (nl) spellings of 3 John —
# ordinal forms "Derde", "III.", "3e.", "III", "3.", "3e", "3" — must
# resolve to OSIS "3John.1.1".
describe "Localized book 3John (nl)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strategies isolate book-name matching only.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3John (nl)", ->
		`
		expect(p.parse("Derde Johannes 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("III. Johannes 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3e. Johannes 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("III Johannes 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3. Johannes 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3e Johannes 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 Johannes 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("Derde Joh 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("III. Joh 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3e. Joh 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("III Joh 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3. Joh 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3e Joh 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 Joh 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3John 1:1").osis()).toEqual("3John.1.1")
		// Uppercase variants checked with the apocrypha flag toggled off.
		p.include_apocrypha(false)
		expect(p.parse("DERDE JOHANNES 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("III. JOHANNES 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3E. JOHANNES 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("III JOHANNES 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3. JOHANNES 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3E JOHANNES 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 JOHANNES 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("DERDE JOH 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("III. JOH 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3E. JOH 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("III JOH 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3. JOH 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3E JOH 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 JOH 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3JOHN 1:1").osis()).toEqual("3John.1.1")
		`
		true
# Auto-generated localization spec: Dutch (nl) spellings of John (the
# Gospel, no ordinal) must resolve to OSIS "John.1.1".
describe "Localized book John (nl)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strategies isolate book-name matching only.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: John (nl)", ->
		`
		expect(p.parse("Evangelie volgens Johannes 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("Johannes 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("John 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("Joh 1:1").osis()).toEqual("John.1.1")
		// Uppercase variants checked with the apocrypha flag toggled off.
		p.include_apocrypha(false)
		expect(p.parse("EVANGELIE VOLGENS JOHANNES 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JOHANNES 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JOHN 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JOH 1:1").osis()).toEqual("John.1.1")
		`
		true
# Auto-generated localization spec: Dutch (nl) spellings of Acts —
# including long titles "Handelingen van de/der apostelen" — must resolve
# to OSIS "Acts.1.1".
describe "Localized book Acts (nl)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strategies isolate book-name matching only.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Acts (nl)", ->
		`
		expect(p.parse("Handelingen van de apostelen 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("Handelingen der apostelen 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("Handelingen 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("Acts 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("Hand 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("Hnd 1:1").osis()).toEqual("Acts.1.1")
		// Uppercase variants checked with the apocrypha flag toggled off.
		p.include_apocrypha(false)
		expect(p.parse("HANDELINGEN VAN DE APOSTELEN 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("HANDELINGEN DER APOSTELEN 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("HANDELINGEN 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("ACTS 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("HAND 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("HND 1:1").osis()).toEqual("Acts.1.1")
		`
		true
# Auto-generated localization spec: Dutch (nl) spellings of Romans must
# all resolve to OSIS "Rom.1.1", in mixed case and all-uppercase.
describe "Localized book Rom (nl)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strategies isolate book-name matching only.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rom (nl)", ->
		`
		expect(p.parse("Romeinenbrief 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("Romeinen 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("Rom 1:1").osis()).toEqual("Rom.1.1")
		// Uppercase variants checked with the apocrypha flag toggled off.
		p.include_apocrypha(false)
		expect(p.parse("ROMEINENBRIEF 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMEINEN 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROM 1:1").osis()).toEqual("Rom.1.1")
		`
		true
describe "Localized book 2Cor (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Cor (nl)", ->
`
expect(p.parse("Tweede Corinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Corinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Corinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Corinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Corintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Corintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Korinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Korinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Corintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Corintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Korintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Korintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Corinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Corinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Corinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Corinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Corinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Korinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Corinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Corinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Corinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Corinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Corinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Corinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Corintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Corintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Korinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Korinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Corinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Corinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Corinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Corinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Corintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Corintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Corinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Corinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Corinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Corinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Corintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Corintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Corinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Corinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Corintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Corintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Korinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Korinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Corintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Corintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Korintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Korintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Corinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Corinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Corintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Corintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Corintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Corintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Corinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Corinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Corintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Corintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Corintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Corintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Corintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Corintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Korintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Korintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Corinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Korinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Corintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Corintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Corinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Corintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Corintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Corinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Corinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Korinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Corinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Corinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2Cor 1:1").osis()).toEqual("2Cor.1.1")
p.include_apocrypha(false)
expect(p.parse("TWEEDE CORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE CORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE CORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE CORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE CORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE CORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE KORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE KORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE CORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE CORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE KORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE KORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. CORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. CORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. CORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. CORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE CORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE KORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. CORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. CORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E CORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E CORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. CORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. CORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. CORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. CORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. KORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. KORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II CORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II CORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. CORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. CORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. CORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. CORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 CORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 CORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. CORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. CORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. CORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. CORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E CORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E CORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E CORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E CORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E KORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E KORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. CORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. CORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. KORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. KORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II CORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II CORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II CORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II CORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. CORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. CORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 CORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 CORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 CORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 CORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. CORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. CORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E CORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E CORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E KORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E KORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. CORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. KORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II CORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II CORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. CORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 CORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 CORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. CORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E CORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E KORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II CORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 CORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2COR 1:1").osis()).toEqual("2Cor.1.1")
`
true
describe "Localized book 1Cor (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 1Cor (nl)", ->
    `
    // Every accepted Dutch form of "1 Corinthians", in the parser's match order.
    var forms = [
      "Eerste Corinthiers", "Eerste Corinthiërs", "Eerste Korinthiers", "Eerste Korinthiërs",
      "Eerste Corinthier", "Eerste Corinthiër", "Eerste Corintiers", "Eerste Corintiërs",
      "Eerste Korinthier", "Eerste Korinthiër", "Eerste Korintiers", "Eerste Korintiërs",
      "Eerste Corintier", "Eerste Corintiër", "Eerste Korintier", "Eerste Korintiër",
      "1e. Corinthiers", "1e. Corinthiërs", "1e. Korinthiers", "1e. Korinthiërs",
      "Eerste Corinthe", "Eerste Korinthe",
      "1. Corinthiers", "1. Corinthiërs", "1. Korinthiers", "1. Korinthiërs",
      "1e Corinthiers", "1e Corinthiërs", "1e Korinthiers", "1e Korinthiërs",
      "1e. Corinthier", "1e. Corinthiër", "1e. Corintiers", "1e. Corintiërs",
      "1e. Korinthier", "1e. Korinthiër", "1e. Korintiers", "1e. Korintiërs",
      "I. Corinthiers", "I. Corinthiërs", "I. Korinthiers", "I. Korinthiërs",
      "1 Corinthiers", "1 Corinthiërs", "1 Korinthiers", "1 Korinthiërs",
      "1. Corinthier", "1. Corinthiër", "1. Corintiers", "1. Corintiërs",
      "1. Korinthier", "1. Korinthiër", "1. Korintiers", "1. Korintiërs",
      "1e Corinthier", "1e Corinthiër", "1e Corintiers", "1e Corintiërs",
      "1e Korinthier", "1e Korinthiër", "1e Korintiers", "1e Korintiërs",
      "1e. Corintier", "1e. Corintiër", "1e. Korintier", "1e. Korintiër",
      "I Corinthiers", "I Corinthiërs", "I Korinthiers", "I Korinthiërs",
      "I. Corinthier", "I. Corinthiër", "I. Corintiers", "I. Corintiërs",
      "I. Korinthier", "I. Korinthiër", "I. Korintiers", "I. Korintiërs",
      "1 Corinthier", "1 Corinthiër", "1 Corintiers", "1 Corintiërs",
      "1 Korinthier", "1 Korinthiër", "1 Korintiers", "1 Korintiërs",
      "1. Corintier", "1. Corintiër", "1. Korintier", "1. Korintiër",
      "1e Corintier", "1e Corintiër", "1e Korintier", "1e Korintiër",
      "1e. Corinthe", "1e. Korinthe",
      "I Corinthier", "I Corinthiër", "I Corintiers", "I Corintiërs",
      "I Korinthier", "I Korinthiër", "I Korintiers", "I Korintiërs",
      "I. Corintier", "I. Corintiër", "I. Korintier", "I. Korintiër",
      "1 Corintier", "1 Corintiër", "1 Korintier", "1 Korintiër",
      "1. Corinthe", "1. Korinthe", "1e Corinthe", "1e Korinthe",
      "I Corintier", "I Corintiër", "I Korintier", "I Korintiër",
      "I. Corinthe", "I. Korinthe", "1 Corinthe", "1 Korinthe",
      "Eerste Kor", "I Corinthe", "I Korinthe",
      "1e. Kor", "1. Kor", "1e Kor", "I. Kor", "1 Kor", "I Kor", "1Cor"
    ];
    var i;
    for (i = 0; i < forms.length; i++) {
      expect(p.parse(forms[i] + " 1:1").osis()).toEqual("1Cor.1.1");
    }
    // The parser is case-insensitive: the same forms, uppercased, must still
    // resolve, here with the Apocrypha excluded.
    p.include_apocrypha(false);
    for (i = 0; i < forms.length; i++) {
      expect(p.parse(forms[i].toUpperCase() + " 1:1").osis()).toEqual("1Cor.1.1");
    }
    `
    true
describe "Localized book Gal (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Gal (nl)", ->
    `
    // Every accepted Dutch form of "Galatians", in the parser's match order.
    var forms = ["Galatenbrief", "Galaten", "Gal"];
    var i;
    for (i = 0; i < forms.length; i++) {
      expect(p.parse(forms[i] + " 1:1").osis()).toEqual("Gal.1.1");
    }
    // Case-insensitivity: the uppercased forms must also resolve, here with
    // the Apocrypha excluded.
    p.include_apocrypha(false);
    for (i = 0; i < forms.length; i++) {
      expect(p.parse(forms[i].toUpperCase() + " 1:1").osis()).toEqual("Gal.1.1");
    }
    `
    true
describe "Localized book Eph (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Eph (nl)", ->
    `
    // Every accepted Dutch form of "Ephesians", in the parser's match order.
    var forms = ["Efeziers", "Efeziërs", "Efez", "Eph", "Ef"];
    var i;
    for (i = 0; i < forms.length; i++) {
      expect(p.parse(forms[i] + " 1:1").osis()).toEqual("Eph.1.1");
    }
    // Case-insensitivity: the uppercased forms must also resolve, here with
    // the Apocrypha excluded.
    p.include_apocrypha(false);
    for (i = 0; i < forms.length; i++) {
      expect(p.parse(forms[i].toUpperCase() + " 1:1").osis()).toEqual("Eph.1.1");
    }
    `
    true
describe "Localized book Phil (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Phil (nl)", ->
    `
    // Every accepted Dutch form of "Philippians", in the parser's match order.
    var forms = ["Filippenzen", "Filip", "Phil", "Fil"];
    var i;
    for (i = 0; i < forms.length; i++) {
      expect(p.parse(forms[i] + " 1:1").osis()).toEqual("Phil.1.1");
    }
    // Case-insensitivity: the uppercased forms must also resolve, here with
    // the Apocrypha excluded.
    p.include_apocrypha(false);
    for (i = 0; i < forms.length; i++) {
      expect(p.parse(forms[i].toUpperCase() + " 1:1").osis()).toEqual("Phil.1.1");
    }
    `
    true
describe "Localized book Col (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Col (nl)", ->
    `
    // Every accepted Dutch form of "Colossians", in the parser's match order.
    var forms = ["Colossenzen", "Kolossenzen", "Col", "Kol"];
    var i;
    for (i = 0; i < forms.length; i++) {
      expect(p.parse(forms[i] + " 1:1").osis()).toEqual("Col.1.1");
    }
    // Case-insensitivity: the uppercased forms must also resolve, here with
    // the Apocrypha excluded.
    p.include_apocrypha(false);
    for (i = 0; i < forms.length; i++) {
      expect(p.parse(forms[i].toUpperCase() + " 1:1").osis()).toEqual("Col.1.1");
    }
    `
    true
describe "Localized book 2Thess (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 2Thess (nl)", ->
    `
    // Every accepted Dutch form of "2 Thessalonians", in the parser's match order.
    var forms = [
      "Tweede Thessalonicenzen", "Tweede Tessalonicenzen",
      "2e. Thessalonicenzen", "II. Thessalonicenzen", "2. Thessalonicenzen",
      "2e Thessalonicenzen", "2e. Tessalonicenzen", "II Thessalonicenzen",
      "II. Tessalonicenzen", "2 Thessalonicenzen", "2. Tessalonicenzen",
      "2e Tessalonicenzen", "II Tessalonicenzen", "2 Tessalonicenzen",
      "Tweede Thess", "Tweede Tess", "Tweede Tes",
      "2e. Thess", "II. Thess", "2. Thess", "2e Thess", "2e. Tess",
      "II Thess", "II. Tess", "2 Thess", "2. Tess", "2e Tess", "2e. Tes",
      "II Tess", "II. Tes", "2 Tess", "2 Thes", "2. Tes", "2Thess",
      "2e Tes", "II Tes", "2 Tes"
    ];
    var i;
    for (i = 0; i < forms.length; i++) {
      expect(p.parse(forms[i] + " 1:1").osis()).toEqual("2Thess.1.1");
    }
    // Case-insensitivity: the uppercased forms must also resolve, here with
    // the Apocrypha excluded.
    p.include_apocrypha(false);
    for (i = 0; i < forms.length; i++) {
      expect(p.parse(forms[i].toUpperCase() + " 1:1").osis()).toEqual("2Thess.1.1");
    }
    `
    true
describe "Localized book 1Thess (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 1Thess (nl)", ->
    `
    // Every accepted Dutch form of "1 Thessalonians", in the parser's match order.
    var forms = [
      "Eerste Thessalonicenzen", "Eerste Tessalonicenzen",
      "1e. Thessalonicenzen", "1. Thessalonicenzen", "1e Thessalonicenzen",
      "1e. Tessalonicenzen", "I. Thessalonicenzen", "1 Thessalonicenzen",
      "1. Tessalonicenzen", "1e Tessalonicenzen", "I Thessalonicenzen",
      "I. Tessalonicenzen", "1 Tessalonicenzen", "I Tessalonicenzen",
      "Eerste Thess", "Eerste Tess", "Eerste Tes",
      "1e. Thess", "1. Thess", "1e Thess", "1e. Tess", "I. Thess",
      "1 Thess", "1. Tess", "1e Tess", "1e. Tes", "I Thess", "I. Tess",
      "1 Tess", "1 Thes", "1. Tes", "1Thess", "1e Tes", "I Tess",
      "I. Tes", "1 Tes", "I Tes"
    ];
    var i;
    for (i = 0; i < forms.length; i++) {
      expect(p.parse(forms[i] + " 1:1").osis()).toEqual("1Thess.1.1");
    }
    // Case-insensitivity: the uppercased forms must also resolve, here with
    // the Apocrypha excluded.
    p.include_apocrypha(false);
    for (i = 0; i < forms.length; i++) {
      expect(p.parse(forms[i].toUpperCase() + " 1:1").osis()).toEqual("1Thess.1.1");
    }
    `
    true
# Verifies that every recognized Dutch (nl) spelling of 2 Timothy parses to
# the OSIS id "2Tim": the cross product of ordinal prefixes and book-name
# variants, plus the bare "2Tim" form. Mixed-case forms are checked with the
# Apocrypha enabled, then the same forms in ALL CAPS with it disabled.
describe "Localized book 2Tim (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Tim (nl)", ->
		`
		var prefixes = ["Tweede", "2e.", "II.", "2.", "2e", "II", "2"];
		var names = [];
		["Timotheus", "Timotheüs", "Timoteus", "Timoteüs", "Tim"].forEach(function (book) {
			prefixes.forEach(function (prefix) { names.push(prefix + " " + book); });
		});
		names.push("2Tim");
		// Mixed-case forms with the Apocrypha on...
		names.forEach(function (name) { expect(p.parse(name + " 1:1").osis()).toEqual("2Tim.1.1"); });
		p.include_apocrypha(false)
		// ...then the identical forms in ALL CAPS with it off.
		names.forEach(function (name) { expect(p.parse(name.toUpperCase() + " 1:1").osis()).toEqual("2Tim.1.1"); });
		`
		true
# Verifies that every recognized Dutch (nl) spelling of 1 Timothy parses to
# the OSIS id "1Tim": the cross product of ordinal prefixes and book-name
# variants, plus the bare "1Tim" form. Mixed-case forms are checked with the
# Apocrypha enabled, then the same forms in ALL CAPS with it disabled.
describe "Localized book 1Tim (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Tim (nl)", ->
		`
		var prefixes = ["Eerste", "1e.", "1.", "1e", "I.", "1", "I"];
		var names = [];
		["Timotheus", "Timotheüs", "Timoteus", "Timoteüs", "Tim"].forEach(function (book) {
			prefixes.forEach(function (prefix) { names.push(prefix + " " + book); });
		});
		names.push("1Tim");
		// Mixed-case forms with the Apocrypha on...
		names.forEach(function (name) { expect(p.parse(name + " 1:1").osis()).toEqual("1Tim.1.1"); });
		p.include_apocrypha(false)
		// ...then the identical forms in ALL CAPS with it off.
		names.forEach(function (name) { expect(p.parse(name.toUpperCase() + " 1:1").osis()).toEqual("1Tim.1.1"); });
		`
		true
# Verifies that every recognized Dutch (nl) spelling of Titus parses to the
# OSIS id "Titus", in mixed case (Apocrypha on) and ALL CAPS (Apocrypha off).
describe "Localized book Titus (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Titus (nl)", ->
		`
		var names = ["Titus", "Tit"];
		names.forEach(function (name) { expect(p.parse(name + " 1:1").osis()).toEqual("Titus.1.1"); });
		p.include_apocrypha(false)
		names.forEach(function (name) { expect(p.parse(name.toUpperCase() + " 1:1").osis()).toEqual("Titus.1.1"); });
		`
		true
# Verifies that every recognized Dutch (nl) spelling of Philemon parses to
# the OSIS id "Phlm", in mixed case (Apocrypha on) and ALL CAPS (Apocrypha off).
describe "Localized book Phlm (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phlm (nl)", ->
		`
		var names = ["Filemon", "Filémon", "Filem", "Film", "Phlm"];
		names.forEach(function (name) { expect(p.parse(name + " 1:1").osis()).toEqual("Phlm.1.1"); });
		p.include_apocrypha(false)
		names.forEach(function (name) { expect(p.parse(name.toUpperCase() + " 1:1").osis()).toEqual("Phlm.1.1"); });
		`
		true
# Verifies that every recognized Dutch (nl) spelling of Hebrews parses to the
# OSIS id "Heb", in mixed case (Apocrypha on) and ALL CAPS (Apocrypha off).
describe "Localized book Heb (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Heb (nl)", ->
		`
		var names = ["Hebreeen", "Hebreeën", "Hebr", "Heb"];
		names.forEach(function (name) { expect(p.parse(name + " 1:1").osis()).toEqual("Heb.1.1"); });
		p.include_apocrypha(false)
		names.forEach(function (name) { expect(p.parse(name.toUpperCase() + " 1:1").osis()).toEqual("Heb.1.1"); });
		`
		true
# Verifies that every recognized Dutch (nl) spelling of James parses to the
# OSIS id "Jas", in mixed case (Apocrypha on) and ALL CAPS (Apocrypha off).
describe "Localized book Jas (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jas (nl)", ->
		`
		var names = ["Jakobus", "Jak", "Jas"];
		names.forEach(function (name) { expect(p.parse(name + " 1:1").osis()).toEqual("Jas.1.1"); });
		p.include_apocrypha(false)
		names.forEach(function (name) { expect(p.parse(name.toUpperCase() + " 1:1").osis()).toEqual("Jas.1.1"); });
		`
		true
# Verifies that every recognized Dutch (nl) spelling of 2 Peter parses to the
# OSIS id "2Pet": the cross product of ordinal prefixes and book-name
# variants, plus the "2 Pe" and "2Pet" forms. Mixed-case forms are checked
# with the Apocrypha enabled, then the same forms in ALL CAPS with it disabled.
describe "Localized book 2Pet (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Pet (nl)", ->
		`
		var prefixes = ["Tweede", "2e.", "II.", "2.", "2e", "II", "2"];
		var names = [];
		["Petrus", "Petr", "Pet"].forEach(function (book) {
			prefixes.forEach(function (prefix) { names.push(prefix + " " + book); });
		});
		names.push("2 Pe", "2Pet");
		names.forEach(function (name) { expect(p.parse(name + " 1:1").osis()).toEqual("2Pet.1.1"); });
		p.include_apocrypha(false)
		names.forEach(function (name) { expect(p.parse(name.toUpperCase() + " 1:1").osis()).toEqual("2Pet.1.1"); });
		`
		true
# Verifies that every recognized Dutch (nl) spelling of 1 Peter parses to the
# OSIS id "1Pet": the cross product of ordinal prefixes and book-name
# variants, plus the "1 Pe" and "1Pet" forms. Mixed-case forms are checked
# with the Apocrypha enabled, then the same forms in ALL CAPS with it disabled.
describe "Localized book 1Pet (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Pet (nl)", ->
		`
		var prefixes = ["Eerste", "1e.", "1.", "1e", "I.", "1", "I"];
		var names = [];
		["Petrus", "Petr", "Pet"].forEach(function (book) {
			prefixes.forEach(function (prefix) { names.push(prefix + " " + book); });
		});
		names.push("1 Pe", "1Pet");
		names.forEach(function (name) { expect(p.parse(name + " 1:1").osis()).toEqual("1Pet.1.1"); });
		p.include_apocrypha(false)
		names.forEach(function (name) { expect(p.parse(name.toUpperCase() + " 1:1").osis()).toEqual("1Pet.1.1"); });
		`
		true
# Verifies that every recognized Dutch (nl) spelling of Jude parses to the
# OSIS id "Jude", in mixed case (Apocrypha on) and ALL CAPS (Apocrypha off).
describe "Localized book Jude (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jude (nl)", ->
		`
		var names = ["Judas", "Jude", "Jud"];
		names.forEach(function (name) { expect(p.parse(name + " 1:1").osis()).toEqual("Jude.1.1"); });
		p.include_apocrypha(false)
		names.forEach(function (name) { expect(p.parse(name.toUpperCase() + " 1:1").osis()).toEqual("Jude.1.1"); });
		`
		true
# Verifies that every recognized Dutch (nl) spelling of Tobit parses to the
# OSIS id "Tob". Apocrypha-only book: no ALL-CAPS / Apocrypha-off section.
describe "Localized book Tob (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Tob (nl)", ->
		`
		var names = ["Tobias", "Tobías", "Tobia", "Tobit", "Tobía", "Tob"];
		names.forEach(function (name) { expect(p.parse(name + " 1:1").osis()).toEqual("Tob.1.1"); });
		`
		true
# Verifies that every recognized Dutch (nl) spelling of Judith parses to the
# OSIS id "Jdt". Apocrypha-only book: no ALL-CAPS / Apocrypha-off section.
describe "Localized book Jdt (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jdt (nl)", ->
		`
		var names = ["Judith", "Judit", "Jdt"];
		names.forEach(function (name) { expect(p.parse(name + " 1:1").osis()).toEqual("Jdt.1.1"); });
		`
		true
# Verifies that every recognized Dutch (nl) spelling of Baruch parses to the
# OSIS id "Bar". Apocrypha-only book: no ALL-CAPS / Apocrypha-off section.
describe "Localized book Bar (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Bar (nl)", ->
		`
		var names = ["Baruch", "Bar"];
		names.forEach(function (name) { expect(p.parse(name + " 1:1").osis()).toEqual("Bar.1.1"); });
		`
		true
# Verifies that every recognized Dutch (nl) spelling of Susanna parses to the
# OSIS id "Sus". Apocrypha-only book: no ALL-CAPS / Apocrypha-off section.
describe "Localized book Sus (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Sus (nl)", ->
		`
		var names = ["Susanna", "Sus"];
		names.forEach(function (name) { expect(p.parse(name + " 1:1").osis()).toEqual("Sus.1.1"); });
		`
		true
# Verifies that every recognized Dutch (nl) spelling of 2 Maccabees parses to
# the OSIS id "2Macc": ordinal prefixes crossed with the long and short book
# names, plus the bare "2Macc" form. Apocrypha-only book: no ALL-CAPS section.
describe "Localized book 2Macc (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Macc (nl)", ->
		`
		var prefixes = ["Tweede", "2e.", "II.", "2.", "2e", "II", "2"];
		var names = [];
		["Makkabeeen", "Makkabeeën", "Mak"].forEach(function (book) {
			prefixes.forEach(function (prefix) { names.push(prefix + " " + book); });
		});
		names.push("2Macc");
		names.forEach(function (name) { expect(p.parse(name + " 1:1").osis()).toEqual("2Macc.1.1"); });
		`
		true
# Verifies that every recognized Dutch (nl) spelling of 3 Maccabees parses to
# the OSIS id "3Macc": ordinal prefixes crossed with the long and short book
# names, plus the bare "3Macc" form. Apocrypha-only book: no ALL-CAPS section.
describe "Localized book 3Macc (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3Macc (nl)", ->
		`
		var prefixes = ["Derde", "III.", "3e.", "III", "3.", "3e", "3"];
		var names = [];
		["Makkabeeen", "Makkabeeën", "Mak"].forEach(function (book) {
			prefixes.forEach(function (prefix) { names.push(prefix + " " + book); });
		});
		names.push("3Macc");
		names.forEach(function (name) { expect(p.parse(name + " 1:1").osis()).toEqual("3Macc.1.1"); });
		`
		true
# Verifies that every recognized Dutch (nl) spelling of 4 Maccabees parses to
# the OSIS id "4Macc": ordinal prefixes crossed with the long and short book
# names, plus the bare "4Macc" form. Apocrypha-only book: no ALL-CAPS section.
describe "Localized book 4Macc (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 4Macc (nl)", ->
		`
		var prefixes = ["Vierde", "IV.", "4.", "IV", "4"];
		var names = [];
		["Makkabeeen", "Makkabeeën", "Mak"].forEach(function (book) {
			prefixes.forEach(function (prefix) { names.push(prefix + " " + book); });
		});
		names.push("4Macc");
		names.forEach(function (name) { expect(p.parse(name + " 1:1").osis()).toEqual("4Macc.1.1"); });
		`
		true
# Verifies that every recognized Dutch (nl) spelling of 1 Maccabees parses to
# the OSIS id "1Macc": ordinal prefixes crossed with the long and short book
# names, plus the bare "1Macc" form. Apocrypha-only book: no ALL-CAPS section.
describe "Localized book 1Macc (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Macc (nl)", ->
		`
		var prefixes = ["Eerste", "1e.", "1.", "1e", "I.", "1", "I"];
		var names = [];
		["Makkabeeen", "Makkabeeën", "Mak"].forEach(function (book) {
			prefixes.forEach(function (prefix) { names.push(prefix + " " + book); });
		});
		names.push("1Macc");
		names.forEach(function (name) { expect(p.parse(name + " 1:1").osis()).toEqual("1Macc.1.1"); });
		`
		true
# Disambiguation spec: in Dutch (nl) the bare abbreviation "Ez" must resolve
# to Ezekiel ("Ezek"), not Ezra, with the Apocrypha both on and off.
describe "Localized book Ezek,Ezra (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezek,Ezra (nl)", ->
		`
		expect(p.parse("Ez 1:1").osis()).toEqual("Ezek.1.1")
		// Re-check the ALL-CAPS form with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("EZ 1:1").osis()).toEqual("Ezek.1.1")
		`
		true
# Grab-bag of Dutch (nl) locale behaviors beyond plain book names: ranges,
# chapter/verse labels, "and"-style connectors, Psalm superscriptions,
# "and following verses" continuations, translation tags, book-to-book
# ranges, and punctuation boundaries around references.
describe "Miscellaneous tests", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should return the expected language", ->
		expect(p.languages).toEqual ["nl"]
	it "should handle ranges (nl)", ->
		expect(p.parse("Titus 1:1 - 2").osis()).toEqual "Titus.1.1-Titus.1.2"
		expect(p.parse("Matt 1-2").osis()).toEqual "Matt.1-Matt.2"
		expect(p.parse("Phlm 2 - 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
	it "should handle chapters (nl)", ->
		# "hoofdstuk(ken)" is the Dutch chapter label, case-insensitive.
		expect(p.parse("Titus 1:1, hoofdstukken 2").osis()).toEqual "Titus.1.1,Titus.2"
		expect(p.parse("Matt 3:4 HOOFDSTUKKEN 6").osis()).toEqual "Matt.3.4,Matt.6"
		expect(p.parse("Titus 1:1, hoofdstuk 2").osis()).toEqual "Titus.1.1,Titus.2"
		expect(p.parse("Matt 3:4 HOOFDSTUK 6").osis()).toEqual "Matt.3.4,Matt.6"
	it "should handle verses (nl)", ->
		# Verse labels: "vers(zen)" and the abbreviations vs./vs/v./v.
		expect(p.parse("Exod 1:1 verzen 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VERZEN 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 vers 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VERS 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 vs. 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VS. 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 vs 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VS 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 v. 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm V. 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 v 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm V 6").osis()).toEqual "Phlm.1.6"
	it "should handle 'and' (nl)", ->
		# Sequence connectors: "en", "vgl", "zie ook".
		expect(p.parse("Exod 1:1 en 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm 2 EN 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
		expect(p.parse("Exod 1:1 vgl 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm 2 VGL 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
		expect(p.parse("Exod 1:1 zie ook 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm 2 ZIE OOK 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
	it "should handle titles (nl)", ->
		# A Psalm "opschrift" (superscription) maps to verse 1.
		expect(p.parse("Ps 3 opschrift, 4:2, 5:opschrift").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
		expect(p.parse("PS 3 OPSCHRIFT, 4:2, 5:OPSCHRIFT").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
	it "should handle 'ff' (nl)", ->
		# "en volgende verzen" extends to the end of the chapter or book.
		expect(p.parse("Rev 3en volgende verzen, 4:2en volgende verzen").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
		expect(p.parse("REV 3 EN VOLGENDE VERZEN, 4:2 EN VOLGENDE VERZEN").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
	it "should handle translations (nl)", ->
		# Translation tags are recognized parenthesized or bare, any case.
		expect(p.parse("Lev 1 (GNB96)").osis_and_translations()).toEqual [["Lev.1", "GNB96"]]
		expect(p.parse("lev 1 gnb96").osis_and_translations()).toEqual [["Lev.1", "GNB96"]]
		expect(p.parse("Lev 1 (NB)").osis_and_translations()).toEqual [["Lev.1", "NB"]]
		expect(p.parse("lev 1 nb").osis_and_translations()).toEqual [["Lev.1", "NB"]]
		expect(p.parse("Lev 1 (NBG51)").osis_and_translations()).toEqual [["Lev.1", "NBG51"]]
		expect(p.parse("lev 1 nbg51").osis_and_translations()).toEqual [["Lev.1", "NBG51"]]
		expect(p.parse("Lev 1 (NBV)").osis_and_translations()).toEqual [["Lev.1", "NBV"]]
		expect(p.parse("lev 1 nbv").osis_and_translations()).toEqual [["Lev.1", "NBV"]]
		expect(p.parse("Lev 1 (SV)").osis_and_translations()).toEqual [["Lev.1", "SV"]]
		expect(p.parse("lev 1 sv").osis_and_translations()).toEqual [["Lev.1", "SV"]]
		expect(p.parse("Lev 1 (SV77)").osis_and_translations()).toEqual [["Lev.1", "SV77"]]
		expect(p.parse("lev 1 sv77").osis_and_translations()).toEqual [["Lev.1", "SV77"]]
		expect(p.parse("Lev 1 (WV95)").osis_and_translations()).toEqual [["Lev.1", "WV95"]]
		expect(p.parse("lev 1 wv95").osis_and_translations()).toEqual [["Lev.1", "WV95"]]
	it "should handle book ranges (nl)", ->
		p.set_options {book_alone_strategy: "full", book_range_strategy: "include"}
		expect(p.parse("Eerste - Derde Joh").osis()).toEqual "1John.1-3John.1"
	it "should handle boundaries (nl)", ->
		p.set_options {book_alone_strategy: "full"}
		# Em dashes and curly quotes around a reference must not break parsing.
		expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
		expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
# Spec for the Dutch ("nl") build of the Bible reference parser.
# NOTE: the require line below was garbled by extraction residue
# ("` | 178898 | " prefix) and has been restored.
bcv_parser = require("../../js/nl_bcv_parser.js").bcv_parser
describe "Parsing", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.options.osis_compaction_strategy = "b"
		p.options.sequence_combination_strategy = "combine"
	it "should round-trip OSIS references", ->
		p.set_options osis_compaction_strategy: "bc"
		# The 66 protestant-canon OSIS book codes in canonical order. The
		# middle of this list had been corrupted to "<NAME>" placeholders and
		# is restored from the standard OSIS abbreviation list.
		books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
	it "should round-trip OSIS Apocrypha references", ->
		p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
		p.include_apocrypha true
		books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
		# With the "bc" strategy, Ps151 is reported as Psalm 151 instead.
		p.set_options ps151_strategy: "bc"
		expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
		expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
		expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
		# Apocryphal books must stop matching once the Apocrypha is disabled.
		p.include_apocrypha false
		for book in books
			bc = book + ".1"
			expect(p.parse(bc).osis()).toEqual ""
	it "should handle a preceding character", ->
		expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
		expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
		expect(p.parse("1Ps 1").osis()).toEqual ""
		expect(p.parse("11Sam 1").osis()).toEqual ""
describe "Localized book Gen (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gen (nl)", ->
`
expect(p.parse("Eerste Mozes 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1e. Mozes 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Beresjiet 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1. Mozes 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1e Mozes 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I. Mozes 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 Mozes 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Genesis 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I Mozes 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Gn 1:1").osis()).toEqual("Gen.1.1")
p.include_apocrypha(false)
expect(p.parse("EERSTE MOZES 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1E. MOZES 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("BERESJIET 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1. MOZES 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1E MOZES 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I. MOZES 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 MOZES 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("GENESIS 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I MOZES 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("GN 1:1").osis()).toEqual("Gen.1.1")
`
true
describe "Localized book Exod (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Exod (nl)", ->
`
expect(p.parse("Tweede Mozes 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2e. Mozes 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II. Mozes 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2. Mozes 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2e Mozes 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II Mozes 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 Mozes 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Exodus 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Sjemot 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Ex 1:1").osis()).toEqual("Exod.1.1")
p.include_apocrypha(false)
expect(p.parse("TWEEDE MOZES 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2E. MOZES 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II. MOZES 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2. MOZES 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2E MOZES 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II MOZES 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 MOZES 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EXODUS 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("SJEMOT 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EX 1:1").osis()).toEqual("Exod.1.1")
`
true
describe "Localized book Bel (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bel (nl)", ->
`
expect(p.parse("Bel en de draak 1:1").osis()).toEqual("Bel.1.1")
expect(p.parse("Bel 1:1").osis()).toEqual("Bel.1.1")
`
true
describe "Localized book Lev (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lev (nl)", ->
`
expect(p.parse("Derde Mozes 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III. Mozes 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3e. Mozes 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III Mozes 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Leviticus 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3. Mozes 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3e Mozes 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 Mozes 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Vajikra 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Wajikra 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Lv 1:1").osis()).toEqual("Lev.1.1")
p.include_apocrypha(false)
expect(p.parse("DERDE MOZES 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III. MOZES 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3E. MOZES 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III MOZES 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEVITICUS 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3. MOZES 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3E MOZES 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 MOZES 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("VAJIKRA 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("WAJIKRA 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LV 1:1").osis()).toEqual("Lev.1.1")
`
true
describe "Localized book Num (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Num (nl)", ->
`
expect(p.parse("Vierde Mozes 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV. Mozes 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4. Mozes 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Bamidbar 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Bemidbar 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV Mozes 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 Mozes 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Numberi 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Numeri 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Nu 1:1").osis()).toEqual("Num.1.1")
p.include_apocrypha(false)
expect(p.parse("VIERDE MOZES 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV. MOZES 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4. MOZES 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("BAMIDBAR 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("BEMIDBAR 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV MOZES 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 MOZES 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NUMBERI 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NUMERI 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NU 1:1").osis()).toEqual("Num.1.1")
`
true
describe "Localized book Sir (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sir (nl)", ->
`
expect(p.parse("<NAME> <NAME> 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Wi<NAME> 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Wijs<NAME> 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Ecclesiasticus 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("<NAME> 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Sirach 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Sir 1:1").osis()).toEqual("Sir.1.1")
`
true
describe "Localized book Wis (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Wis (nl)", ->
`
expect(p.parse("De wijsheid van Salomo 1:1").osis()).toEqual("Wis.1.1")
expect(p.parse("Het boek der wijsheid 1:1").osis()).toEqual("Wis.1.1")
expect(p.parse("Wijsheid van Salomo 1:1").osis()).toEqual("Wis.1.1")
expect(p.parse("Wijsheid 1:1").osis()).toEqual("Wis.1.1")
expect(p.parse("Wis 1:1").osis()).toEqual("Wis.1.1")
`
true
describe "Localized book Lam (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lam (nl)", ->
`
expect(p.parse("Klaagliederen 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Klaagl 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Kl 1:1").osis()).toEqual("Lam.1.1")
p.include_apocrypha(false)
expect(p.parse("KLAAGLIEDEREN 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("KLAAGL 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("KL 1:1").osis()).toEqual("Lam.1.1")
`
true
describe "Localized book EpJer (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: EpJer (nl)", ->
`
expect(p.parse("Brief <NAME> Jer<NAME>ia 1:1").osis()).toEqual("EpJer.1.1")
expect(p.parse("EpJer 1:1").osis()).toEqual("EpJer.1.1")
`
true
describe "Localized book Rev (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Rev (nl)", ->
`
expect(p.parse("Openbaring van Johannes 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Openbaringen 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Openbaring 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Apocalyps 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Openb 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Apc 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Apk 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Op 1:1").osis()).toEqual("Rev.1.1")
p.include_apocrypha(false)
expect(p.parse("OPENBARING VAN J<NAME> 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("OPENBARINGEN 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("OPENBARING 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("APOCALYPS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("OPENB 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("APC 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("APK 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("OP 1:1").osis()).toEqual("Rev.1.1")
`
true
describe "Localized book PrMan (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PrMan (nl)", ->
`
expect(p.parse("Manasse 1:1").osis()).toEqual("PrMan.1.1")
expect(p.parse("PrMan 1:1").osis()).toEqual("PrMan.1.1")
expect(p.parse("Man 1:1").osis()).toEqual("PrMan.1.1")
`
true
describe "Localized book Deut (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Deut (nl)", ->
`
expect(p.parse("Deuteronomium 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Vijfde Mozes 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. Mozes 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Dewariem 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V. Mozes 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 Mozes 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V Mozes 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Dt 1:1").osis()).toEqual("Deut.1.1")
p.include_apocrypha(false)
expect(p.parse("DEUTERONOMIUM 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("VIJFDE MOZES 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. MOZES 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DEWARIEM 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V. MOZES 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 MOZES 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V MOZES 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DT 1:1").osis()).toEqual("Deut.1.1")
`
true
describe "Localized book <NAME>osh (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: <NAME> (nl)", ->
`
expect(p.parse("Jozua 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("<NAME>osh 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("Joz 1:1").osis()).toEqual("Josh.1.1")
p.include_apocrypha(false)
expect(p.parse("JOZUA 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOZ 1:1").osis()).toEqual("Josh.1.1")
`
true
describe "Localized book Judg (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Judg (nl)", ->
`
expect(p.parse("Richteren 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Rechters 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Richtere 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Recht 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Richt 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Re 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Ri 1:1").osis()).toEqual("Judg.1.1")
p.include_apocrypha(false)
expect(p.parse("RICHTEREN 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("RECHTERS 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("RICHTERE 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("RECHT 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("RICHT 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("RE 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("RI 1:1").osis()).toEqual("Judg.1.1")
`
true
describe "Localized book Ruth (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ruth (nl)", ->
`
expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("Rt 1:1").osis()).toEqual("Ruth.1.1")
p.include_apocrypha(false)
expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("RT 1:1").osis()).toEqual("Ruth.1.1")
`
true
describe "Localized book 1Esd (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Esd (nl)", ->
`
expect(p.parse("Eerste Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("Derde Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("Eerste Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("III. Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1e. Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3e. Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("Derde Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("III Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1. Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1e Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3. Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3e Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("I. Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("III. Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1 Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1e. Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3 Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3e. Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("I Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("III Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1. Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1e Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3. Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3e Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("I. Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1 Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3 Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("I Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1Esd 1:1").osis()).toEqual("1Esd.1.1")
`
true
describe "Localized book 2Esd (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Esd (nl)", ->
`
expect(p.parse("Tweede Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("Vierde Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("Tweede Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("Vierde Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2e. Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("II. Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("IV. Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2. Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2e Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("4. Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("II Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("IV Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2 Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2e. Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("4 Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("II. Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("IV. Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2. Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2e Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("4. Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("II Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("IV Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2 Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("4 Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2Esd 1:1").osis()).toEqual("2Esd.1.1")
`
true
describe "Localized book Isa (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Isa (nl)", ->
`
expect(p.parse("Jesaja 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Jes 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Js 1:1").osis()).toEqual("Isa.1.1")
p.include_apocrypha(false)
expect(p.parse("JESAJA 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("JES 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("JS 1:1").osis()).toEqual("Isa.1.1")
`
true
describe "Localized book 2Sam (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Sam (nl)", ->
`
expect(p.parse("Tweede <NAME> 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("Tweede Sam<NAME> 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2e. <NAME> 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2e. <NAME> 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II. <NAME> 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II. <NAME> 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("Tweede <NAME> 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. <NAME> 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. Sam<NAME> 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2e Sam<NAME> 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2e Samuël 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II Samuel 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II Samuël 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("Samuel II 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 Samuel 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 Samuël 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2e. Sam 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II. Sam 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. Sam 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2e Sam 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II Sam 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 Sam 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2Sam 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 S 1:1").osis()).toEqual("2Sam.1.1")
p.include_apocrypha(false)
expect(p.parse("TWEEDE SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("TWEEDE SAMUËL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2E. SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2E. SAMUËL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II. SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II. SAMUËL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("TWEEDE SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. SAMUËL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2E SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2E SAMUËL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II SAMUËL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("SAMUEL II 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 SAMUËL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2E. SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II. SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2E SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 S 1:1").osis()).toEqual("2Sam.1.1")
`
true
describe "Localized book 1Sam (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Sam (nl)", ->
`
expect(p.parse("<NAME> 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("<NAME> 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("<NAME> 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("<NAME> 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("<NAME> 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("<NAME> 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("<NAME> 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("<NAME> 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1<NAME> 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("<NAME> 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I. <NAME> 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 Sam<NAME> 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 Samu<NAME>l 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I <NAME> 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I Sam<NAME> 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("<NAME> 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1e. <NAME> 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. Sam 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1e Sam 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I. Sam 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 Sam 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I Sam 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1Sam 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 S 1:1").osis()).toEqual("1Sam.1.1")
p.include_apocrypha(false)
expect(p.parse("EERSTE SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("EERSTE SAMUËL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1E. SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1E. SAMUËL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("EERSTE SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. SAMUËL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1E SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1E SAMUËL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I. SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I. SAMUËL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAMUËL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I SAMUËL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("SAMUEL I 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1E. SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1E SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I. SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 S 1:1").osis()).toEqual("1Sam.1.1")
`
true
describe "Localized book 2Kgs (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Kgs (nl)", ->
`
expect(p.parse("Tweede Koningen 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2e. Koningen 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. Koningen 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. Koningen 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2e Koningen 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II Koningen 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Koningen 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("Tweede Kon 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("Tweede Ko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2e. Kon 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. Kon 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. Kon 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2e Kon 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2e. Ko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II Kon 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. Ko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Kon 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. Ko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2e Ko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II Ko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Ko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2Kgs 1:1").osis()).toEqual("2Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("TWEEDE KONINGEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2E. KONINGEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. KONINGEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. KONINGEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2E KONINGEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II KONINGEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 KONINGEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("TWEEDE KON 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("TWEEDE KO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2E. KON 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. KON 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. KON 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2E KON 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2E. KO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II KON 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. KO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 KON 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. KO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2E KO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II KO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 KO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2KGS 1:1").osis()).toEqual("2Kgs.1.1")
`
true
describe "Localized book 1Kgs (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 1Kgs (nl)", ->
    # Every Dutch spelling/abbreviation of 1 Kings must resolve to OSIS "1Kgs".
    names = [
      "Eerste Koningen", "1e. Koningen", "1. Koningen", "1e Koningen",
      "I. Koningen", "1 Koningen", "Eerste Kon", "I Koningen", "Eerste Ko",
      "1e. Kon", "1. Kon", "1e Kon", "1e. Ko", "I. Kon", "1 Kon", "1. Ko",
      "1e Ko", "I Kon", "I. Ko", "1 Ko", "1Kgs", "I Ko"
    ]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "1Kgs.1.1"
    # Matching stays case-insensitive after the Apocrypha is switched off.
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "1Kgs.1.1"
    true
describe "Localized book 2Chr (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 2Chr (nl)", ->
    # Every Dutch spelling/abbreviation of 2 Chronicles must resolve to OSIS "2Chr".
    names = [
      "Tweede Kronieken", "2e. Kronieken", "II. Kronieken", "2. Kronieken",
      "2e Kronieken", "II Kronieken", "2 Kronieken", "Tweede Kron",
      "2e. Kron", "II. Kron", "2. Kron", "2e Kron", "II Kron", "2 Kron",
      "2 Kr", "2Chr"
    ]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "2Chr.1.1"
    # Matching stays case-insensitive after the Apocrypha is switched off.
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "2Chr.1.1"
    true
describe "Localized book 1Chr (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 1Chr (nl)", ->
    # Every Dutch spelling/abbreviation of 1 Chronicles must resolve to OSIS "1Chr".
    # NOTE(review): the uppercase literals in this block were corrupted with
    # "<NAME>" placeholders; they are restored here from the exactly parallel
    # lowercase rows (the generator emits the same list twice, upper-cased).
    names = [
      "Eerste Kronieken", "1e. Kronieken", "1. Kronieken", "1e Kronieken",
      "I. Kronieken", "1 Kronieken", "Eerste Kron", "I Kronieken",
      "1e. Kron", "1. Kron", "1e Kron", "I. Kron", "1 Kron", "I Kron",
      "1 Kr", "1Chr"
    ]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "1Chr.1.1"
    # Matching stays case-insensitive after the Apocrypha is switched off.
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "1Chr.1.1"
    true
describe "Localized book Ezra (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Ezra (nl)", ->
    # Dutch names for Ezra must resolve to OSIS "Ezra", in any letter case.
    names = ["Ezra", "Ezr"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Ezra.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Ezra.1.1"
    true
describe "Localized book Neh (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Neh (nl)", ->
    # Dutch names for Nehemiah must resolve to OSIS "Neh", in any letter case.
    names = ["Nehemia", "Neh"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Neh.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Neh.1.1"
    true
describe "Localized book GkEsth (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: GkEsth (nl)", ->
    # Dutch names for Greek Esther must resolve to OSIS "GkEsth".
    # "Ester (Grieks)" appears twice on purpose: the generator emits both an
    # escaped and an unescaped variant, which are the same string at runtime.
    # No all-caps pass here: this apocryphal book is only tested with the
    # Apocrypha enabled.
    names = [
      "Ester (Grieks)", "Ester (Grieks)", "Ester (Gr.)", "Ester (Gr)",
      "Est gr", "GkEsth"
    ]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "GkEsth.1.1"
    true
describe "Localized book Esth (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Esth (nl)", ->
    # Dutch names for Esther must resolve to OSIS "Esth", in any letter case.
    names = ["Esther", "Ester", "Esth", "Est"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Esth.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Esth.1.1"
    true
describe "Localized book Job (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Job (nl)", ->
    # The Dutch name for Job must resolve to OSIS "Job", in any letter case.
    names = ["Job"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Job.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Job.1.1"
    true
describe "Localized book Ps (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Ps (nl)", ->
    # Dutch names for Psalms must resolve to OSIS "Ps", in any letter case.
    names = ["Psalmen", "Psalm", "Ps"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Ps.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Ps.1.1"
    true
describe "Localized book PrAzar (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: PrAzar (nl)", ->
    # Dutch names for the Prayer of Azariah must resolve to OSIS "PrAzar".
    # Apocryphal book: only exercised with the Apocrypha enabled.
    for name in ["Gebed van Azarja", "PrAzar"]
      expect(p.parse("#{name} 1:1").osis()).toEqual "PrAzar.1.1"
    true
describe "Localized book Prov (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Prov (nl)", ->
    # Dutch names for Proverbs must resolve to OSIS "Prov", in any letter case.
    names = ["Spreuken", "Prov", "Spr"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Prov.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Prov.1.1"
    true
describe "Localized book Eccl (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Eccl (nl)", ->
    # Dutch names for Ecclesiastes must resolve to OSIS "Eccl", in any letter case.
    names = [
      "Koheleth", "Prediker", "Qoheleth", "Kohelet", "Qohelet", "Eccl",
      "Pred", "Pr"
    ]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Eccl.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Eccl.1.1"
    true
describe "Localized book SgThree (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: SgThree (nl)", ->
    # Dutch names for the Song of the Three must resolve to OSIS "SgThree".
    # Apocryphal book: only exercised with the Apocrypha enabled.
    names = [
      "Gezang der drie mannen in het vuur",
      "Lied van de drie jongemannen",
      "SgThree"
    ]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "SgThree.1.1"
    true
describe "Localized book Song (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Song (nl)", ->
    # Dutch names for the Song of Songs must resolve to OSIS "Song", in any letter case.
    names = ["Canticum canticorum", "Hooglied", "Hoogl", "Song", "Hl"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Song.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Song.1.1"
    true
describe "Localized book Jer (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Jer (nl)", ->
    # Dutch names for Jeremiah must resolve to OSIS "Jer", in any letter case.
    # NOTE(review): the describe/it titles were corrupted with "<NAME>"
    # placeholders; restored to "Jer" to match the block's assertions.
    names = ["Jeremia", "Jer", "Jr"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Jer.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Jer.1.1"
    true
describe "Localized book Ezek (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Ezek (nl)", ->
    # Dutch names for Ezekiel must resolve to OSIS "Ezek", in any letter case.
    # NOTE(review): the it title was corrupted ("<NAME>zek"); restored to
    # "Ezek" to match the describe title and assertions.
    names = ["Ezechiel", "Ezechiël", "Ezech", "Ezek"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Ezek.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Ezek.1.1"
    true
describe "Localized book Dan (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Dan (nl)", ->
    # Dutch names for Daniel must resolve to OSIS "Dan", in any letter case.
    # NOTE(review): "<NAME>" placeholders in the titles and in the first
    # literal were restored from the uppercase row ("DANIEL 1:1").
    names = ["Daniel", "Daniël", "Dan", "Da"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Dan.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Dan.1.1"
    true
describe "Localized book Hos (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Hos (nl)", ->
    # Dutch names for Hosea must resolve to OSIS "Hos", in any letter case.
    names = ["Hosea", "Hos"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Hos.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Hos.1.1"
    true
describe "Localized book Joel (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Joel (nl)", ->
    # Dutch names for Joel must resolve to OSIS "Joel", in any letter case.
    names = ["Joel", "Joël", "Jl"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Joel.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Joel.1.1"
    true
describe "Localized book Amos (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Amos (nl)", ->
    # Dutch names for Amos must resolve to OSIS "Amos", in any letter case.
    names = ["Amos", "Am"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Amos.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Amos.1.1"
    true
describe "Localized book Obad (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Obad (nl)", ->
    # Dutch names for Obadiah must resolve to OSIS "Obad", in any letter case.
    names = ["Obadja", "Obad", "Ob"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Obad.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Obad.1.1"
    true
describe "Localized book Jonah (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Jonah (nl)", ->
    # Dutch names for Jonah must resolve to OSIS "Jonah", in any letter case.
    # NOTE(review): "<NAME>" placeholders in the titles and the first literal
    # were restored from the uppercase row ("JONAH 1:1").
    names = ["Jonah", "Jona", "Jon"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Jonah.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Jonah.1.1"
    true
describe "Localized book Mic (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Mic (nl)", ->
    # Dutch names for Micah must resolve to OSIS "Mic", in any letter case.
    # NOTE(review): corrupted "<NAME>ic" titles restored to "Mic" to match
    # the block's assertions.
    names = ["Micha", "Mica", "Mic", "Mi"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Mic.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Mic.1.1"
    true
describe "Localized book Nah (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Nah (nl)", ->
    # Dutch names for Nahum must resolve to OSIS "Nah", in any letter case.
    names = ["Nahum", "Nah"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Nah.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Nah.1.1"
    true
describe "Localized book Hab (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Hab (nl)", ->
    # Dutch names for Habakkuk must resolve to OSIS "Hab", in any letter case.
    names = ["Habakuk", "Hab"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Hab.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Hab.1.1"
    true
describe "Localized book Zeph (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Zeph (nl)", ->
    # Dutch names for Zephaniah must resolve to OSIS "Zeph", in any letter case.
    names = ["Sefanja", "Zefanja", "Zeph", "Sef", "Zef"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Zeph.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Zeph.1.1"
    true
describe "Localized book Hag (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Hag (nl)", ->
    # Dutch names for Haggai must resolve to OSIS "Hag", in any letter case.
    names = ["Haggai", "Haggaï", "Hagg", "Hag"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Hag.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Hag.1.1"
    true
describe "Localized book Zech (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Zech (nl)", ->
    # Dutch names for Zechariah must resolve to OSIS "Zech", in any letter case.
    names = ["Zacharia", "Zach", "Zech"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Zech.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Zech.1.1"
    true
describe "Localized book Mal (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Mal (nl)", ->
    # Dutch names for Malachi must resolve to OSIS "Mal", in any letter case.
    names = ["Maleachi", "Mal"]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Mal.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Mal.1.1"
    true
describe "Localized book Matt (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Matt (nl)", ->
    # Dutch names for Matthew must resolve to OSIS "Matt", in any letter case.
    names = [
      "Evangelie volgens Matteus", "Evangelie volgens Matteüs",
      "Mattheus", "Mattheüs", "Matthéus", "Matthéüs", "Matteus", "Matteüs",
      "Matth", "Matt", "Mat", "Mt"
    ]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Matt.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Matt.1.1"
    true
describe "Localized book Mark (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Mark (nl)", ->
    # Dutch names for Mark must resolve to OSIS "Mark", in any letter case.
    # NOTE(review): "<NAME>" placeholders in the it title and the first three
    # literals were restored from the parallel uppercase rows
    # ("EVANGELIE VOLGENS MARCUS/MARKUS", "MARCUS").
    names = [
      "Evangelie volgens Marcus", "Evangelie volgens Markus",
      "Marcus", "Markus", "Marc", "Mark", "Mc", "Mk", "Mr"
    ]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Mark.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Mark.1.1"
    true
describe "Localized book Luke (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Luke (nl)", ->
    # Dutch names for Luke must resolve to OSIS "Luke", in any letter case.
    # NOTE(review): "<NAME>" placeholders in the titles and the first four
    # literals were restored from the parallel uppercase rows
    # ("EVANGELIE VOLGENS LUCAS/LUKAS", "LUCAS", "LUKAS").
    names = [
      "Evangelie volgens Lucas", "Evangelie volgens Lukas",
      "Lucas", "Lukas", "Luke", "Luc", "Luk", "Lc", "Lk"
    ]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "Luke.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "Luke.1.1"
    true
describe "Localized book 1John (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 1John (nl)", ->
    # Dutch names for 1 John must resolve to OSIS "1John", in any letter case.
    # NOTE(review): "<NAME>John" titles restored to "1John", and the first
    # literal restored from the uppercase row ("EERSTE JOHANNES 1:1").
    names = [
      "Eerste Johannes", "1e. Johannes", "1. Johannes", "1e Johannes",
      "I. Johannes", "1 Johannes", "Eerste Joh", "I Johannes",
      "1e. Joh", "1. Joh", "1e Joh", "I. Joh", "1 Joh", "1John", "I Joh"
    ]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "1John.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "1John.1.1"
    true
describe "Localized book 2John (nl)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 2John (nl)", ->
    # Dutch names for 2 John must resolve to OSIS "2John", in any letter case.
    names = [
      "Tweede Johannes", "2e. Johannes", "II. Johannes", "2. Johannes",
      "2e Johannes", "II Johannes", "2 Johannes", "Tweede Joh",
      "2e. Joh", "II. Joh", "2. Joh", "2e Joh", "II Joh", "2 Joh", "2John"
    ]
    for name in names
      expect(p.parse("#{name} 1:1").osis()).toEqual "2John.1.1"
    p.include_apocrypha false
    for name in names
      expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual "2John.1.1"
    true
# Every accepted Dutch spelling/abbreviation of 3 John must resolve to the
# OSIS id "3John", both in mixed case and in all caps.
describe "Localized book 3John (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3John (nl)", ->
		# Recognized names, in the order the generated spec exercises them.
		names = [
			"Derde Johannes", "III. Johannes", "3e. Johannes", "III Johannes"
			"3. Johannes", "3e Johannes", "3 Johannes", "Derde Joh"
			"III. Joh", "3e. Joh", "III Joh", "3. Joh"
			"3e Joh", "3 Joh", "3John"
		]
		# Mixed-case forms parse while the Apocrypha is enabled.
		for name in names
			expect(p.parse("#{name} 1:1").osis()).toEqual("3John.1.1")
		p.include_apocrypha false
		# The same names must also be recognized in all caps.
		for name in names
			expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("3John.1.1")
		true
# Dutch names for the Gospel of John must resolve to the OSIS id "John".
# NOTE: this block had been corrupted by "<NAME>" placeholders in the
# describe/it titles and in three uppercase assertions; they are restored
# here from the lowercase half of the block (Johannes/John/Joh), matching
# the lowercase/uppercase pairing used by every sibling book block.
describe "Localized book John (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: John (nl)", ->
		`
		expect(p.parse("Evangelie volgens Johannes 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("Johannes 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("John 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("Joh 1:1").osis()).toEqual("John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("EVANGELIE VOLGENS JOHANNES 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JOHANNES 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JOHN 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JOH 1:1").osis()).toEqual("John.1.1")
		`
		true
# Every accepted Dutch spelling/abbreviation of Acts must resolve to the
# OSIS id "Acts", both in mixed case and in all caps.
describe "Localized book Acts (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Acts (nl)", ->
		# Recognized names, in the order the generated spec exercises them.
		names = [
			"Handelingen van de apostelen"
			"Handelingen der apostelen"
			"Handelingen"
			"Acts"
			"Hand"
			"Hnd"
		]
		# Mixed-case forms parse while the Apocrypha is enabled.
		for name in names
			expect(p.parse("#{name} 1:1").osis()).toEqual("Acts.1.1")
		p.include_apocrypha false
		# The same names must also be recognized in all caps.
		for name in names
			expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Acts.1.1")
		true
# Every accepted Dutch spelling/abbreviation of Romans must resolve to the
# OSIS id "Rom", both in mixed case and in all caps.
describe "Localized book Rom (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rom (nl)", ->
		# Recognized names, in the order the generated spec exercises them.
		names = ["Romeinenbrief", "Romeinen", "Rom"]
		# Mixed-case forms parse while the Apocrypha is enabled.
		for name in names
			expect(p.parse("#{name} 1:1").osis()).toEqual("Rom.1.1")
		p.include_apocrypha false
		# The same names must also be recognized in all caps.
		for name in names
			expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Rom.1.1")
		true
# Every accepted Dutch spelling/abbreviation of 2 Corinthians must resolve
# to the OSIS id "2Cor". The uppercase assertions in the generated spec are
# the exact uppercase of the lowercase forms in the same order, so a single
# list driven through two loops reproduces the original assertion sequence.
describe "Localized book 2Cor (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Cor (nl)", ->
		# Recognized names, in the order the generated spec exercises them.
		names = [
			"Tweede Corinthiers", "Tweede Corinthiërs", "Tweede Korinthiers", "Tweede Korinthiërs"
			"Tweede Corinthier", "Tweede Corinthiër", "Tweede Corintiers", "Tweede Corintiërs"
			"Tweede Korinthier", "Tweede Korinthiër", "Tweede Korintiers", "Tweede Korintiërs"
			"Tweede Corintier", "Tweede Corintiër", "Tweede Korintier", "Tweede Korintiër"
			"2e. Corinthiers", "2e. Corinthiërs", "2e. Korinthiers", "2e. Korinthiërs"
			"II. Corinthiers", "II. Corinthiërs", "II. Korinthiers", "II. Korinthiërs"
			"Tweede Corinthe", "Tweede Korinthe"
			"2. Corinthiers", "2. Corinthiërs", "2. Korinthiers", "2. Korinthiërs"
			"2e Corinthiers", "2e Corinthiërs", "2e Korinthiers", "2e Korinthiërs"
			"2e. Corinthier", "2e. Corinthiër", "2e. Corintiers", "2e. Corintiërs"
			"2e. Korinthier", "2e. Korinthiër", "2e. Korintiers", "2e. Korintiërs"
			"II Corinthiers", "II Corinthiërs", "II Korinthiers", "II Korinthiërs"
			"II. Corinthier", "II. Corinthiër", "II. Corintiers", "II. Corintiërs"
			"II. Korinthier", "II. Korinthiër", "II. Korintiers", "II. Korintiërs"
			"2 Corinthiers", "2 Corinthiërs", "2 Korinthiers", "2 Korinthiërs"
			"2. Corinthier", "2. Corinthiër", "2. Corintiers", "2. Corintiërs"
			"2. Korinthier", "2. Korinthiër", "2. Korintiers", "2. Korintiërs"
			"2e Corinthier", "2e Corinthiër", "2e Corintiers", "2e Corintiërs"
			"2e Korinthier", "2e Korinthiër", "2e Korintiers", "2e Korintiërs"
			"2e. Corintier", "2e. Corintiër", "2e. Korintier", "2e. Korintiër"
			"II Corinthier", "II Corinthiër", "II Corintiers", "II Corintiërs"
			"II Korinthier", "II Korinthiër", "II Korintiers", "II Korintiërs"
			"II. Corintier", "II. Corintiër", "II. Korintier", "II. Korintiër"
			"2 Corinthier", "2 Corinthiër", "2 Corintiers", "2 Corintiërs"
			"2 Korinthier", "2 Korinthiër", "2 Korintiers", "2 Korintiërs"
			"2. Corintier", "2. Corintiër", "2. Korintier", "2. Korintiër"
			"2e Corintier", "2e Corintiër", "2e Korintier", "2e Korintiër"
			"2e. Corinthe", "2e. Korinthe"
			"II Corintier", "II Corintiër", "II Korintier", "II Korintiër"
			"II. Corinthe", "II. Korinthe"
			"2 Corintier", "2 Corintiër", "2 Korintier", "2 Korintiër"
			"2. Corinthe", "2. Korinthe", "2e Corinthe", "2e Korinthe"
			"II Corinthe", "II Korinthe", "2 Corinthe", "2 Korinthe"
			"Tweede Kor", "2e. Kor", "II. Kor", "2. Kor"
			"2e Kor", "II Kor", "2 Kor", "2Cor"
		]
		# Mixed-case forms parse while the Apocrypha is enabled.
		for name in names
			expect(p.parse("#{name} 1:1").osis()).toEqual("2Cor.1.1")
		p.include_apocrypha false
		# The same names must also be recognized in all caps.
		for name in names
			expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("2Cor.1.1")
		true
describe "Localized book 1Cor (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Cor (nl)", ->
`
expect(p.parse("Eerste Corinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Eerste Corinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Eerste Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Eerste Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Eerste Corinthier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Eerste Corinthiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Eerste Corintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Eerste Corintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Eerste Korinthier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Eerste Korinthiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Eerste Korintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Eerste Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Eerste Corintier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Eerste Corintiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Eerste Korintier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Eerste Korintiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e. Corinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e. Corinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e. Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e. Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Eerste Corinthe 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Eerste Korinthe 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Corinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Corinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e Corinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e Corinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e. Corinthier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e. Corinthiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e. Corintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e. Corintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e. Korinthier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e. Korinthiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e. Korintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e. Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Corinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Corinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Corinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Corinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Corinthier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Corinthiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Corintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Corintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Korinthier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Korinthiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Korintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e Corinthier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e Corinthiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e Corintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e Corintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e Korinthier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e Korinthiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e Korintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e. Corintier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e. Corintiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e. Korintier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e. Korintiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Corinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Corinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Corinthier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Corinthiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Corintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Corintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Korinthier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Korinthiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Korintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Corinthier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Corinthiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Corintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Corintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korinthier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korinthiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Corintier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Corintiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Korintier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Korintiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e Corintier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e Corintiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e Korintier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e Korintiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e. Corinthe 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e. Korinthe 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Corinthier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Corinthiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Corintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Corintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Korinthier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Korinthiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Korintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Corintier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Corintiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Korintier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Korintiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Corintier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Corintiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korintier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korintiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Corinthe 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Korinthe 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e Corinthe 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e Korinthe 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Corintier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Corintiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Korintier 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Korintiër 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Cor<NAME> 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Korinthe 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Corinthe 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korinthe 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Eerste Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Corinthe 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Korinthe 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e. Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1e Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1Cor 1:1").osis()).toEqual("1Cor.1.1")
p.include_apocrypha(false)
expect(p.parse("EERSTE CORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("EERSTE CORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("EERSTE KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("EERSTE KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("EERSTE CORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("EERSTE CORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("EERSTE CORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("EERSTE CORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("EERSTE KORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("EERSTE KORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("EERSTE KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("EERSTE KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("EERSTE CORINTIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("EERSTE CORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("EERSTE KORINTIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("EERSTE KORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E. CORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E. CORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E. KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E. KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("EERSTE CORINTHE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("EERSTE KORINTHE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. CORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. CORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E CORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E CORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E. CORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E. CORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E. CORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E. CORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E. KORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E. KORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E. KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E. KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. CORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. CORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 CORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 CORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. CORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. CORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. CORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. CORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E CORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E CORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E CORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E CORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E KORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E KORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E. CORINTIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E. CORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E. KORINTIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E. KORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I CORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I CORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. CORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. CORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. CORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. CORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 CORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 CORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 CORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 CORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. CORINTIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. CORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E CORINTIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E CORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E KORINTIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E KORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E. CORINTHE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E. KORINTHE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I CORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I CORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I CORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I CORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. CORINTIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. CORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KORINTIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 CORINTIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 CORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. CORINTHE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTHE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E CORINTHE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E KORINTHE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I CORINTIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I CORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KORINTIER 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. CORINTHE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KORINTHE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 CORINTHE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTHE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("EERSTE KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I CORINTHE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KORINTHE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E. KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1E KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1COR 1:1").osis()).toEqual("1Cor.1.1")
`
true
# Spec: every Dutch (nl) spelling of Galatians should parse to OSIS book "Gal".
describe "Localized book Gal (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Gal (nl)", ->
		# Mixed-case spellings, checked with the Apocrypha enabled.
		for form in ["Galatenbrief", "Galaten", "Gal"]
			expect(p.parse("#{form} 1:1").osis()).toEqual "Gal.1.1"
		# The same spellings upper-cased, with the Apocrypha disabled.
		p.include_apocrypha false
		for form in ["GALATENBRIEF", "GALATEN", "GAL"]
			expect(p.parse("#{form} 1:1").osis()).toEqual "Gal.1.1"
		true
# Spec: every Dutch (nl) spelling of Ephesians should parse to OSIS book "Eph".
describe "Localized book Eph (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	# FIX: the description contained a corrupted "<NAME>ph" placeholder; restored to "Eph"
	# to match the describe title and the sibling generated specs.
	it "should handle book: Eph (nl)", ->
		`
		expect(p.parse("Efeziers 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("Efeziërs 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("Efez 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("Eph 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("Ef 1:1").osis()).toEqual("Eph.1.1")
		// Same forms upper-cased, with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("EFEZIERS 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EFEZIËRS 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EFEZ 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPH 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EF 1:1").osis()).toEqual("Eph.1.1")
		`
		true
# Spec: every Dutch (nl) spelling of Philippians should parse to OSIS book "Phil".
describe "Localized book Phil (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	# FIX: the description contained a corrupted "<NAME>" placeholder; restored to "Phil"
	# to match the describe title and the sibling generated specs.
	it "should handle book: Phil (nl)", ->
		`
		expect(p.parse("Filippenzen 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Filip 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Phil 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Fil 1:1").osis()).toEqual("Phil.1.1")
		// Same forms upper-cased, with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("FILIPPENZEN 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("FILIP 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PHIL 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("FIL 1:1").osis()).toEqual("Phil.1.1")
		`
		true
# Spec: every Dutch (nl) spelling of Colossians should parse to OSIS book "Col".
describe "Localized book Col (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	# FIX: the description contained a corrupted "<NAME>" placeholder; restored to "Col"
	# to match the describe title and the sibling generated specs.
	it "should handle book: Col (nl)", ->
		`
		expect(p.parse("Colossenzen 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("Kolossenzen 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("Col 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("Kol 1:1").osis()).toEqual("Col.1.1")
		// Same forms upper-cased, with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("COLOSSENZEN 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KOLOSSENZEN 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("COL 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KOL 1:1").osis()).toEqual("Col.1.1")
		`
		true
# Spec: every Dutch (nl) spelling of 2 Thessalonians should parse to OSIS book "2Thess".
describe "Localized book 2Thess (nl)", ->
	p = {}
	beforeEach ->
		# Fresh parser per example; options presumably force strict per-reference
		# parsing (NOTE(review): confirm strategy semantics against bcv_parser docs).
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Thess (nl)", ->
		`
		expect(p.parse("Tweede Thessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("Tweede Tessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2e. Thessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II. Thessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. Thessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2e Thessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2e. Tessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II Thessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II. Tessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 Thessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. Tessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2e Tessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II Tessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 Tessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("Tweede Thess 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("Tweede Tess 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("Tweede Tes 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2e. Thess 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II. Thess 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. Thess 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2e Thess 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2e. Tess 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II Thess 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II. Tess 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 Thess 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. Tess 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2e Tess 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2e. Tes 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II Tess 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II. Tes 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 Tess 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 Thes 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. Tes 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2Thess 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2e Tes 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II Tes 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 Tes 1:1").osis()).toEqual("2Thess.1.1")
		// Same forms upper-cased, with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("TWEEDE THESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TWEEDE TESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2E. THESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II. THESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. THESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2E THESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2E. TESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II THESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II. TESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 THESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2E TESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II TESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TWEEDE THESS 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TWEEDE TESS 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TWEEDE TES 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2E. THESS 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II. THESS 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. THESS 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2E THESS 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2E. TESS 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II THESS 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II. TESS 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 THESS 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TESS 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2E TESS 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2E. TES 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II TESS 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II. TES 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TESS 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 THES 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TES 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2THESS 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2E TES 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II TES 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TES 1:1").osis()).toEqual("2Thess.1.1")
		`
		true
# Spec: every Dutch (nl) spelling of 1 Thessalonians should parse to OSIS book "1Thess".
describe "Localized book 1Thess (nl)", ->
	p = {}
	beforeEach ->
		# Fresh parser per example with the same option set used by the sibling specs.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Thess (nl)", ->
		`
		expect(p.parse("Eerste Thessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("Eerste Tessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1e. Thessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. Thessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1e Thessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1e. Tessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I. Thessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 Thessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. Tessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1e Tessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I Thessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I. Tessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 Tessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I Tessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("Eerste Thess 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("Eerste Tess 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("Eerste Tes 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1e. Thess 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. Thess 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1e Thess 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1e. Tess 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I. Thess 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 Thess 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. Tess 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1e Tess 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1e. Tes 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I Thess 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I. Tess 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 Tess 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 Thes 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. Tes 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1Thess 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1e Tes 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I Tess 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I. Tes 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 Tes 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I Tes 1:1").osis()).toEqual("1Thess.1.1")
		// Same forms upper-cased, with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("EERSTE THESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("EERSTE TESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1E. THESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. THESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1E THESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1E. TESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I. THESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 THESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. TESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1E TESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I THESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I. TESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 TESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I TESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("EERSTE THESS 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("EERSTE TESS 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("EERSTE TES 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1E. THESS 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. THESS 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1E THESS 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1E. TESS 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I. THESS 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 THESS 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. TESS 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1E TESS 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1E. TES 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I THESS 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I. TESS 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 TESS 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 THES 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. TES 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1THESS 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1E TES 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I TESS 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I. TES 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 TES 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I TES 1:1").osis()).toEqual("1Thess.1.1")
		`
		true
# Spec: every Dutch (nl) spelling of 2 Timothy should parse to OSIS book "2Tim".
describe "Localized book 2Tim (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	# FIX: the description contained a corrupted "<NAME>Tim" placeholder; restored to "2Tim"
	# to match the describe title and the sibling generated specs.
	it "should handle book: 2Tim (nl)", ->
		`
		expect(p.parse("Tweede Timotheus 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("Tweede Timotheüs 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("Tweede Timoteus 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("Tweede Timoteüs 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2e. Timotheus 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2e. Timotheüs 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II. Timotheus 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II. Timotheüs 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. Timotheus 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. Timotheüs 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2e Timotheus 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2e Timotheüs 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2e. Timoteus 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2e. Timoteüs 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II Timotheus 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II Timotheüs 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II. Timoteus 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II. Timoteüs 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 Timotheus 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 Timotheüs 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. Timoteus 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. Timoteüs 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2e Timoteus 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2e Timoteüs 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II Timoteus 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II Timoteüs 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 Timoteus 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 Timoteüs 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("Tweede Tim 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2e. Tim 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II. Tim 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. Tim 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2e Tim 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II Tim 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 Tim 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2Tim 1:1").osis()).toEqual("2Tim.1.1")
		// Same forms upper-cased, with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("TWEEDE TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("TWEEDE TIMOTHEÜS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("TWEEDE TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("TWEEDE TIMOTEÜS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2E. TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2E. TIMOTHEÜS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II. TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II. TIMOTHEÜS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. TIMOTHEÜS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2E TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2E TIMOTHEÜS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2E. TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2E. TIMOTEÜS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II TIMOTHEÜS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II. TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II. TIMOTEÜS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIMOTHEÜS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. TIMOTEÜS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2E TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2E TIMOTEÜS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II TIMOTEÜS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIMOTEÜS 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("TWEEDE TIM 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2E. TIM 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II. TIM 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. TIM 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2E TIM 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II TIM 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIM 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2TIM 1:1").osis()).toEqual("2Tim.1.1")
		`
		true
# Spec: every Dutch (nl) spelling of 1 Timothy should parse to OSIS book "1Tim".
describe "Localized book 1Tim (nl)", ->
	p = {}
	beforeEach ->
		# Fresh parser per example with the same option set used by the sibling specs.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Tim (nl)", ->
		`
		expect(p.parse("Eerste Timotheus 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("Eerste Timotheüs 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("Eerste Timoteus 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("Eerste Timoteüs 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1e. Timotheus 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1e. Timotheüs 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. Timotheus 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. Timotheüs 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1e Timotheus 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1e Timotheüs 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1e. Timoteus 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1e. Timoteüs 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I. Timotheus 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I. Timotheüs 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 Timotheus 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 Timotheüs 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. Timoteus 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. Timoteüs 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1e Timoteus 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1e Timoteüs 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I Timotheus 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I Timotheüs 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I. Timoteus 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I. Timoteüs 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 Timoteus 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 Timoteüs 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("Eerste Tim 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I Timoteus 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I Timoteüs 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1e. Tim 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. Tim 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1e Tim 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I. Tim 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 Tim 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I Tim 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1Tim 1:1").osis()).toEqual("1Tim.1.1")
		// Same forms upper-cased, with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("EERSTE TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("EERSTE TIMOTHEÜS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("EERSTE TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("EERSTE TIMOTEÜS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1E. TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1E. TIMOTHEÜS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. TIMOTHEÜS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1E TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1E TIMOTHEÜS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1E. TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1E. TIMOTEÜS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I. TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I. TIMOTHEÜS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIMOTHEÜS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. TIMOTEÜS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1E TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1E TIMOTEÜS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I TIMOTHEÜS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I. TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I. TIMOTEÜS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIMOTEÜS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("EERSTE TIM 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I TIMOTEÜS 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1E. TIM 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. TIM 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1E TIM 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I. TIM 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIM 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I TIM 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1TIM 1:1").osis()).toEqual("1Tim.1.1")
		`
		true
# Generated spec: Dutch (nl) spellings of Titus must normalize to OSIS "Titus".
# Lower-case forms run with the Apocrypha enabled; after `p.include_apocrypha(false)`
# the upper-case forms re-check that matching is case-insensitive with it disabled.
describe "Localized book Titus (nl)", ->
	# Fresh parser per example so option/apocrypha state cannot leak between tests.
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Titus (nl)", ->
		# The backtick block embeds raw JavaScript so these generated expectations
		# bypass CoffeeScript compilation; the trailing `true` keeps the implicit
		# return of the `it` body trivial.
		`
		expect(p.parse("Titus 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("Tit 1:1").osis()).toEqual("Titus.1.1")
		p.include_apocrypha(false)
		expect(p.parse("TITUS 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TIT 1:1").osis()).toEqual("Titus.1.1")
		`
		true
# Generated spec: Dutch (nl) spellings of Philemon must normalize to OSIS "Phlm";
# upper-case forms are re-checked after include_apocrypha(false).
describe "Localized book Phlm (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phlm (nl)", ->
		`
		expect(p.parse("Filemon 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("Filémon 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("Filem 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("Film 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("Phlm 1:1").osis()).toEqual("Phlm.1.1")
		p.include_apocrypha(false)
		expect(p.parse("FILEMON 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("FILÉMON 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("FILEM 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("FILM 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("PHLM 1:1").osis()).toEqual("Phlm.1.1")
		`
		true
# Generated spec: Dutch (nl) spellings of Hebrews must normalize to OSIS "Heb";
# upper-case forms are re-checked after include_apocrypha(false).
describe "Localized book Heb (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Heb (nl)", ->
		`
		expect(p.parse("Hebreeen 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("Hebreeën 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("Hebr 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("Heb 1:1").osis()).toEqual("Heb.1.1")
		p.include_apocrypha(false)
		expect(p.parse("HEBREEEN 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HEBREEËN 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HEBR 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HEB 1:1").osis()).toEqual("Heb.1.1")
		`
		true
# Generated spec: Dutch (nl) spellings of James must normalize to OSIS "Jas";
# upper-case forms are re-checked after include_apocrypha(false).
describe "Localized book Jas (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jas (nl)", ->
		`
		expect(p.parse("Jakobus 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("Jak 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("Jas 1:1").osis()).toEqual("Jas.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JAKOBUS 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("JAK 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("JAS 1:1").osis()).toEqual("Jas.1.1")
		`
		true
# Generated spec: Dutch (nl) ordinal+name combinations for 2 Peter ("Tweede",
# "2e.", "II", "2", …) must normalize to OSIS "2Pet"; upper-case forms are
# re-checked after include_apocrypha(false).
describe "Localized book 2Pet (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Pet (nl)", ->
		`
		expect(p.parse("Tweede Petrus 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("Tweede Petr 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2e. Petrus 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("II. Petrus 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("Tweede Pet 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2. Petrus 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2e Petrus 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("II Petrus 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 Petrus 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2e. Petr 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("II. Petr 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2. Petr 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2e Petr 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2e. Pet 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("II Petr 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("II. Pet 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 Petr 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2. Pet 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2e Pet 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("II Pet 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 Pet 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 Pe 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2Pet 1:1").osis()).toEqual("2Pet.1.1")
		p.include_apocrypha(false)
		expect(p.parse("TWEEDE PETRUS 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("TWEEDE PETR 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2E. PETRUS 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("II. PETRUS 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("TWEEDE PET 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2. PETRUS 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2E PETRUS 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("II PETRUS 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 PETRUS 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2E. PETR 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("II. PETR 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2. PETR 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2E PETR 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2E. PET 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("II PETR 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("II. PET 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 PETR 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2. PET 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2E PET 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("II PET 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 PET 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 PE 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2PET 1:1").osis()).toEqual("2Pet.1.1")
		`
		true
# Generated spec: Dutch (nl) ordinal+name combinations for 1 Peter ("Eerste",
# "1e.", "I", "1", …) must normalize to OSIS "1Pet"; upper-case forms are
# re-checked after include_apocrypha(false).
describe "Localized book 1Pet (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Pet (nl)", ->
		`
		expect(p.parse("Eerste Petrus 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("Eerste Petr 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1e. Petrus 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("Eerste Pet 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1. Petrus 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1e Petrus 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("I. Petrus 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 Petrus 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1e. Petr 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("I Petrus 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1. Petr 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1e Petr 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1e. Pet 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("I. Petr 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 Petr 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1. Pet 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1e Pet 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("I Petr 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("I. Pet 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 Pet 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("I Pet 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 Pe 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1Pet 1:1").osis()).toEqual("1Pet.1.1")
		p.include_apocrypha(false)
		expect(p.parse("EERSTE PETRUS 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("EERSTE PETR 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1E. PETRUS 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("EERSTE PET 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1. PETRUS 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1E PETRUS 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("I. PETRUS 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 PETRUS 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1E. PETR 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("I PETRUS 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1. PETR 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1E PETR 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1E. PET 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("I. PETR 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 PETR 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1. PET 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1E PET 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("I PETR 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("I. PET 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 PET 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("I PET 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 PE 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1PET 1:1").osis()).toEqual("1Pet.1.1")
		`
		true
# Generated spec: Dutch (nl) spellings of Jude must normalize to OSIS "Jude";
# upper-case forms are re-checked after include_apocrypha(false).
# FIXED: the describe/it titles had been mangled by a redaction artifact
# ("<NAME>ude"); restored to "Jude", which the OSIS codes asserted below confirm.
describe "Localized book Jude (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jude (nl)", ->
		`
		expect(p.parse("Judas 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("Jude 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("Jud 1:1").osis()).toEqual("Jude.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JUDAS 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("JUDE 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("JUD 1:1").osis()).toEqual("Jude.1.1")
		`
		true
# Generated spec: Dutch (nl) spellings of Tobit must normalize to OSIS "Tob".
# Apocryphal book: no include_apocrypha(false)/upper-case section here, since the
# book is only recognized while the Apocrypha is enabled.
describe "Localized book Tob (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Tob (nl)", ->
		`
		expect(p.parse("Tobias 1:1").osis()).toEqual("Tob.1.1")
		expect(p.parse("Tobías 1:1").osis()).toEqual("Tob.1.1")
		expect(p.parse("Tobia 1:1").osis()).toEqual("Tob.1.1")
		expect(p.parse("Tobit 1:1").osis()).toEqual("Tob.1.1")
		expect(p.parse("Tobía 1:1").osis()).toEqual("Tob.1.1")
		expect(p.parse("Tob 1:1").osis()).toEqual("Tob.1.1")
		`
		true
# Generated spec: Dutch (nl) spellings of Judith must normalize to OSIS "Jdt"
# (apocryphal book — only tested with the Apocrypha enabled).
describe "Localized book Jdt (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jdt (nl)", ->
		`
		expect(p.parse("Judith 1:1").osis()).toEqual("Jdt.1.1")
		expect(p.parse("Judit 1:1").osis()).toEqual("Jdt.1.1")
		expect(p.parse("Jdt 1:1").osis()).toEqual("Jdt.1.1")
		`
		true
# Generated spec: Dutch (nl) spellings of Baruch must normalize to OSIS "Bar"
# (apocryphal book — only tested with the Apocrypha enabled).
describe "Localized book Bar (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Bar (nl)", ->
		`
		expect(p.parse("Baruch 1:1").osis()).toEqual("Bar.1.1")
		expect(p.parse("Bar 1:1").osis()).toEqual("Bar.1.1")
		`
		true
# Generated spec: Dutch (nl) spellings of Susanna must normalize to OSIS "Sus"
# (apocryphal book — only tested with the Apocrypha enabled).
# FIXED: the first alias had been mangled by a redaction artifact ("Sus<NAME>",
# i.e. "Sus" + redacted "anna"); restored to "Susanna".
describe "Localized book Sus (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Sus (nl)", ->
		`
		expect(p.parse("Susanna 1:1").osis()).toEqual("Sus.1.1")
		expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
		`
		true
# Generated spec: Dutch (nl) ordinal+name combinations for 2 Maccabees must
# normalize to OSIS "2Macc" (apocryphal — only tested with the Apocrypha enabled).
describe "Localized book 2Macc (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Macc (nl)", ->
		`
		expect(p.parse("Tweede Makkabeeen 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("Tweede Makkabeeën 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2e. Makkabeeen 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2e. Makkabeeën 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("II. Makkabeeen 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("II. Makkabeeën 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2. Makkabeeen 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2. Makkabeeën 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2e Makkabeeen 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2e Makkabeeën 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("II Makkabeeen 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("II Makkabeeën 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2 Makkabeeen 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2 Makkabeeën 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("Tweede Mak 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2e. Mak 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("II. Mak 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2. Mak 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2e Mak 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("II Mak 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2 Mak 1:1").osis()).toEqual("2Macc.1.1")
		expect(p.parse("2Macc 1:1").osis()).toEqual("2Macc.1.1")
		`
		true
# Generated spec: Dutch (nl) ordinal+name combinations for 3 Maccabees must
# normalize to OSIS "3Macc" (apocryphal — only tested with the Apocrypha enabled).
describe "Localized book 3Macc (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3Macc (nl)", ->
		`
		expect(p.parse("Derde Makkabeeen 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("Derde Makkabeeën 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("III. Makkabeeen 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("III. Makkabeeën 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3e. Makkabeeen 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3e. Makkabeeën 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("III Makkabeeen 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("III Makkabeeën 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3. Makkabeeen 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3. Makkabeeën 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3e Makkabeeen 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3e Makkabeeën 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3 Makkabeeen 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3 Makkabeeën 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("Derde Mak 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("III. Mak 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3e. Mak 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("III Mak 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3. Mak 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3e Mak 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3 Mak 1:1").osis()).toEqual("3Macc.1.1")
		expect(p.parse("3Macc 1:1").osis()).toEqual("3Macc.1.1")
		`
		true
# Generated spec: Dutch (nl) ordinal+name combinations for 4 Maccabees must
# normalize to OSIS "4Macc" (apocryphal — only tested with the Apocrypha enabled).
describe "Localized book 4Macc (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 4Macc (nl)", ->
		`
		expect(p.parse("Vierde Makkabeeen 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("Vierde Makkabeeën 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("IV. Makkabeeen 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("IV. Makkabeeën 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("4. Makkabeeen 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("4. Makkabeeën 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("IV Makkabeeen 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("IV Makkabeeën 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("4 Makkabeeen 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("4 Makkabeeën 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("Vierde Mak 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("IV. Mak 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("4. Mak 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("IV Mak 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("4 Mak 1:1").osis()).toEqual("4Macc.1.1")
		expect(p.parse("4Macc 1:1").osis()).toEqual("4Macc.1.1")
		`
		true
# Generated spec: Dutch (nl) ordinal+name combinations for 1 Maccabees must
# normalize to OSIS "1Macc" (apocryphal — only tested with the Apocrypha enabled).
describe "Localized book 1Macc (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Macc (nl)", ->
		`
		expect(p.parse("Eerste Makkabeeen 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("Eerste Makkabeeën 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1e. Makkabeeen 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1e. Makkabeeën 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1. Makkabeeen 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1. Makkabeeën 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1e Makkabeeen 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1e Makkabeeën 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("I. Makkabeeen 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("I. Makkabeeën 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1 Makkabeeen 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1 Makkabeeën 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("I Makkabeeen 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("I Makkabeeën 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("Eerste Mak 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1e. Mak 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1. Mak 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1e Mak 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("I. Mak 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1 Mak 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("1Macc 1:1").osis()).toEqual("1Macc.1.1")
		expect(p.parse("I Mak 1:1").osis()).toEqual("1Macc.1.1")
		`
		true
# Generated spec: the ambiguous Dutch abbreviation "Ez" (could mean Ezekiel or
# Ezra) must resolve to OSIS "Ezek" in this locale, in both cases.
describe "Localized book Ezek,Ezra (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezek,Ezra (nl)", ->
		`
		expect(p.parse("Ez 1:1").osis()).toEqual("Ezek.1.1")
		p.include_apocrypha(false)
		expect(p.parse("EZ 1:1").osis()).toEqual("Ezek.1.1")
		`
		true
# Grab-bag of Dutch (nl) locale behaviors: ranges, chapter/verse keywords,
# "and" connectors, psalm titles, "ff"-style continuations, translation tags,
# book ranges, and punctuation boundaries.
# FIXED: the book-range input had been mangled by a redaction artifact
# ("Eerste - Der<NAME> Joh"); restored to "Eerste - Derde Joh", which the
# expected OSIS "1John.1-3John.1" confirms.
describe "Miscellaneous tests", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should return the expected language", ->
		expect(p.languages).toEqual ["nl"]
	it "should handle ranges (nl)", ->
		expect(p.parse("Titus 1:1 - 2").osis()).toEqual "Titus.1.1-Titus.1.2"
		expect(p.parse("Matt 1-2").osis()).toEqual "Matt.1-Matt.2"
		expect(p.parse("Phlm 2 - 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
	it "should handle chapters (nl)", ->
		expect(p.parse("Titus 1:1, hoofdstukken 2").osis()).toEqual "Titus.1.1,Titus.2"
		expect(p.parse("Matt 3:4 HOOFDSTUKKEN 6").osis()).toEqual "Matt.3.4,Matt.6"
		expect(p.parse("Titus 1:1, hoofdstuk 2").osis()).toEqual "Titus.1.1,Titus.2"
		expect(p.parse("Matt 3:4 HOOFDSTUK 6").osis()).toEqual "Matt.3.4,Matt.6"
	it "should handle verses (nl)", ->
		expect(p.parse("Exod 1:1 verzen 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VERZEN 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 vers 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VERS 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 vs. 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VS. 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 vs 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VS 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 v. 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm V. 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 v 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm V 6").osis()).toEqual "Phlm.1.6"
	it "should handle 'and' (nl)", ->
		expect(p.parse("Exod 1:1 en 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm 2 EN 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
		expect(p.parse("Exod 1:1 vgl 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm 2 VGL 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
		expect(p.parse("Exod 1:1 zie ook 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm 2 ZIE OOK 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
	it "should handle titles (nl)", ->
		expect(p.parse("Ps 3 opschrift, 4:2, 5:opschrift").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
		expect(p.parse("PS 3 OPSCHRIFT, 4:2, 5:OPSCHRIFT").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
	it "should handle 'ff' (nl)", ->
		expect(p.parse("Rev 3en volgende verzen, 4:2en volgende verzen").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
		expect(p.parse("REV 3 EN VOLGENDE VERZEN, 4:2 EN VOLGENDE VERZEN").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
	it "should handle translations (nl)", ->
		expect(p.parse("Lev 1 (GNB96)").osis_and_translations()).toEqual [["Lev.1", "GNB96"]]
		expect(p.parse("lev 1 gnb96").osis_and_translations()).toEqual [["Lev.1", "GNB96"]]
		expect(p.parse("Lev 1 (NB)").osis_and_translations()).toEqual [["Lev.1", "NB"]]
		expect(p.parse("lev 1 nb").osis_and_translations()).toEqual [["Lev.1", "NB"]]
		expect(p.parse("Lev 1 (NBG51)").osis_and_translations()).toEqual [["Lev.1", "NBG51"]]
		expect(p.parse("lev 1 nbg51").osis_and_translations()).toEqual [["Lev.1", "NBG51"]]
		expect(p.parse("Lev 1 (NBV)").osis_and_translations()).toEqual [["Lev.1", "NBV"]]
		expect(p.parse("lev 1 nbv").osis_and_translations()).toEqual [["Lev.1", "NBV"]]
		expect(p.parse("Lev 1 (SV)").osis_and_translations()).toEqual [["Lev.1", "SV"]]
		expect(p.parse("lev 1 sv").osis_and_translations()).toEqual [["Lev.1", "SV"]]
		expect(p.parse("Lev 1 (SV77)").osis_and_translations()).toEqual [["Lev.1", "SV77"]]
		expect(p.parse("lev 1 sv77").osis_and_translations()).toEqual [["Lev.1", "SV77"]]
		expect(p.parse("Lev 1 (WV95)").osis_and_translations()).toEqual [["Lev.1", "WV95"]]
		expect(p.parse("lev 1 wv95").osis_and_translations()).toEqual [["Lev.1", "WV95"]]
	it "should handle book ranges (nl)", ->
		p.set_options {book_alone_strategy: "full", book_range_strategy: "include"}
		expect(p.parse("Eerste - Derde Joh").osis()).toEqual "1John.1-3John.1"
	it "should handle boundaries (nl)", ->
		p.set_options {book_alone_strategy: "full"}
		expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
		expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
# Load the Dutch (nl) build of the parser under test.
# FIXED: removed the stray "true |" prefix, which looks like a dataset-join
# artifact rather than original source — TODO confirm against the upstream file.
bcv_parser = require("../../js/nl_bcv_parser.js").bcv_parser
# Sanity checks that canonical OSIS references round-trip through the parser
# unchanged, for all 66 protocanonical books and the Apocrypha, plus basic
# handling of characters preceding a reference.
# FIXED: 13 entries of the book list had been destroyed by redaction artifacts
# ("PI:NAME:<NAME>END_PI"); restored from the canonical OSIS book order
# (Dan..Nah between Ezek and Hab; Mal..John between Zech and Acts) — the
# surviving fragments ("...mos" = Amos, "...ad" = Obad) match this order.
describe "Parsing", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.options.osis_compaction_strategy = "b"
		p.options.sequence_combination_strategy = "combine"
	it "should round-trip OSIS references", ->
		p.set_options osis_compaction_strategy: "bc"
		books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
	it "should round-trip OSIS Apocrypha references", ->
		p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
		p.include_apocrypha true
		books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
		# Ps151 is special-cased: with the "bc" strategy it is reported as Ps.151.
		p.set_options ps151_strategy: "bc"
		expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
		expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
		expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
		# With the Apocrypha disabled, none of these books should parse at all.
		p.include_apocrypha false
		for book in books
			bc = book + ".1"
			expect(p.parse(bc).osis()).toEqual ""
	it "should handle a preceding character", ->
		expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
		expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
		expect(p.parse("1Ps 1").osis()).toEqual ""
		expect(p.parse("11Sam 1").osis()).toEqual ""
# Generated spec: Dutch (nl) spellings of Genesis (including the Hebrew-derived
# "Beresjiet" and "Mozes" ordinal forms) must normalize to OSIS "Gen";
# upper-case forms are re-checked after include_apocrypha(false).
describe "Localized book Gen (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Gen (nl)", ->
		`
		expect(p.parse("Eerste Mozes 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1e. Mozes 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Beresjiet 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. Mozes 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1e Mozes 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("I. Mozes 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 Mozes 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Genesis 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("I Mozes 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Gn 1:1").osis()).toEqual("Gen.1.1")
		p.include_apocrypha(false)
		expect(p.parse("EERSTE MOZES 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1E. MOZES 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("BERESJIET 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. MOZES 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1E MOZES 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("I. MOZES 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 MOZES 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("GENESIS 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("I MOZES 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("GN 1:1").osis()).toEqual("Gen.1.1")
		`
		true
# Generated spec: Dutch (nl) spellings of Exodus (including the Hebrew-derived
# "Sjemot" and "Tweede Mozes" forms) must normalize to OSIS "Exod";
# upper-case forms are re-checked after include_apocrypha(false).
describe "Localized book Exod (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Exod (nl)", ->
		`
		expect(p.parse("Tweede Mozes 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2e. Mozes 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("II. Mozes 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. Mozes 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2e Mozes 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("II Mozes 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 Mozes 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Exodus 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Sjemot 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Ex 1:1").osis()).toEqual("Exod.1.1")
		p.include_apocrypha(false)
		expect(p.parse("TWEEDE MOZES 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2E. MOZES 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("II. MOZES 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. MOZES 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2E MOZES 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("II MOZES 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 MOZES 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("EXODUS 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("SJEMOT 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("EX 1:1").osis()).toEqual("Exod.1.1")
		`
		true
# Generated spec: Dutch (nl) spellings of Bel and the Dragon must normalize to
# OSIS "Bel" (apocryphal book — only tested with the Apocrypha enabled).
describe "Localized book Bel (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Bel (nl)", ->
		`
		expect(p.parse("Bel en de draak 1:1").osis()).toEqual("Bel.1.1")
		expect(p.parse("Bel 1:1").osis()).toEqual("Bel.1.1")
		`
		true
# Generated spec: Dutch (nl) spellings of Leviticus (including the Hebrew-derived
# "Vajikra"/"Wajikra" and "Derde Mozes" forms) must normalize to OSIS "Lev";
# upper-case forms are re-checked after include_apocrypha(false).
describe "Localized book Lev (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Lev (nl)", ->
		`
		expect(p.parse("Derde Mozes 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("III. Mozes 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3e. Mozes 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("III Mozes 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Leviticus 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3. Mozes 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3e Mozes 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3 Mozes 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Vajikra 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Wajikra 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Lv 1:1").osis()).toEqual("Lev.1.1")
		p.include_apocrypha(false)
		expect(p.parse("DERDE MOZES 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("III. MOZES 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3E. MOZES 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("III MOZES 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVITICUS 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3. MOZES 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3E MOZES 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3 MOZES 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("VAJIKRA 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("WAJIKRA 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LV 1:1").osis()).toEqual("Lev.1.1")
		`
		true
# Generated spec: Dutch (nl) spellings of Numbers (including the Hebrew-derived
# "Bamidbar"/"Bemidbar" and "Vierde Mozes" forms) must normalize to OSIS "Num";
# upper-case forms are re-checked after include_apocrypha(false).
describe "Localized book Num (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Num (nl)", ->
		`
		expect(p.parse("Vierde Mozes 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("IV. Mozes 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4. Mozes 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("Bamidbar 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("Bemidbar 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("IV Mozes 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4 Mozes 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("Numberi 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("Numeri 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("Nu 1:1").osis()).toEqual("Num.1.1")
		p.include_apocrypha(false)
		expect(p.parse("VIERDE MOZES 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("IV. MOZES 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4. MOZES 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("BAMIDBAR 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("BEMIDBAR 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("IV MOZES 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4 MOZES 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("NUMBERI 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("NUMERI 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("NU 1:1").osis()).toEqual("Num.1.1")
		`
		true
# Generated spec: Dutch (nl) spellings of Sirach/Ecclesiasticus must normalize
# to OSIS "Sir" (apocryphal book — only tested with the Apocrypha enabled).
# FIXME(review): several alias strings below contain redaction artifacts
# ("PI:NAME:<NAME>END_PI") from a data-scrubbing pass — the original Dutch
# aliases (presumably variants of "Wijsheid van Jezus Sirach" / "Jezus Sirach")
# cannot be reconstructed with confidence here; restore from the upstream
# generated spec before relying on these cases.
describe "Localized book Sir (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Sir (nl)", ->
		`
		expect(p.parse("PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Sir.1.1")
		expect(p.parse("WiPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Sir.1.1")
		expect(p.parse("WijsPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Sir.1.1")
		expect(p.parse("Ecclesiasticus 1:1").osis()).toEqual("Sir.1.1")
		expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Sir.1.1")
		expect(p.parse("Sirach 1:1").osis()).toEqual("Sir.1.1")
		expect(p.parse("Sir 1:1").osis()).toEqual("Sir.1.1")
		`
		true
describe "Localized book Wis (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Wis (nl)", ->
`
expect(p.parse("De wijsheid van Salomo 1:1").osis()).toEqual("Wis.1.1")
expect(p.parse("Het boek der wijsheid 1:1").osis()).toEqual("Wis.1.1")
expect(p.parse("Wijsheid van Salomo 1:1").osis()).toEqual("Wis.1.1")
expect(p.parse("Wijsheid 1:1").osis()).toEqual("Wis.1.1")
expect(p.parse("Wis 1:1").osis()).toEqual("Wis.1.1")
`
true
describe "Localized book Lam (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lam (nl)", ->
`
expect(p.parse("Klaagliederen 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Klaagl 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Kl 1:1").osis()).toEqual("Lam.1.1")
p.include_apocrypha(false)
expect(p.parse("KLAAGLIEDEREN 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("KLAAGL 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("KL 1:1").osis()).toEqual("Lam.1.1")
`
true
describe "Localized book EpJer (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: EpJer (nl)", ->
`
expect(p.parse("Brief PI:NAME:<NAME>END_PI JerPI:NAME:<NAME>END_PIia 1:1").osis()).toEqual("EpJer.1.1")
expect(p.parse("EpJer 1:1").osis()).toEqual("EpJer.1.1")
`
true
describe "Localized book Rev (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Rev (nl)", ->
`
expect(p.parse("Openbaring van Johannes 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Openbaringen 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Openbaring 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Apocalyps 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Openb 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Apc 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Apk 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Op 1:1").osis()).toEqual("Rev.1.1")
p.include_apocrypha(false)
expect(p.parse("OPENBARING VAN JPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("OPENBARINGEN 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("OPENBARING 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("APOCALYPS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("OPENB 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("APC 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("APK 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("OP 1:1").osis()).toEqual("Rev.1.1")
`
true
describe "Localized book PrMan (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PrMan (nl)", ->
`
expect(p.parse("Manasse 1:1").osis()).toEqual("PrMan.1.1")
expect(p.parse("PrMan 1:1").osis()).toEqual("PrMan.1.1")
expect(p.parse("Man 1:1").osis()).toEqual("PrMan.1.1")
`
true
describe "Localized book Deut (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Deut (nl)", ->
`
expect(p.parse("Deuteronomium 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Vijfde Mozes 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. Mozes 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Dewariem 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V. Mozes 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 Mozes 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V Mozes 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Dt 1:1").osis()).toEqual("Deut.1.1")
p.include_apocrypha(false)
expect(p.parse("DEUTERONOMIUM 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("VIJFDE MOZES 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. MOZES 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DEWARIEM 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V. MOZES 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 MOZES 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V MOZES 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DT 1:1").osis()).toEqual("Deut.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PIosh (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (nl)", ->
`
expect(p.parse("Jozua 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("PI:NAME:<NAME>END_PIosh 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("Joz 1:1").osis()).toEqual("Josh.1.1")
p.include_apocrypha(false)
expect(p.parse("JOZUA 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOZ 1:1").osis()).toEqual("Josh.1.1")
`
true
describe "Localized book Judg (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Judg (nl)", ->
`
expect(p.parse("Richteren 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Rechters 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Richtere 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Recht 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Richt 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Re 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Ri 1:1").osis()).toEqual("Judg.1.1")
p.include_apocrypha(false)
expect(p.parse("RICHTEREN 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("RECHTERS 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("RICHTERE 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("RECHT 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("RICHT 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("RE 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("RI 1:1").osis()).toEqual("Judg.1.1")
`
true
describe "Localized book Ruth (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ruth (nl)", ->
`
expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("Rt 1:1").osis()).toEqual("Ruth.1.1")
p.include_apocrypha(false)
expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("RT 1:1").osis()).toEqual("Ruth.1.1")
`
true
describe "Localized book 1Esd (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Esd (nl)", ->
`
expect(p.parse("Eerste Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("Derde Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("Eerste Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("III. Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1e. Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3e. Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("Derde Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("III Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1. Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1e Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3. Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3e Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("I. Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("III. Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1 Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1e. Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3 Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3e. Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("I Esdras 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("III Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1. Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1e Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3. Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3e Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("I. Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1 Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("3 Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("I Ezra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1Esd 1:1").osis()).toEqual("1Esd.1.1")
`
true
describe "Localized book 2Esd (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Esd (nl)", ->
`
expect(p.parse("Tweede Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("Vierde Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("Tweede Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("Vierde Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2e. Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("II. Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("IV. Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2. Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2e Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("4. Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("II Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("IV Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2 Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2e. Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("4 Esdras 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("II. Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("IV. Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2. Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2e Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("4. Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("II Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("IV Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2 Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("4 Ezra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2Esd 1:1").osis()).toEqual("2Esd.1.1")
`
true
describe "Localized book Isa (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Isa (nl)", ->
`
expect(p.parse("Jesaja 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Jes 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Js 1:1").osis()).toEqual("Isa.1.1")
p.include_apocrypha(false)
expect(p.parse("JESAJA 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("JES 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("JS 1:1").osis()).toEqual("Isa.1.1")
`
true
describe "Localized book 2Sam (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Sam (nl)", ->
`
expect(p.parse("Tweede PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("Tweede SamPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2e. PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2e. PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II. PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II. PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("Tweede PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. SamPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2e SamPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2e Samuël 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II Samuel 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II Samuël 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("Samuel II 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 Samuel 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 Samuël 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2e. Sam 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II. Sam 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. Sam 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2e Sam 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II Sam 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 Sam 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2Sam 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 S 1:1").osis()).toEqual("2Sam.1.1")
p.include_apocrypha(false)
expect(p.parse("TWEEDE SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("TWEEDE SAMUËL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2E. SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2E. SAMUËL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II. SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II. SAMUËL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("TWEEDE SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. SAMUËL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2E SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2E SAMUËL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II SAMUËL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("SAMUEL II 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 SAMUËL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2E. SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II. SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2E SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 S 1:1").osis()).toEqual("2Sam.1.1")
`
true
describe "Localized book 1Sam (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Sam (nl)", ->
`
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I. PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SamPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SamuPI:NAME:<NAME>END_PIl 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I SamPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1e. PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. Sam 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1e Sam 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I. Sam 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 Sam 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I Sam 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1Sam 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 S 1:1").osis()).toEqual("1Sam.1.1")
p.include_apocrypha(false)
expect(p.parse("EERSTE SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("EERSTE SAMUËL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1E. SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1E. SAMUËL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("EERSTE SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. SAMUËL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1E SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1E SAMUËL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I. SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I. SAMUËL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAMUËL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I SAMUËL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("SAMUEL I 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1E. SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1E SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I. SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 S 1:1").osis()).toEqual("1Sam.1.1")
`
true
describe "Localized book 2Kgs (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Kgs (nl)", ->
`
expect(p.parse("Tweede Koningen 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2e. Koningen 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. Koningen 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. Koningen 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2e Koningen 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II Koningen 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Koningen 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("Tweede Kon 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("Tweede Ko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2e. Kon 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. Kon 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. Kon 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2e Kon 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2e. Ko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II Kon 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. Ko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Kon 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. Ko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2e Ko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II Ko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Ko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2Kgs 1:1").osis()).toEqual("2Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("TWEEDE KONINGEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2E. KONINGEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. KONINGEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. KONINGEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2E KONINGEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II KONINGEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 KONINGEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("TWEEDE KON 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("TWEEDE KO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2E. KON 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. KON 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. KON 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2E KON 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2E. KO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II KON 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. KO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 KON 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. KO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2E KO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II KO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 KO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2KGS 1:1").osis()).toEqual("2Kgs.1.1")
`
true
describe "Localized book 1Kgs (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Kgs (nl)", ->
`
expect(p.parse("Eerste Koningen 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1e. Koningen 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. Koningen 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1e Koningen 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I. Koningen 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Koningen 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("Eerste Kon 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I Koningen 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("Eerste Ko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1e. Kon 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. Kon 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1e Kon 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1e. Ko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I. Kon 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Kon 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. Ko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1e Ko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I Kon 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I. Ko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Ko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1Kgs 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I Ko 1:1").osis()).toEqual("1Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("EERSTE KONINGEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1E. KONINGEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. KONINGEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1E KONINGEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I. KONINGEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 KONINGEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("EERSTE KON 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I KONINGEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("EERSTE KO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1E. KON 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. KON 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1E KON 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1E. KO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I. KON 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 KON 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. KO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1E KO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I KON 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I. KO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 KO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1KGS 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I KO 1:1").osis()).toEqual("1Kgs.1.1")
`
true
describe "Localized book 2Chr (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Chr (nl)", ->
`
expect(p.parse("Tweede Kronieken 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2e. Kronieken 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("II. Kronieken 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. Kronieken 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2e Kronieken 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("II Kronieken 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 Kronieken 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("Tweede Kron 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2e. Kron 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("II. Kron 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. Kron 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2e Kron 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("II Kron 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 Kron 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 Kr 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2Chr 1:1").osis()).toEqual("2Chr.1.1")
p.include_apocrypha(false)
expect(p.parse("TWEEDE KRONIEKEN 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2E. KRONIEKEN 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("II. KRONIEKEN 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. KRONIEKEN 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2E KRONIEKEN 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("II KRONIEKEN 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 KRONIEKEN 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("TWEEDE KRON 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2E. KRON 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("II. KRON 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. KRON 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2E KRON 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("II KRON 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 KRON 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 KR 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2CHR 1:1").osis()).toEqual("2Chr.1.1")
`
true
describe "Localized book 1Chr (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Chr (nl)", ->
`
expect(p.parse("Eerste Kronieken 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1e. Kronieken 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. Kronieken 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1e Kronieken 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("I. Kronieken 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 Kronieken 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("Eerste Kron 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("I Kronieken 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1e. Kron 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. Kron 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1e Kron 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("I. Kron 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 Kron 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("I Kron 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 Kr 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1Chr 1:1").osis()).toEqual("1Chr.1.1")
p.include_apocrypha(false)
expect(p.parse("EERPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1E. PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1E KPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("I. KPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 KRONIEKEN 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("EERSTE KRON 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("I KRONIEKEN 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1E. KPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. KRON 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1E KRON 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("I. KRON 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 KRON 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("I KRON 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 KR 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1CHR 1:1").osis()).toEqual("1Chr.1.1")
`
true
describe "Localized book Ezra (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ezra (nl)", ->
`
expect(p.parse("Ezra 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("Ezr 1:1").osis()).toEqual("Ezra.1.1")
p.include_apocrypha(false)
expect(p.parse("EZRA 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("EZR 1:1").osis()).toEqual("Ezra.1.1")
`
true
describe "Localized book Neh (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Neh (nl)", ->
`
expect(p.parse("Nehemia 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("Neh 1:1").osis()).toEqual("Neh.1.1")
p.include_apocrypha(false)
expect(p.parse("NEHEMIA 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("NEH 1:1").osis()).toEqual("Neh.1.1")
`
true
describe "Localized book GkEsth (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: GkEsth (nl)", ->
`
expect(p.parse("Ester \(Grieks\) 1:1").osis()).toEqual("GkEsth.1.1")
expect(p.parse("Ester (Grieks) 1:1").osis()).toEqual("GkEsth.1.1")
expect(p.parse("Ester (Gr.) 1:1").osis()).toEqual("GkEsth.1.1")
expect(p.parse("Ester (Gr) 1:1").osis()).toEqual("GkEsth.1.1")
expect(p.parse("Est gr 1:1").osis()).toEqual("GkEsth.1.1")
expect(p.parse("GkEsth 1:1").osis()).toEqual("GkEsth.1.1")
`
true
describe "Localized book Esth (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Esth (nl)", ->
`
expect(p.parse("Esther 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("Ester 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("Esth 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("Est 1:1").osis()).toEqual("Esth.1.1")
p.include_apocrypha(false)
expect(p.parse("ESTHER 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("ESTER 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("ESTH 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("EST 1:1").osis()).toEqual("Esth.1.1")
`
true
describe "Localized book Job (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Job (nl)", ->
`
expect(p.parse("Job 1:1").osis()).toEqual("Job.1.1")
p.include_apocrypha(false)
expect(p.parse("JOB 1:1").osis()).toEqual("Job.1.1")
`
true
describe "Localized book Ps (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ps (nl)", ->
`
expect(p.parse("Psalmen 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("Psalm 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("Ps 1:1").osis()).toEqual("Ps.1.1")
p.include_apocrypha(false)
expect(p.parse("PSALMEN 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("PSALM 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("PS 1:1").osis()).toEqual("Ps.1.1")
`
true
describe "Localized book PrAzar (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PrAzar (nl)", ->
`
expect(p.parse("Gebed van Azarja 1:1").osis()).toEqual("PrAzar.1.1")
expect(p.parse("PrAzar 1:1").osis()).toEqual("PrAzar.1.1")
`
true
describe "Localized book Prov (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Prov (nl)", ->
`
expect(p.parse("Spreuken 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("Prov 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("Spr 1:1").osis()).toEqual("Prov.1.1")
p.include_apocrypha(false)
expect(p.parse("SPREUKEN 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("PROV 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("SPR 1:1").osis()).toEqual("Prov.1.1")
`
true
describe "Localized book Eccl (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Eccl (nl)", ->
`
expect(p.parse("Koheleth 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Prediker 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Qoheleth 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Kohelet 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Qohelet 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Eccl 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Pred 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Pr 1:1").osis()).toEqual("Eccl.1.1")
p.include_apocrypha(false)
expect(p.parse("KOHELETH 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("PREDIKER 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("QOHELETH 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("KOHELET 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("QOHELET 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("ECCL 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("PRED 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("PR 1:1").osis()).toEqual("Eccl.1.1")
`
true
describe "Localized book SgThree (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: SgThree (nl)", ->
`
expect(p.parse("Gezang der drie mannen in het vuur 1:1").osis()).toEqual("SgThree.1.1")
expect(p.parse("Lied van de drie jongemannen 1:1").osis()).toEqual("SgThree.1.1")
expect(p.parse("SgThree 1:1").osis()).toEqual("SgThree.1.1")
`
true
describe "Localized book Song (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Song (nl)", ->
`
expect(p.parse("Canticum canticorum 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Hooglied 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Hoogl 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Song 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Hl 1:1").osis()).toEqual("Song.1.1")
p.include_apocrypha(false)
expect(p.parse("CANTICUM CANTICORUM 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("HOOGLIED 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("HOOGL 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SONG 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("HL 1:1").osis()).toEqual("Song.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PI (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (nl)", ->
`
expect(p.parse("Jeremia 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("Jer 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("Jr 1:1").osis()).toEqual("Jer.1.1")
p.include_apocrypha(false)
expect(p.parse("JEREMIA 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("JER 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("JR 1:1").osis()).toEqual("Jer.1.1")
`
true
describe "Localized book Ezek (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PIzek (nl)", ->
`
expect(p.parse("Ezechiel 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Ezechiël 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Ezech 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Ezek 1:1").osis()).toEqual("Ezek.1.1")
p.include_apocrypha(false)
expect(p.parse("EZECHIEL 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("EZECHIËL 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("EZECH 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("EZEK 1:1").osis()).toEqual("Ezek.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PI (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (nl)", ->
`
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("Daniël 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("Dan 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("Da 1:1").osis()).toEqual("Dan.1.1")
p.include_apocrypha(false)
expect(p.parse("DANIEL 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("DANIËL 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("DAN 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("DA 1:1").osis()).toEqual("Dan.1.1")
`
true
describe "Localized book Hos (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Hos (nl)", ->
`
expect(p.parse("Hosea 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("Hos 1:1").osis()).toEqual("Hos.1.1")
p.include_apocrypha(false)
expect(p.parse("HOSEA 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("HOS 1:1").osis()).toEqual("Hos.1.1")
`
true
describe "Localized book Joel (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Joel (nl)", ->
`
expect(p.parse("Joel 1:1").osis()).toEqual("Joel.1.1")
expect(p.parse("Joël 1:1").osis()).toEqual("Joel.1.1")
expect(p.parse("Jl 1:1").osis()).toEqual("Joel.1.1")
p.include_apocrypha(false)
expect(p.parse("JOEL 1:1").osis()).toEqual("Joel.1.1")
expect(p.parse("JOËL 1:1").osis()).toEqual("Joel.1.1")
expect(p.parse("JL 1:1").osis()).toEqual("Joel.1.1")
`
true
describe "Localized book Amos (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Amos (nl)", ->
`
expect(p.parse("Amos 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("Am 1:1").osis()).toEqual("Amos.1.1")
p.include_apocrypha(false)
expect(p.parse("AMOS 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("AM 1:1").osis()).toEqual("Amos.1.1")
`
true
describe "Localized book Obad (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Obad (nl)", ->
`
expect(p.parse("Obadja 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("Obad 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("Ob 1:1").osis()).toEqual("Obad.1.1")
p.include_apocrypha(false)
expect(p.parse("OBADJA 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("OBAD 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("OB 1:1").osis()).toEqual("Obad.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PI (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (nl)", ->
`
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("Jona 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("Jon 1:1").osis()).toEqual("Jonah.1.1")
p.include_apocrypha(false)
expect(p.parse("JONAH 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("JONA 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("JON 1:1").osis()).toEqual("Jonah.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PIic (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PIic (nl)", ->
`
expect(p.parse("Micha 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("Mica 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("Mic 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("Mi 1:1").osis()).toEqual("Mic.1.1")
p.include_apocrypha(false)
expect(p.parse("MICHA 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("MICA 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("MIC 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("MI 1:1").osis()).toEqual("Mic.1.1")
`
true
describe "Localized book Nah (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Nah (nl)", ->
`
expect(p.parse("Nahum 1:1").osis()).toEqual("Nah.1.1")
expect(p.parse("Nah 1:1").osis()).toEqual("Nah.1.1")
p.include_apocrypha(false)
expect(p.parse("NAHUM 1:1").osis()).toEqual("Nah.1.1")
expect(p.parse("NAH 1:1").osis()).toEqual("Nah.1.1")
`
true
describe "Localized book Hab (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Hab (nl)", ->
`
expect(p.parse("Habakuk 1:1").osis()).toEqual("Hab.1.1")
expect(p.parse("Hab 1:1").osis()).toEqual("Hab.1.1")
p.include_apocrypha(false)
expect(p.parse("HABAKUK 1:1").osis()).toEqual("Hab.1.1")
expect(p.parse("HAB 1:1").osis()).toEqual("Hab.1.1")
`
true
describe "Localized book Zeph (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Zeph (nl)", ->
`
expect(p.parse("Sefanja 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("Zefanja 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("Zeph 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("Sef 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("Zef 1:1").osis()).toEqual("Zeph.1.1")
p.include_apocrypha(false)
expect(p.parse("SEFANJA 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("ZEFANJA 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("ZEPH 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("SEF 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("ZEF 1:1").osis()).toEqual("Zeph.1.1")
`
true
describe "Localized book Hag (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Hag (nl)", ->
`
expect(p.parse("Haggai 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("Haggaï 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("Hagg 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("Hag 1:1").osis()).toEqual("Hag.1.1")
p.include_apocrypha(false)
expect(p.parse("HAGGAI 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("HAGGAÏ 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("HAGG 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("HAG 1:1").osis()).toEqual("Hag.1.1")
`
true
describe "Localized book Zech (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Zech (nl)", ->
`
expect(p.parse("Zacharia 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("Zach 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("Zech 1:1").osis()).toEqual("Zech.1.1")
p.include_apocrypha(false)
expect(p.parse("ZACHARIA 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("ZACH 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("ZECH 1:1").osis()).toEqual("Zech.1.1")
`
true
describe "Localized book Mal (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Mal (nl)", ->
`
expect(p.parse("Maleachi 1:1").osis()).toEqual("Mal.1.1")
expect(p.parse("Mal 1:1").osis()).toEqual("Mal.1.1")
p.include_apocrypha(false)
expect(p.parse("MALEACHI 1:1").osis()).toEqual("Mal.1.1")
expect(p.parse("MAL 1:1").osis()).toEqual("Mal.1.1")
`
true
describe "Localized book Matt (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Matt (nl)", ->
`
expect(p.parse("Evangelie volgens Matteus 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Evangelie volgens Matteüs 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Mattheus 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Mattheüs 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Matthéus 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Matthéüs 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Matteus 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Matteüs 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Matth 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Matt 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Mat 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Mt 1:1").osis()).toEqual("Matt.1.1")
p.include_apocrypha(false)
expect(p.parse("EVANGELIE VOLGENS MATTEUS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("EVANGELIE VOLGENS MATTEÜS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATTHEUS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATTHEÜS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATTHÉUS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATTHÉÜS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATTEUS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATTEÜS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATTH 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATT 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MAT 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MT 1:1").osis()).toEqual("Matt.1.1")
`
true
describe "Localized book Mark (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (nl)", ->
`
expect(p.parse("PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI volgens PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("Markus 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("Marc 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("Mark 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("Mc 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("Mk 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("Mr 1:1").osis()).toEqual("Mark.1.1")
p.include_apocrypha(false)
expect(p.parse("EVANGELIE VOLGENS MARCUS 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("EVANGELIE VOLGENS MARKUS 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("MARCUS 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("MARKUS 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("MARC 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("MARK 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("MC 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("MK 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("MR 1:1").osis()).toEqual("Mark.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PI (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (nl)", ->
`
expect(p.parse("PI:NAME:<NAME>END_PI volgens PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI volgens PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("PI:NAME:<NAME>END_PIukas 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("Luke 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("Luc 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("Luk 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("Lc 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("Lk 1:1").osis()).toEqual("Luke.1.1")
p.include_apocrypha(false)
expect(p.parse("EVANGELIE VOLGENS LUCAS 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("EVANGELIE VOLGENS LUKAS 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("LUCAS 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("LUKAS 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("LUKE 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("LUC 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("LUK 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("LC 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("LK 1:1").osis()).toEqual("Luke.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PIJohn (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PIJohn (nl)", ->
`
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1e. Johannes 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1. Johannes 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1e Johannes 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I. Johannes 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 Johannes 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("Eerste Joh 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I Johannes 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1e. Joh 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1. Joh 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1e Joh 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I. Joh 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 Joh 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1John 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I Joh 1:1").osis()).toEqual("1John.1.1")
p.include_apocrypha(false)
expect(p.parse("EERSTE JOHANNES 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1E. JOHANNES 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1. JOHANNES 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1E JOHANNES 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I. JOHANNES 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 JOHANNES 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("EERSTE JOH 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I JOHANNES 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1E. JOH 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1. JOH 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1E JOH 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I. JOH 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 JOH 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1JOHN 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I JOH 1:1").osis()).toEqual("1John.1.1")
`
true
describe "Localized book 2John (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2John (nl)", ->
`
expect(p.parse("Tweede Johannes 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2e. Johannes 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II. Johannes 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. Johannes 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2e Johannes 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II Johannes 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 Johannes 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("Tweede Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2e. Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II. Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2e Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2John 1:1").osis()).toEqual("2John.1.1")
p.include_apocrypha(false)
expect(p.parse("TWEEDE JOHANNES 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2E. JOHANNES 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II. JOHANNES 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. JOHANNES 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2E JOHANNES 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II JOHANNES 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 JOHANNES 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("TWEEDE JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2E. JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II. JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2E JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2JOHN 1:1").osis()).toEqual("2John.1.1")
`
true
describe "Localized book 3John (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 3John (nl)", ->
`
expect(p.parse("Derde Johannes 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III. Johannes 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3e. Johannes 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III Johannes 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. Johannes 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3e Johannes 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 Johannes 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("Derde Joh 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III. Joh 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3e. Joh 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III Joh 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. Joh 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3e Joh 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 Joh 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3John 1:1").osis()).toEqual("3John.1.1")
p.include_apocrypha(false)
expect(p.parse("DERDE JOHANNES 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III. JOHANNES 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3E. JOHANNES 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III JOHANNES 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. JOHANNES 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3E JOHANNES 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 JOHANNES 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("DERDE JOH 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III. JOH 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3E. JOH 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III JOH 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. JOH 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3E JOH 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 JOH 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3JOHN 1:1").osis()).toEqual("3John.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PI (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (nl)", ->
`
expect(p.parse("Evangelie volgens Johannes 1:1").osis()).toEqual("John.1.1")
expect(p.parse("Johannes 1:1").osis()).toEqual("John.1.1")
expect(p.parse("John 1:1").osis()).toEqual("John.1.1")
expect(p.parse("Joh 1:1").osis()).toEqual("John.1.1")
p.include_apocrypha(false)
expect(p.parse("EVANGELIE VOLGENS JOHANNES 1:1").osis()).toEqual("John.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("John.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("John.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("John.1.1")
`
true
describe "Localized book Acts (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Acts (nl)", ->
`
expect(p.parse("Handelingen van de apostelen 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Handelingen der apostelen 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Handelingen 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Acts 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Hand 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Hnd 1:1").osis()).toEqual("Acts.1.1")
p.include_apocrypha(false)
expect(p.parse("HANDELINGEN VAN DE APOSTELEN 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("HANDELINGEN DER APOSTELEN 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("HANDELINGEN 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("ACTS 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("HAND 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("HND 1:1").osis()).toEqual("Acts.1.1")
`
true
describe "Localized book Rom (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Rom (nl)", ->
`
expect(p.parse("Romeinenbrief 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("Romeinen 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("Rom 1:1").osis()).toEqual("Rom.1.1")
p.include_apocrypha(false)
expect(p.parse("ROMEINENBRIEF 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMEINEN 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROM 1:1").osis()).toEqual("Rom.1.1")
`
true
describe "Localized book 2Cor (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Cor (nl)", ->
`
expect(p.parse("Tweede Corinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Corinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Corinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Corinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Corintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Corintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Korinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Korinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Corintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Corintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Korintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Korintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Corinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Corinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Corinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Corinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Corinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Korinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Corinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Corinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Corinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Corinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Corinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Corinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Corintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Corintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Korinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Korinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Corinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Corinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Corinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Corinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Corintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Corintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Corinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Corinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Corinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Corinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Corintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Corintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Corinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Corinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Corintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Corintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Korinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Korinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Corintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Corintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Korintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Korintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Corinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Corinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Corintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Corintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Corintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Corintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Corinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Corinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Corintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Corintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korinthier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korinthiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Corintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Corintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Corintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Corintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Korintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Korintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Corinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Korinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Corintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Corintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Corinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Corintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Corintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korintier 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korintiër 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Corinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Corinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Korinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Corinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Corinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korinthe 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Tweede Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e. Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2e Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2Cor 1:1").osis()).toEqual("2Cor.1.1")
p.include_apocrypha(false)
expect(p.parse("TWEEDE CORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE CORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE CORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE CORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE CORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE CORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE KORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE KORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE CORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE CORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE KORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE KORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. CORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. CORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. CORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. CORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE CORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE KORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. CORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. CORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E CORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E CORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. CORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. CORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. CORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. CORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. KORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. KORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II CORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II CORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. CORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. CORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. CORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. CORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 CORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 CORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. CORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. CORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. CORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. CORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E CORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E CORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E CORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E CORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E KORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E KORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. CORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. CORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. KORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. KORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II CORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II CORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II CORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II CORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. CORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. CORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 CORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 CORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 CORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 CORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTHIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTHIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. CORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. CORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E CORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E CORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E KORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E KORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. CORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. KORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II CORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II CORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. CORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 CORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 CORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTIER 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTIËR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. CORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E CORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E KORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II CORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 CORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTHE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TWEEDE KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E. KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2E KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2COR 1:1").osis()).toEqual("2Cor.1.1")
`
true
# Exhaustive Dutch (nl) aliases for 1 Corinthians: every ordinal prefix
# ("Eerste", "1e.", "1.", "1e", "I.", "1", "I") crossed with each book
# spelling must resolve to OSIS "1Cor.1.1" — first in mixed case, then
# (after p.include_apocrypha(false)) in upper case.
describe "Localized book 1Cor (nl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Cor (nl)", ->
		`
		expect(p.parse("Eerste Corinthiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Eerste Corinthiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Eerste Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Eerste Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Eerste Corinthier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Eerste Corinthiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Eerste Corintiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Eerste Corintiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Eerste Korinthier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Eerste Korinthiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Eerste Korintiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Eerste Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Eerste Corintier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Eerste Corintiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Eerste Korintier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Eerste Korintiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e. Corinthiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e. Corinthiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e. Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e. Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Eerste Corinthe 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Eerste Korinthe 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Corinthiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Corinthiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e Corinthiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e Corinthiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e. Corinthier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e. Corinthiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e. Corintiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e. Corintiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e. Korinthier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e. Korinthiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e. Korintiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e. Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. Corinthiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. Corinthiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Corinthiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Corinthiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Corinthier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Corinthiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Corintiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Corintiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Korinthier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Korinthiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Korintiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e Corinthier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e Corinthiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e Corintiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e Corintiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e Korinthier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e Korinthiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e Korintiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e. Corintier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e. Corintiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e. Korintier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e. Korintiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Corinthiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Corinthiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. Corinthier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. Corinthiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. Corintiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. Corintiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. Korinthier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. Korinthiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. Korintiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Corinthier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Corinthiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Corintiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Corintiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Korinthier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Korinthiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Korintiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Corintier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Corintiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Korintier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Korintiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e Corintier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e Corintiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e Korintier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e Korintiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e. Corinthe 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e. Korinthe 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Corinthier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Corinthiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Corintiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Corintiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Korinthier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Korinthiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Korintiers 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. Corintier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. Corintiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. Korintier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. Korintiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Corintier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Corintiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Korintier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Korintiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Corinthe 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Korinthe 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e Corinthe 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e Korinthe 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Corintier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Corintiër 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Korintier 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Korintiër 1:1").osis()).toEqual("1Cor.1.1")
		// restored "I. Corinthe" — the literal had been mangled by a redaction
		// placeholder; it pairs with "I. Korinthe" below, matching the
		// "II. Corinthe"/"II. Korinthe" pair in the 2Cor spec
		expect(p.parse("I. Corinthe 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. Korinthe 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Corinthe 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Korinthe 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Eerste Kor 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Corinthe 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Korinthe 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e. Kor 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Kor 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1e Kor 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. Kor 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Kor 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Kor 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1Cor 1:1").osis()).toEqual("1Cor.1.1")
		p.include_apocrypha(false)
		expect(p.parse("EERSTE CORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("EERSTE CORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("EERSTE KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("EERSTE KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("EERSTE CORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("EERSTE CORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("EERSTE CORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("EERSTE CORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("EERSTE KORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("EERSTE KORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("EERSTE KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("EERSTE KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("EERSTE CORINTIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("EERSTE CORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("EERSTE KORINTIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("EERSTE KORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E. CORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E. CORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E. KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E. KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("EERSTE CORINTHE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("EERSTE KORINTHE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. CORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. CORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E CORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E CORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E. CORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E. CORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E. CORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E. CORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E. KORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E. KORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E. KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E. KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. CORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. CORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 CORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 CORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. CORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. CORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. CORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. CORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E CORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E CORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E CORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E CORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E KORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E KORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E. CORINTIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E. CORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E. KORINTIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E. KORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I CORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I CORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. CORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. CORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. CORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. CORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. KORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. KORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 CORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 CORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 CORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 CORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. CORINTIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. CORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E CORINTIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E CORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E KORINTIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E KORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E. CORINTHE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E. KORINTHE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I CORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I CORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I CORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I CORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I KORINTHIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I KORINTHIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. CORINTIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. CORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. KORINTIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. KORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 CORINTIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 CORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. CORINTHE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTHE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E CORINTHE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E KORINTHE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I CORINTIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I CORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I KORINTIER 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I KORINTIËR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. CORINTHE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. KORINTHE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 CORINTHE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTHE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("EERSTE KOR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I CORINTHE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I KORINTHE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E. KOR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KOR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1E KOR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. KOR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KOR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I KOR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1COR 1:1").osis()).toEqual("1Cor.1.1")
		`
		true
describe "Localized book Gal (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gal (nl)", ->
`
expect(p.parse("Galatenbrief 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Galaten 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Gal 1:1").osis()).toEqual("Gal.1.1")
p.include_apocrypha(false)
expect(p.parse("GALATENBRIEF 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATEN 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GAL 1:1").osis()).toEqual("Gal.1.1")
`
true
describe "Localized book Eph (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PIph (nl)", ->
`
expect(p.parse("Efeziers 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Efeziërs 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Efez 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Eph 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Ef 1:1").osis()).toEqual("Eph.1.1")
p.include_apocrypha(false)
expect(p.parse("EFEZIERS 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EFEZIËRS 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EFEZ 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPH 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EF 1:1").osis()).toEqual("Eph.1.1")
`
true
describe "Localized book Phil (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (nl)", ->
`
expect(p.parse("Filippenzen 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("Filip 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("Phil 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("Fil 1:1").osis()).toEqual("Phil.1.1")
p.include_apocrypha(false)
expect(p.parse("FILIPPENZEN 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("FILIP 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PHIL 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("FIL 1:1").osis()).toEqual("Phil.1.1")
`
true
describe "Localized book Col (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (nl)", ->
`
expect(p.parse("Colossenzen 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("Kolossenzen 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("Col 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("Kol 1:1").osis()).toEqual("Col.1.1")
p.include_apocrypha(false)
expect(p.parse("COLOSSENZEN 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KOLOSSENZEN 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("COL 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KOL 1:1").osis()).toEqual("Col.1.1")
`
true
describe "Localized book 2Thess (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Thess (nl)", ->
`
expect(p.parse("Tweede Thessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("Tweede Tessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2e. Thessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. Thessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. Thessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2e Thessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2e. Tessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II Thessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. Tessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Thessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. Tessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2e Tessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II Tessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Tessalonicenzen 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("Tweede Thess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("Tweede Tess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("Tweede Tes 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2e. Thess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. Thess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. Thess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2e Thess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2e. Tess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II Thess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. Tess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Thess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. Tess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2e Tess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2e. Tes 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II Tess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. Tes 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Tess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Thes 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. Tes 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2Thess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2e Tes 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II Tes 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Tes 1:1").osis()).toEqual("2Thess.1.1")
p.include_apocrypha(false)
expect(p.parse("TWEEDE THESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TWEEDE TESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2E. THESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. THESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. THESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2E THESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2E. TESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II THESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. TESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 THESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2E TESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II TESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TESSALONICENZEN 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TWEEDE THESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TWEEDE TESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TWEEDE TES 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2E. THESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. THESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. THESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2E THESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2E. TESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II THESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. TESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 THESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2E TESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2E. TES 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II TESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. TES 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 THES 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TES 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2THESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2E TES 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II TES 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TES 1:1").osis()).toEqual("2Thess.1.1")
`
true
describe "Localized book 1Thess (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Thess (nl)", ->
`
expect(p.parse("Eerste Thessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("Eerste Tessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1e. Thessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. Thessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1e Thessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1e. Tessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. Thessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Thessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. Tessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1e Tessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I Thessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. Tessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Tessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I Tessalonicenzen 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("Eerste Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("Eerste Tess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("Eerste Tes 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1e. Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1e Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1e. Tess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. Tess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1e Tess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1e. Tes 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. Tess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Tess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Thes 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. Tes 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1e Tes 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I Tess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. Tes 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Tes 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I Tes 1:1").osis()).toEqual("1Thess.1.1")
p.include_apocrypha(false)
expect(p.parse("EERSTE THESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("EERSTE TESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1E. THESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. THESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1E THESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1E. TESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. THESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 THESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1E TESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I THESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. TESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I TESSALONICENZEN 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("EERSTE THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("EERSTE TESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("EERSTE TES 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1E. THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1E THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1E. TESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1E TESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1E. TES 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. TESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 THES 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TES 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1E TES 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I TESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. TES 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TES 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I TES 1:1").osis()).toEqual("1Thess.1.1")
`
true
describe "Localized book 2Tim (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PITim (nl)", ->
`
expect(p.parse("Tweede Timotheus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("Tweede Timotheüs 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("Tweede Timoteus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("Tweede Timoteüs 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2e. Timotheus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2e. Timotheüs 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II. Timotheus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II. Timotheüs 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. Timotheus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. Timotheüs 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2e Timotheus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2e Timotheüs 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2e. Timoteus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2e. Timoteüs 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II Timotheus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II Timotheüs 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II. Timoteus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II. Timoteüs 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 Timotheus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 Timotheüs 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. Timoteus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. Timoteüs 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2e Timoteus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2e Timoteüs 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II Timoteus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II Timoteüs 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 Timoteus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 Timoteüs 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("Tweede Tim 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2e. Tim 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II. Tim 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. Tim 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2e Tim 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II Tim 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 Tim 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2Tim 1:1").osis()).toEqual("2Tim.1.1")
p.include_apocrypha(false)
expect(p.parse("TWEEDE TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("TWEEDE TIMOTHEÜS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("TWEEDE TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("TWEEDE TIMOTEÜS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2E. TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2E. TIMOTHEÜS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II. TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II. TIMOTHEÜS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIMOTHEÜS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2E TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2E TIMOTHEÜS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2E. TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2E. TIMOTEÜS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II TIMOTHEÜS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II. TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II. TIMOTEÜS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIMOTHEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIMOTHEÜS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIMOTEÜS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2E TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2E TIMOTEÜS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II TIMOTEÜS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIMOTEÜS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("TWEEDE TIM 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2E. TIM 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II. TIM 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIM 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2E TIM 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II TIM 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIM 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2TIM 1:1").osis()).toEqual("2Tim.1.1")
`
true
describe "Localized book 1Tim (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Tim (nl)", ->
`
expect(p.parse("Eerste Timotheus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("Eerste Timotheüs 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("Eerste Timoteus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("Eerste Timoteüs 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1e. Timotheus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1e. Timotheüs 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. Timotheus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. Timotheüs 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1e Timotheus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1e Timotheüs 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1e. Timoteus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1e. Timoteüs 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. Timotheus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. Timotheüs 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 Timotheus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 Timotheüs 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. Timoteus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. Timoteüs 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1e Timoteus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1e Timoteüs 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I Timotheus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I Timotheüs 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. Timoteus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. Timoteüs 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 Timoteus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 Timoteüs 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("Eerste Tim 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I Timoteus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I Timoteüs 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1e. Tim 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. Tim 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1e Tim 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. Tim 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 Tim 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I Tim 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1Tim 1:1").osis()).toEqual("1Tim.1.1")
p.include_apocrypha(false)
expect(p.parse("EERSTE TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("EERSTE TIMOTHEÜS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("EERSTE TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("EERSTE TIMOTEÜS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1E. TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1E. TIMOTHEÜS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIMOTHEÜS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1E TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1E TIMOTHEÜS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1E. TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1E. TIMOTEÜS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. TIMOTHEÜS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIMOTHEÜS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIMOTEÜS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1E TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1E TIMOTEÜS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I TIMOTHEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I TIMOTHEÜS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. TIMOTEÜS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIMOTEÜS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("EERSTE TIM 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I TIMOTEÜS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1E. TIM 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIM 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1E TIM 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. TIM 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIM 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I TIM 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1TIM 1:1").osis()).toEqual("1Tim.1.1")
`
true
describe "Localized book Titus (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Titus (nl)", ->
`
expect(p.parse("Titus 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("Tit 1:1").osis()).toEqual("Titus.1.1")
p.include_apocrypha(false)
expect(p.parse("TITUS 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TIT 1:1").osis()).toEqual("Titus.1.1")
`
true
describe "Localized book Phlm (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Phlm (nl)", ->
`
expect(p.parse("Filemon 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("Filémon 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("Filem 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("Film 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("Phlm 1:1").osis()).toEqual("Phlm.1.1")
p.include_apocrypha(false)
expect(p.parse("FILEMON 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("FILÉMON 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("FILEM 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("FILM 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("PHLM 1:1").osis()).toEqual("Phlm.1.1")
`
true
describe "Localized book Heb (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Heb (nl)", ->
`
expect(p.parse("Hebreeen 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Hebreeën 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Hebr 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Heb 1:1").osis()).toEqual("Heb.1.1")
p.include_apocrypha(false)
expect(p.parse("HEBREEEN 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEBREEËN 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEBR 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEB 1:1").osis()).toEqual("Heb.1.1")
`
true
describe "Localized book Jas (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jas (nl)", ->
`
expect(p.parse("Jakobus 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jak 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jas 1:1").osis()).toEqual("Jas.1.1")
p.include_apocrypha(false)
expect(p.parse("JAKOBUS 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAK 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAS 1:1").osis()).toEqual("Jas.1.1")
`
true
describe "Localized book 2Pet (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Pet (nl)", ->
`
expect(p.parse("Tweede Petrus 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("Tweede Petr 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2e. Petrus 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II. Petrus 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("Tweede Pet 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. Petrus 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2e Petrus 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II Petrus 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Petrus 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2e. Petr 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II. Petr 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. Petr 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2e Petr 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2e. Pet 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II Petr 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II. Pet 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Petr 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. Pet 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2e Pet 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II Pet 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Pet 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Pe 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2Pet 1:1").osis()).toEqual("2Pet.1.1")
p.include_apocrypha(false)
expect(p.parse("TWEEDE PETRUS 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("TWEEDE PETR 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2E. PETRUS 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II. PETRUS 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("TWEEDE PET 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. PETRUS 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2E PETRUS 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II PETRUS 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PETRUS 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2E. PETR 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II. PETR 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. PETR 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2E PETR 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2E. PET 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II PETR 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II. PET 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PETR 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. PET 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2E PET 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II PET 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PET 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PE 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2PET 1:1").osis()).toEqual("2Pet.1.1")
`
true
describe "Localized book 1Pet (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Pet (nl)", ->
`
expect(p.parse("Eerste Petrus 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("Eerste Petr 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1e. Petrus 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("Eerste Pet 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. Petrus 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1e Petrus 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I. Petrus 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Petrus 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1e. Petr 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I Petrus 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. Petr 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1e Petr 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1e. Pet 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I. Petr 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Petr 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. Pet 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1e Pet 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I Petr 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I. Pet 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Pet 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I Pet 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Pe 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1Pet 1:1").osis()).toEqual("1Pet.1.1")
p.include_apocrypha(false)
expect(p.parse("EERSTE PETRUS 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("EERSTE PETR 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1E. PETRUS 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("EERSTE PET 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. PETRUS 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1E PETRUS 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I. PETRUS 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PETRUS 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1E. PETR 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I PETRUS 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. PETR 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1E PETR 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1E. PET 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I. PETR 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PETR 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. PET 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1E PET 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I PETR 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I. PET 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PET 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I PET 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PE 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1PET 1:1").osis()).toEqual("1Pet.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PIude (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PIude (nl)", ->
`
expect(p.parse("Judas 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("Jude 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("Jud 1:1").osis()).toEqual("Jude.1.1")
p.include_apocrypha(false)
expect(p.parse("JUDAS 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("JUDE 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("JUD 1:1").osis()).toEqual("Jude.1.1")
`
true
describe "Localized book Tob (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Tob (nl)", ->
`
expect(p.parse("Tobias 1:1").osis()).toEqual("Tob.1.1")
expect(p.parse("Tobías 1:1").osis()).toEqual("Tob.1.1")
expect(p.parse("Tobia 1:1").osis()).toEqual("Tob.1.1")
expect(p.parse("Tobit 1:1").osis()).toEqual("Tob.1.1")
expect(p.parse("Tobía 1:1").osis()).toEqual("Tob.1.1")
expect(p.parse("Tob 1:1").osis()).toEqual("Tob.1.1")
`
true
describe "Localized book Jdt (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jdt (nl)", ->
`
expect(p.parse("Judith 1:1").osis()).toEqual("Jdt.1.1")
expect(p.parse("Judit 1:1").osis()).toEqual("Jdt.1.1")
expect(p.parse("Jdt 1:1").osis()).toEqual("Jdt.1.1")
`
true
describe "Localized book Bar (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bar (nl)", ->
`
expect(p.parse("Baruch 1:1").osis()).toEqual("Bar.1.1")
expect(p.parse("Bar 1:1").osis()).toEqual("Bar.1.1")
`
true
describe "Localized book Sus (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sus (nl)", ->
`
expect(p.parse("SusPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Sus.1.1")
expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
`
true
describe "Localized book 2Macc (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Macc (nl)", ->
`
expect(p.parse("Tweede Makkabeeen 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("Tweede Makkabeeën 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2e. Makkabeeen 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2e. Makkabeeën 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("II. Makkabeeen 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("II. Makkabeeën 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2. Makkabeeen 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2. Makkabeeën 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2e Makkabeeen 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2e Makkabeeën 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("II Makkabeeen 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("II Makkabeeën 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2 Makkabeeen 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2 Makkabeeën 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("Tweede Mak 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2e. Mak 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("II. Mak 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2. Mak 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2e Mak 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("II Mak 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2 Mak 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2Macc 1:1").osis()).toEqual("2Macc.1.1")
`
true
describe "Localized book 3Macc (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 3Macc (nl)", ->
`
expect(p.parse("Derde Makkabeeen 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("Derde Makkabeeën 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("III. Makkabeeen 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("III. Makkabeeën 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3e. Makkabeeen 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3e. Makkabeeën 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("III Makkabeeen 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("III Makkabeeën 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3. Makkabeeen 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3. Makkabeeën 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3e Makkabeeen 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3e Makkabeeën 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3 Makkabeeen 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3 Makkabeeën 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("Derde Mak 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("III. Mak 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3e. Mak 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("III Mak 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3. Mak 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3e Mak 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3 Mak 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3Macc 1:1").osis()).toEqual("3Macc.1.1")
`
true
describe "Localized book 4Macc (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 4Macc (nl)", ->
`
expect(p.parse("Vierde Makkabeeen 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("Vierde Makkabeeën 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("IV. Makkabeeen 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("IV. Makkabeeën 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4. Makkabeeen 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4. Makkabeeën 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("IV Makkabeeen 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("IV Makkabeeën 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4 Makkabeeen 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4 Makkabeeën 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("Vierde Mak 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("IV. Mak 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4. Mak 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("IV Mak 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4 Mak 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4Macc 1:1").osis()).toEqual("4Macc.1.1")
`
true
describe "Localized book 1Macc (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Macc (nl)", ->
`
expect(p.parse("Eerste Makkabeeen 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("Eerste Makkabeeën 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1e. Makkabeeen 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1e. Makkabeeën 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1. Makkabeeen 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1. Makkabeeën 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1e Makkabeeen 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1e Makkabeeën 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("I. Makkabeeen 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("I. Makkabeeën 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1 Makkabeeen 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1 Makkabeeën 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("I Makkabeeen 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("I Makkabeeën 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("Eerste Mak 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1e. Mak 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1. Mak 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1e Mak 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("I. Mak 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1 Mak 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1Macc 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("I Mak 1:1").osis()).toEqual("1Macc.1.1")
`
true
describe "Localized book Ezek,Ezra (nl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ezek,Ezra (nl)", ->
`
expect(p.parse("Ez 1:1").osis()).toEqual("Ezek.1.1")
p.include_apocrypha(false)
expect(p.parse("EZ 1:1").osis()).toEqual("Ezek.1.1")
`
true
describe "Miscellaneous tests", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should return the expected language", ->
expect(p.languages).toEqual ["nl"]
it "should handle ranges (nl)", ->
expect(p.parse("Titus 1:1 - 2").osis()).toEqual "Titus.1.1-Titus.1.2"
expect(p.parse("Matt 1-2").osis()).toEqual "Matt.1-Matt.2"
expect(p.parse("Phlm 2 - 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
it "should handle chapters (nl)", ->
expect(p.parse("Titus 1:1, hoofdstukken 2").osis()).toEqual "Titus.1.1,Titus.2"
expect(p.parse("Matt 3:4 HOOFDSTUKKEN 6").osis()).toEqual "Matt.3.4,Matt.6"
expect(p.parse("Titus 1:1, hoofdstuk 2").osis()).toEqual "Titus.1.1,Titus.2"
expect(p.parse("Matt 3:4 HOOFDSTUK 6").osis()).toEqual "Matt.3.4,Matt.6"
it "should handle verses (nl)", ->
expect(p.parse("Exod 1:1 verzen 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm VERZEN 6").osis()).toEqual "Phlm.1.6"
expect(p.parse("Exod 1:1 vers 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm VERS 6").osis()).toEqual "Phlm.1.6"
expect(p.parse("Exod 1:1 vs. 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm VS. 6").osis()).toEqual "Phlm.1.6"
expect(p.parse("Exod 1:1 vs 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm VS 6").osis()).toEqual "Phlm.1.6"
expect(p.parse("Exod 1:1 v. 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm V. 6").osis()).toEqual "Phlm.1.6"
expect(p.parse("Exod 1:1 v 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm V 6").osis()).toEqual "Phlm.1.6"
it "should handle 'and' (nl)", ->
expect(p.parse("Exod 1:1 en 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm 2 EN 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
expect(p.parse("Exod 1:1 vgl 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm 2 VGL 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
expect(p.parse("Exod 1:1 zie ook 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm 2 ZIE OOK 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
it "should handle titles (nl)", ->
expect(p.parse("Ps 3 opschrift, 4:2, 5:opschrift").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
expect(p.parse("PS 3 OPSCHRIFT, 4:2, 5:OPSCHRIFT").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
it "should handle 'ff' (nl)", ->
expect(p.parse("Rev 3en volgende verzen, 4:2en volgende verzen").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
expect(p.parse("REV 3 EN VOLGENDE VERZEN, 4:2 EN VOLGENDE VERZEN").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
it "should handle translations (nl)", ->
expect(p.parse("Lev 1 (GNB96)").osis_and_translations()).toEqual [["Lev.1", "GNB96"]]
expect(p.parse("lev 1 gnb96").osis_and_translations()).toEqual [["Lev.1", "GNB96"]]
expect(p.parse("Lev 1 (NB)").osis_and_translations()).toEqual [["Lev.1", "NB"]]
expect(p.parse("lev 1 nb").osis_and_translations()).toEqual [["Lev.1", "NB"]]
expect(p.parse("Lev 1 (NBG51)").osis_and_translations()).toEqual [["Lev.1", "NBG51"]]
expect(p.parse("lev 1 nbg51").osis_and_translations()).toEqual [["Lev.1", "NBG51"]]
expect(p.parse("Lev 1 (NBV)").osis_and_translations()).toEqual [["Lev.1", "NBV"]]
expect(p.parse("lev 1 nbv").osis_and_translations()).toEqual [["Lev.1", "NBV"]]
expect(p.parse("Lev 1 (SV)").osis_and_translations()).toEqual [["Lev.1", "SV"]]
expect(p.parse("lev 1 sv").osis_and_translations()).toEqual [["Lev.1", "SV"]]
expect(p.parse("Lev 1 (SV77)").osis_and_translations()).toEqual [["Lev.1", "SV77"]]
expect(p.parse("lev 1 sv77").osis_and_translations()).toEqual [["Lev.1", "SV77"]]
expect(p.parse("Lev 1 (WV95)").osis_and_translations()).toEqual [["Lev.1", "WV95"]]
expect(p.parse("lev 1 wv95").osis_and_translations()).toEqual [["Lev.1", "WV95"]]
it "should handle book ranges (nl)", ->
p.set_options {book_alone_strategy: "full", book_range_strategy: "include"}
expect(p.parse("Eerste - DerPI:NAME:<NAME>END_PI Joh").osis()).toEqual "1John.1-3John.1"
it "should handle boundaries (nl)", ->
p.set_options {book_alone_strategy: "full"}
expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
|
[
{
"context": "cs/#info.info\n\nFramer.Info =\n\ttitle: \"\"\n\tauthor: \"Tony\"\n\ttwitter: \"\"\n\tdescription: \"\"\n\n\nbg = new Layer\n\t",
"end": 146,
"score": 0.9990713596343994,
"start": 142,
"tag": "NAME",
"value": "Tony"
}
] | 63instagramStories.framer/app.coffee | gremjua-forks/100daysofframer | 26 | # Project Info
# This info is presented in a widget when you share.
# http://framerjs.com/docs/#info.info
Framer.Info =
title: ""
author: "Tony"
twitter: ""
description: ""
bg = new Layer
width: 750
height: 1334
image: "images/Screenshot_IGvsSC_03.jpg"
bgCover = new Layer
size: Screen, backgroundColor: "#fff", opacity: 0
bgCover.states.add on: opacity: 0.6
bgCover.states.animationOptions = time: 0.3
profileBlock = new Layer
size: 113, borderRadius: "50%", backgroundColor: "#fff"
x: 26, y: 160
profileBlock.states.add on: opacity: 0
profileBlock.states.animationOptions = time: 0.1
vidWrap = new Layer
size: Screen, x: 0, y: 0, opacity: 0
backgroundColor: "", shadowY: 10, shadowBlur: 100, clip: true
vidWrap.states.add on: opacity: 1
vidWrap.states.animationOptions = time: 0.2
vidPaused = true
vid = new VideoLayer
width: 750, height: 1920/(1080/750)
scale: 0, originX: 0.05, originY: 0.15, parent: vidWrap
video: "images/IMG_4089.MOV"
vid.states.add on: scale: 1
vid.states.animationOptions = time: 0.35
profile = new Layer
width: 112, height: 112, x: 26, y: 160
image: "images/profile.png"
profile.states.add on: scale: 0.7, x: 5, y: 40
profile.states.animationOptions = time: 0.1
shadowDown = new Layer
width: Screen.width, height: 70, backgroundColor: "", parent: vid
style: backgroundImage: "linear-gradient(to bottom, rgba(0,0,0,0.7) 0%, rgba(0,0,0,0) 100%)"
for bar in [0..7]
if bar < 4
op = 0.95
else
op = 0.5
bar = new Layer
width: Screen.width / 8.8, height: 5, parent: shadowDown
backgroundColor: "#fff", opacity: op
x: 12 + bar * Screen.width / 8.2, y: 22
current = new Layer
width: 0, backgroundColor: "#fff", parent: shadowDown
x: 378, height: 5, y: 22
current.states.add on: width: Screen.width/9
current.states.animationOptions = curve: "linear", time: 4.2
profile.onClick ->
run()
vid.onClick ->
run()
run = ->
profileBlock.states.next()
vidWrap.states.next()
vid.states.next()
profile.states.next()
current.states.next()
bgCover.states.next()
vid.player.play()
if vidPaused is true
profile.onAnimationEnd ->
@parent = vidWrap
vidPaused = false
else
profile.parent = null
profile.onAnimationEnd ->
profile.parent = null
vidPaused = true
vidWrap.draggable.enabled = true
vidWrap.draggable.horizontal = false
vidWrap.draggable.constraints =
x: 0, y: 0, width: Screen.width, height: Screen.height + 350
vidWrap.onDrag ->
vid.player.pause()
vidWrap.borderRadius = 9
vidWrap.scale = Utils.modulate(vidWrap.y, [0, 300], [1, 0.96], true)
vidWrap.onDragEnd ->
profile.x = 26
profile.y = 120
run()
vid.player.pause()
| 107216 | # Project Info
# This info is presented in a widget when you share.
# http://framerjs.com/docs/#info.info
Framer.Info =
title: ""
author: "<NAME>"
twitter: ""
description: ""
bg = new Layer
width: 750
height: 1334
image: "images/Screenshot_IGvsSC_03.jpg"
bgCover = new Layer
size: Screen, backgroundColor: "#fff", opacity: 0
bgCover.states.add on: opacity: 0.6
bgCover.states.animationOptions = time: 0.3
profileBlock = new Layer
size: 113, borderRadius: "50%", backgroundColor: "#fff"
x: 26, y: 160
profileBlock.states.add on: opacity: 0
profileBlock.states.animationOptions = time: 0.1
vidWrap = new Layer
size: Screen, x: 0, y: 0, opacity: 0
backgroundColor: "", shadowY: 10, shadowBlur: 100, clip: true
vidWrap.states.add on: opacity: 1
vidWrap.states.animationOptions = time: 0.2
vidPaused = true
vid = new VideoLayer
width: 750, height: 1920/(1080/750)
scale: 0, originX: 0.05, originY: 0.15, parent: vidWrap
video: "images/IMG_4089.MOV"
vid.states.add on: scale: 1
vid.states.animationOptions = time: 0.35
profile = new Layer
width: 112, height: 112, x: 26, y: 160
image: "images/profile.png"
profile.states.add on: scale: 0.7, x: 5, y: 40
profile.states.animationOptions = time: 0.1
shadowDown = new Layer
width: Screen.width, height: 70, backgroundColor: "", parent: vid
style: backgroundImage: "linear-gradient(to bottom, rgba(0,0,0,0.7) 0%, rgba(0,0,0,0) 100%)"
for bar in [0..7]
if bar < 4
op = 0.95
else
op = 0.5
bar = new Layer
width: Screen.width / 8.8, height: 5, parent: shadowDown
backgroundColor: "#fff", opacity: op
x: 12 + bar * Screen.width / 8.2, y: 22
current = new Layer
width: 0, backgroundColor: "#fff", parent: shadowDown
x: 378, height: 5, y: 22
current.states.add on: width: Screen.width/9
current.states.animationOptions = curve: "linear", time: 4.2
profile.onClick ->
run()
vid.onClick ->
run()
run = ->
profileBlock.states.next()
vidWrap.states.next()
vid.states.next()
profile.states.next()
current.states.next()
bgCover.states.next()
vid.player.play()
if vidPaused is true
profile.onAnimationEnd ->
@parent = vidWrap
vidPaused = false
else
profile.parent = null
profile.onAnimationEnd ->
profile.parent = null
vidPaused = true
vidWrap.draggable.enabled = true
vidWrap.draggable.horizontal = false
vidWrap.draggable.constraints =
x: 0, y: 0, width: Screen.width, height: Screen.height + 350
vidWrap.onDrag ->
vid.player.pause()
vidWrap.borderRadius = 9
vidWrap.scale = Utils.modulate(vidWrap.y, [0, 300], [1, 0.96], true)
vidWrap.onDragEnd ->
profile.x = 26
profile.y = 120
run()
vid.player.pause()
| true | # Project Info
# This info is presented in a widget when you share.
# http://framerjs.com/docs/#info.info
Framer.Info =
title: ""
author: "PI:NAME:<NAME>END_PI"
twitter: ""
description: ""
bg = new Layer
width: 750
height: 1334
image: "images/Screenshot_IGvsSC_03.jpg"
bgCover = new Layer
size: Screen, backgroundColor: "#fff", opacity: 0
bgCover.states.add on: opacity: 0.6
bgCover.states.animationOptions = time: 0.3
profileBlock = new Layer
size: 113, borderRadius: "50%", backgroundColor: "#fff"
x: 26, y: 160
profileBlock.states.add on: opacity: 0
profileBlock.states.animationOptions = time: 0.1
vidWrap = new Layer
size: Screen, x: 0, y: 0, opacity: 0
backgroundColor: "", shadowY: 10, shadowBlur: 100, clip: true
vidWrap.states.add on: opacity: 1
vidWrap.states.animationOptions = time: 0.2
vidPaused = true
vid = new VideoLayer
width: 750, height: 1920/(1080/750)
scale: 0, originX: 0.05, originY: 0.15, parent: vidWrap
video: "images/IMG_4089.MOV"
vid.states.add on: scale: 1
vid.states.animationOptions = time: 0.35
profile = new Layer
width: 112, height: 112, x: 26, y: 160
image: "images/profile.png"
profile.states.add on: scale: 0.7, x: 5, y: 40
profile.states.animationOptions = time: 0.1
shadowDown = new Layer
width: Screen.width, height: 70, backgroundColor: "", parent: vid
style: backgroundImage: "linear-gradient(to bottom, rgba(0,0,0,0.7) 0%, rgba(0,0,0,0) 100%)"
for bar in [0..7]
if bar < 4
op = 0.95
else
op = 0.5
bar = new Layer
width: Screen.width / 8.8, height: 5, parent: shadowDown
backgroundColor: "#fff", opacity: op
x: 12 + bar * Screen.width / 8.2, y: 22
current = new Layer
width: 0, backgroundColor: "#fff", parent: shadowDown
x: 378, height: 5, y: 22
current.states.add on: width: Screen.width/9
current.states.animationOptions = curve: "linear", time: 4.2
profile.onClick ->
run()
vid.onClick ->
run()
run = ->
profileBlock.states.next()
vidWrap.states.next()
vid.states.next()
profile.states.next()
current.states.next()
bgCover.states.next()
vid.player.play()
if vidPaused is true
profile.onAnimationEnd ->
@parent = vidWrap
vidPaused = false
else
profile.parent = null
profile.onAnimationEnd ->
profile.parent = null
vidPaused = true
vidWrap.draggable.enabled = true
vidWrap.draggable.horizontal = false
vidWrap.draggable.constraints =
x: 0, y: 0, width: Screen.width, height: Screen.height + 350
vidWrap.onDrag ->
vid.player.pause()
vidWrap.borderRadius = 9
vidWrap.scale = Utils.modulate(vidWrap.y, [0, 300], [1, 0.96], true)
vidWrap.onDragEnd ->
profile.x = 26
profile.y = 120
run()
vid.player.pause()
|
[
{
"context": "ntroller\", ->\n\n\tbeforeEach ->\n\t\t@readOnlyToken = 'somereadonlytoken'\n\t\t@readAndWriteToken = '42somereadandwritetoken'",
"end": 577,
"score": 0.9974727630615234,
"start": 560,
"tag": "PASSWORD",
"value": "somereadonlytoken"
},
{
"context": "ken = 'somereadonlyt... | test/UnitTests/coffee/TokenAccess/TokenAccessControllerTests.coffee | HasanSanli/web-sharelatex | 0 | should = require('chai').should()
SandboxedModule = require('sandboxed-module')
assert = require('assert')
path = require('path')
sinon = require('sinon')
modulePath = path.join __dirname, "../../../../app/js/Features/TokenAccess/TokenAccessController"
expect = require("chai").expect
ObjectId = require("mongojs").ObjectId
MockRequest = require('../helpers/MockRequest')
MockResponse = require('../helpers/MockResponse')
Errors = require "../../../../app/js/Features/Errors/Errors.js"
describe "TokenAccessController", ->
beforeEach ->
@readOnlyToken = 'somereadonlytoken'
@readAndWriteToken = '42somereadandwritetoken'
@projectId = ObjectId()
@ownerId = 'owner'
@project =
_id: @projectId
publicAccesLevel: 'tokenBased'
tokens:
readOnly: @readOnlyToken
readAndWrite: @readAndWriteToken
owner_ref: @ownerId
@userId = ObjectId()
@TokenAccessController = SandboxedModule.require modulePath, requires:
'../Project/ProjectController': @ProjectController = {}
'../Authentication/AuthenticationController': @AuthenticationController = {}
'./TokenAccessHandler': @TokenAccessHandler = {}
'logger-sharelatex': {log: sinon.stub(), err: sinon.stub()}
@AuthenticationController.getLoggedInUserId = sinon.stub().returns(@userId.toString())
describe 'readAndWriteToken', ->
beforeEach ->
describe 'when all goes well', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.calledWith(@readAndWriteToken))
.to.equal true
done()
it 'should add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 1
expect(@TokenAccessHandler.addReadAndWriteUserToProject.calledWith(
@userId.toString(), @projectId
))
.to.equal true
done()
it 'should pass control to loadEditor', (done) ->
expect(@req.params.Project_id).to.equal @projectId.toString()
expect(@ProjectController.loadEditor.callCount).to.equal 1
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal true
done()
describe 'when the user is already the owner', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@project.owner_ref = @userId
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.calledWith(@readAndWriteToken))
.to.equal true
done()
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 0
done()
it 'should pass control to loadEditor', (done) ->
expect(@req.params.Project_id).to.equal @projectId.toString()
expect(@ProjectController.loadEditor.callCount).to.equal 1
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal true
done()
describe 'when there is no user', ->
beforeEach ->
@AuthenticationController.getLoggedInUserId =
sinon.stub().returns(null)
describe 'when anonymous read-write access is enabled', ->
beforeEach ->
@TokenAccessHandler.ANONYMOUS_READ_AND_WRITE_ENABLED = true
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessHandler.grantSessionTokenAccess = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 0
done()
it 'should give the user session token access', (done) ->
expect(@TokenAccessHandler.grantSessionTokenAccess.callCount)
.to.equal 1
expect(@TokenAccessHandler.grantSessionTokenAccess.calledWith(
@req, @projectId, @readAndWriteToken
))
.to.equal true
done()
it 'should pass control to loadEditor', (done) ->
expect(@req.params.Project_id).to.equal @projectId.toString()
expect(@ProjectController.loadEditor.callCount).to.equal 1
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal true
done()
describe 'when anonymous read-write access is not enabled', ->
beforeEach ->
@TokenAccessHandler.ANONYMOUS_READ_AND_WRITE_ENABLED = false
@req = new MockRequest()
@res = new MockResponse()
@res.redirect = sinon.stub()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessHandler.grantSessionTokenAccess = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 0
done()
it 'should give the user session token access', (done) ->
expect(@TokenAccessHandler.grantSessionTokenAccess.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should redirect to restricted page', (done) ->
expect(@res.redirect.callCount).to.equal 1
expect(@res.redirect.calledWith('/restricted')).to.equal true
done()
describe 'when findProject produces an error', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, new Error('woops'))
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.calledWith(@readAndWriteToken))
.to.equal true
done()
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should call next with an error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
describe 'when findProject does not find a project', ->
beforeEach ->
describe 'when user is present', ->
beforeEach ->
@AuthenticationController.getLoggedInUserId =
sinon.stub().returns(@userId.toString())
describe 'when token access is off, but user has higher access anyway', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@res.redirect = sinon.stub()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, null)
@TokenAccessHandler.findProjectWithHigherAccess =
sinon.stub()
.callsArgWith(2, null, @project)
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
# (Tail of the "token lookup fails but user has higher access" scenario:
# the controller should redirect to the canonical project URL rather than
# grant token access.)
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken
.calledWith(@readAndWriteToken)
).to.equal true
done()
it 'should check if user has higher access to the token project', (done) ->
expect(
@TokenAccessHandler.findProjectWithHigherAccess.callCount
).to.equal 1
done()
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should not call next with a not-found error', (done) ->
expect(@next.callCount).to.equal 0
done()
it 'should redirect to the canonical project url', (done) ->
expect(@res.redirect.callCount).to.equal 1
expect(@res.redirect.calledWith(302, "/project/#{@project._id}")).to.equal true
done()
# Token lookup fails and the user has no other access to the project:
# the controller should produce a not-found error via next().
describe 'when higher access is not available', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, null)
@TokenAccessHandler.findProjectWithHigherAccess =
sinon.stub()
.callsArgWith(2, null, null)
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.calledWith(
@readAndWriteToken
)).to.equal true
done()
it 'should check if user has higher access to the token project', (done) ->
expect(
@TokenAccessHandler.findProjectWithHigherAccess.callCount
).to.equal 1
done()
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should call next with a not-found error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
# The project is found but persisting the new collaborator fails:
# the error should be forwarded to next() and the editor must not load.
describe 'when adding user to project produces an error', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, new Error('woops'))
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.calledWith(@readAndWriteToken))
.to.equal true
done()
it 'should add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 1
expect(@TokenAccessHandler.addReadAndWriteUserToProject.calledWith(
@userId.toString(), @projectId
))
.to.equal true
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should call next with an error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
describe 'readOnlyToken', ->
beforeEach ->
describe 'with a user', ->
beforeEach ->
@AuthenticationController.getLoggedInUserId = sinon.stub().returns(@userId.toString())
# Happy path for a logged-in user following a read-only token link:
# the user is added as a read-only collaborator and the editor loads.
describe 'when all goes well', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_only_token'] = @readOnlyToken
@TokenAccessHandler.findProjectWithReadOnlyToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readOnlyToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.calledWith(@readOnlyToken))
.to.equal true
done()
it 'should add the user to the project with read-only access', (done) ->
expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
.to.equal 1
expect(@TokenAccessHandler.addReadOnlyUserToProject.calledWith(
@userId.toString(), @projectId
))
.to.equal true
done()
it 'should pass control to loadEditor', (done) ->
expect(@req.params.Project_id).to.equal @projectId.toString()
expect(@ProjectController.loadEditor.callCount).to.equal 1
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal true
done()
# The project owner uses their own read-only link: no collaborator record
# should be added, but the editor still loads.
describe 'when the user is already the owner', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_only_token'] = @readOnlyToken
@project.owner_ref = @userId
@TokenAccessHandler.findProjectWithReadOnlyToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readOnlyToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.calledWith(@readOnlyToken))
.to.equal true
done()
it 'should not add the user to the project with read-only access', (done) ->
expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
.to.equal 0
done()
it 'should pass control to loadEditor', (done) ->
expect(@req.params.Project_id).to.equal @projectId.toString()
expect(@ProjectController.loadEditor.callCount).to.equal 1
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal true
done()
# The token lookup itself errors out: the error is forwarded to next().
describe 'when findProject produces an error', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_only_token'] = @readOnlyToken
@TokenAccessHandler.findProjectWithReadOnlyToken = sinon.stub()
.callsArgWith(1, new Error('woops'))
@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readOnlyToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.calledWith(@readOnlyToken))
.to.equal true
done()
it 'should not add the user to the project with read-only access', (done) ->
expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should call next with an error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
##
describe 'when findProject does not find a project', ->
beforeEach ->
# NOTE(review): this describe exercises the readAndWriteToken endpoint even
# though it sits in the readOnlyToken section — presumably copy-pasted from
# the read-and-write tests above; verify the intended placement.
describe 'when token access is off, but user has higher access anyway', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@res.redirect = sinon.stub()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, null)
@TokenAccessHandler.findProjectWithHigherAccess =
sinon.stub()
.callsArgWith(2, null, @project)
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.calledWith(@readAndWriteToken))
.to.equal true
done()
it 'should check if user has higher access to the token project', (done) ->
expect(
@TokenAccessHandler.findProjectWithHigherAccess.callCount
).to.equal 1
done()
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should not call next with a not-found error', (done) ->
expect(@next.callCount).to.equal 0
done()
it 'should redirect to the canonical project url', (done) ->
expect(@res.redirect.callCount).to.equal 1
expect(@res.redirect.calledWith(302, "/project/#{@project._id}")).to.equal true
done()
describe 'when higher access is not available', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, null)
@TokenAccessHandler.findProjectWithHigherAccess =
sinon.stub()
.callsArgWith(2, null, null)
# NOTE(review): the parallel read-and-write describe stubs
# addReadAndWriteUserToProject here; this one stubs the read-only
# variant instead. Probably a copy-paste slip — harmless because the
# assertions only require a callCount of 0, but worth confirming.
@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.calledWith(
@readAndWriteToken
)).to.equal true
done()
it 'should check if user has higher access to the token project', (done) ->
expect(
@TokenAccessHandler.findProjectWithHigherAccess.callCount
).to.equal 1
done()
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should call next with a not-found error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
# Read-only flow: project found, but persisting the collaborator fails;
# the error should be forwarded to next().
describe 'when adding user to project produces an error', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_only_token'] = @readOnlyToken
@TokenAccessHandler.findProjectWithReadOnlyToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
.callsArgWith(2, new Error('woops'))
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readOnlyToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.calledWith(@readOnlyToken))
.to.equal true
done()
it 'should add the user to the project with read-only access', (done) ->
expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
.to.equal 1
expect(@TokenAccessHandler.addReadOnlyUserToProject.calledWith(
@userId.toString(), @projectId
))
.to.equal true
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should call next with an error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
# Anonymous visitors (no logged-in user): read-only token access is granted
# on the session rather than by adding a collaborator record.
describe 'anonymous', ->
beforeEach ->
@AuthenticationController.getLoggedInUserId = sinon.stub().returns(null)
@TokenAccessHandler.grantSessionTokenAccess = sinon.stub()
describe 'when all goes well', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_only_token'] = @readOnlyToken
@TokenAccessHandler.findProjectWithReadOnlyToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readOnlyToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.calledWith(@readOnlyToken))
.to.equal true
done()
it 'should give the user session read-only access', (done) ->
expect(@TokenAccessHandler.grantSessionTokenAccess.callCount)
.to.equal 1
expect(@TokenAccessHandler.grantSessionTokenAccess.calledWith(
@req, @projectId, @readOnlyToken
))
.to.equal true
done()
it 'should not add the user to the project with read-only access', (done) ->
expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
.to.equal 0
done()
it 'should pass control to loadEditor', (done) ->
expect(@req.params.Project_id).to.equal @projectId.toString()
expect(@req._anonymousAccessToken).to.equal @readOnlyToken
expect(@ProjectController.loadEditor.callCount).to.equal 1
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal true
done()
# Lookup error for an anonymous visitor: no access granted, error forwarded.
describe 'when findProject produces an error', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_only_token'] = @readOnlyToken
@TokenAccessHandler.findProjectWithReadOnlyToken = sinon.stub()
.callsArgWith(1, new Error('woops'))
@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readOnlyToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.calledWith(@readOnlyToken))
.to.equal true
done()
it 'should not give the user session read-only access', (done) ->
expect(@TokenAccessHandler.grantSessionTokenAccess.callCount)
.to.equal 0
done()
it 'should not add the user to the project with read-only access', (done) ->
expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should call next with an error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
# Unknown token for an anonymous visitor: not-found error via next().
describe 'when findProject does not find a project', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_only_token'] = @readOnlyToken
@TokenAccessHandler.findProjectWithReadOnlyToken = sinon.stub()
.callsArgWith(1, null, null)
@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readOnlyToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.calledWith(@readOnlyToken))
.to.equal true
done()
it 'should not give the user session read-only access', (done) ->
expect(@TokenAccessHandler.grantSessionTokenAccess.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should not add the user to the project with read-only access', (done) ->
expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
.to.equal 0
done()
it 'should call next with a not-found error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
# Test dependencies. SandboxedModule lets us require the controller with
# stubbed-out project modules; Mock{Request,Response} are lightweight
# Express doubles from the test helpers.
# (Fixed: an extraction artifact "| 134421 | " had been fused onto the
# first require line, which would not parse.)
should = require('chai').should()
SandboxedModule = require('sandboxed-module')
assert = require('assert')
path = require('path')
sinon = require('sinon')
modulePath = path.join __dirname, "../../../../app/js/Features/TokenAccess/TokenAccessController"
expect = require("chai").expect
ObjectId = require("mongojs").ObjectId
MockRequest = require('../helpers/MockRequest')
MockResponse = require('../helpers/MockResponse')
Errors = require "../../../../app/js/Features/Errors/Errors.js"
describe "TokenAccessController", ->
beforeEach ->
# Fixture tokens. The originals were destroyed by anonymisation
# placeholders ('<PASSWORD>'); the exact values are arbitrary — the tests
# only assert that the same value flows through to the handler stubs —
# but the two tokens must be distinct non-empty strings.
@readOnlyToken = 'sometokenabcd'
@readAndWriteToken = '42sometokenabcd'
@projectId = ObjectId()
@ownerId = 'owner'
# Minimal token-enabled project document.
# ('publicAccesLevel' is spelled as in the app's schema — do not "fix".)
@project =
_id: @projectId
publicAccesLevel: 'tokenBased'
tokens:
readOnly: @readOnlyToken
readAndWrite: @readAndWriteToken
owner_ref: @ownerId
@userId = ObjectId()
# Load the controller with all collaborators stubbed out.
@TokenAccessController = SandboxedModule.require modulePath, requires:
'../Project/ProjectController': @ProjectController = {}
'../Authentication/AuthenticationController': @AuthenticationController = {}
'./TokenAccessHandler': @TokenAccessHandler = {}
'logger-sharelatex': {log: sinon.stub(), err: sinon.stub()}
# Default: a logged-in user; individual describes override as needed.
@AuthenticationController.getLoggedInUserId = sinon.stub().returns(@userId.toString())
describe 'readAndWriteToken', ->
beforeEach ->
# Happy path for a logged-in user following a read-and-write token link:
# the user is added as a read-write collaborator and the editor loads.
describe 'when all goes well', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.calledWith(@readAndWriteToken))
.to.equal true
done()
it 'should add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 1
expect(@TokenAccessHandler.addReadAndWriteUserToProject.calledWith(
@userId.toString(), @projectId
))
.to.equal true
done()
it 'should pass control to loadEditor', (done) ->
expect(@req.params.Project_id).to.equal @projectId.toString()
expect(@ProjectController.loadEditor.callCount).to.equal 1
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal true
done()
# The project owner uses their own read-and-write link: no collaborator
# record should be added, but the editor still loads.
describe 'when the user is already the owner', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@project.owner_ref = @userId
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.calledWith(@readAndWriteToken))
.to.equal true
done()
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 0
done()
it 'should pass control to loadEditor', (done) ->
expect(@req.params.Project_id).to.equal @projectId.toString()
expect(@ProjectController.loadEditor.callCount).to.equal 1
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal true
done()
describe 'when there is no user', ->
beforeEach ->
@AuthenticationController.getLoggedInUserId =
sinon.stub().returns(null)
# Anonymous visitor with the feature flag on: access is granted on the
# session (no collaborator record) and the editor loads.
describe 'when anonymous read-write access is enabled', ->
beforeEach ->
@TokenAccessHandler.ANONYMOUS_READ_AND_WRITE_ENABLED = true
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessHandler.grantSessionTokenAccess = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 0
done()
it 'should give the user session token access', (done) ->
expect(@TokenAccessHandler.grantSessionTokenAccess.callCount)
.to.equal 1
expect(@TokenAccessHandler.grantSessionTokenAccess.calledWith(
@req, @projectId, @readAndWriteToken
))
.to.equal true
done()
it 'should pass control to loadEditor', (done) ->
expect(@req.params.Project_id).to.equal @projectId.toString()
expect(@ProjectController.loadEditor.callCount).to.equal 1
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal true
done()
# Anonymous visitor with the feature flag off: no access of any kind is
# granted and the visitor is sent to the restricted page.
describe 'when anonymous read-write access is not enabled', ->
beforeEach ->
@TokenAccessHandler.ANONYMOUS_READ_AND_WRITE_ENABLED = false
@req = new MockRequest()
@res = new MockResponse()
@res.redirect = sinon.stub()
@next = sinon.stub()
# Fixed: this line had been corrupted by an anonymisation placeholder
# ("<PASSWORD>Token"); the request must carry @readAndWriteToken, the
# same fixture value the sibling describes use.
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessHandler.grantSessionTokenAccess = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 0
done()
it 'should give the user session token access', (done) ->
expect(@TokenAccessHandler.grantSessionTokenAccess.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should redirect to restricted page', (done) ->
expect(@res.redirect.callCount).to.equal 1
expect(@res.redirect.calledWith('/restricted')).to.equal true
done()
# The token lookup itself errors out: nothing is granted and the error is
# forwarded to next().
describe 'when findProject produces an error', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, new Error('woops'))
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.calledWith(@readAndWriteToken))
.to.equal true
done()
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should call next with an error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
describe 'when findProject does not find a project', ->
beforeEach ->
describe 'when user is present', ->
beforeEach ->
@AuthenticationController.getLoggedInUserId =
sinon.stub().returns(@userId.toString())
# Token lookup fails but the user already has higher access: redirect to
# the canonical project URL instead of granting token access.
describe 'when token access is off, but user has higher access anyway', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@res.redirect = sinon.stub()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, null)
@TokenAccessHandler.findProjectWithHigherAccess =
sinon.stub()
.callsArgWith(2, null, @project)
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken
.calledWith(@readAndWriteToken)
).to.equal true
done()
it 'should check if user has higher access to the token project', (done) ->
expect(
@TokenAccessHandler.findProjectWithHigherAccess.callCount
).to.equal 1
done()
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should not call next with a not-found error', (done) ->
expect(@next.callCount).to.equal 0
done()
it 'should redirect to the canonical project url', (done) ->
expect(@res.redirect.callCount).to.equal 1
expect(@res.redirect.calledWith(302, "/project/#{@project._id}")).to.equal true
done()
# Token lookup fails and the user has no other access: not-found error.
describe 'when higher access is not available', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, null)
@TokenAccessHandler.findProjectWithHigherAccess =
sinon.stub()
.callsArgWith(2, null, null)
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.calledWith(
@readAndWriteToken
)).to.equal true
done()
it 'should check if user has higher access to the token project', (done) ->
expect(
@TokenAccessHandler.findProjectWithHigherAccess.callCount
).to.equal 1
done()
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should call next with a not-found error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
# Project found but persisting the collaborator fails: error to next().
describe 'when adding user to project produces an error', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, new Error('woops'))
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.calledWith(@readAndWriteToken))
.to.equal true
done()
it 'should add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 1
expect(@TokenAccessHandler.addReadAndWriteUserToProject.calledWith(
@userId.toString(), @projectId
))
.to.equal true
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should call next with an error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
describe 'readOnlyToken', ->
beforeEach ->
describe 'with a user', ->
beforeEach ->
@AuthenticationController.getLoggedInUserId = sinon.stub().returns(@userId.toString())
# Happy path for a logged-in user following a read-only token link.
describe 'when all goes well', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_only_token'] = @readOnlyToken
@TokenAccessHandler.findProjectWithReadOnlyToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readOnlyToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.calledWith(@readOnlyToken))
.to.equal true
done()
it 'should add the user to the project with read-only access', (done) ->
expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
.to.equal 1
expect(@TokenAccessHandler.addReadOnlyUserToProject.calledWith(
@userId.toString(), @projectId
))
.to.equal true
done()
it 'should pass control to loadEditor', (done) ->
expect(@req.params.Project_id).to.equal @projectId.toString()
expect(@ProjectController.loadEditor.callCount).to.equal 1
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal true
done()
# Owner follows their own read-only link: no collaborator record added.
describe 'when the user is already the owner', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_only_token'] = @readOnlyToken
@project.owner_ref = @userId
@TokenAccessHandler.findProjectWithReadOnlyToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readOnlyToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.calledWith(@readOnlyToken))
.to.equal true
done()
it 'should not add the user to the project with read-only access', (done) ->
expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
.to.equal 0
done()
it 'should pass control to loadEditor', (done) ->
expect(@req.params.Project_id).to.equal @projectId.toString()
expect(@ProjectController.loadEditor.callCount).to.equal 1
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal true
done()
# Token lookup errors out: error forwarded to next(), no editor load.
describe 'when findProject produces an error', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_only_token'] = @readOnlyToken
@TokenAccessHandler.findProjectWithReadOnlyToken = sinon.stub()
.callsArgWith(1, new Error('woops'))
@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readOnlyToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.calledWith(@readOnlyToken))
.to.equal true
done()
it 'should not add the user to the project with read-only access', (done) ->
expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should call next with an error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
##
# Token lookup fails but the user already has higher access to the project:
# redirect to the canonical project URL instead of granting token access.
describe 'when findProject does not find a project', ->
beforeEach ->
describe 'when token access is off, but user has higher access anyway', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@res.redirect = sinon.stub()
@next = sinon.stub()
# Fixed: this line had been corrupted by an anonymisation placeholder
# ("@readAnd<PASSWORD>Token"); it must be @readAndWriteToken, the value
# asserted by the calledWith check below.
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, null)
@TokenAccessHandler.findProjectWithHigherAccess =
sinon.stub()
.callsArgWith(2, null, @project)
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.calledWith(@readAndWriteToken))
.to.equal true
done()
it 'should check if user has higher access to the token project', (done) ->
expect(
@TokenAccessHandler.findProjectWithHigherAccess.callCount
).to.equal 1
done()
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should not call next with a not-found error', (done) ->
expect(@next.callCount).to.equal 0
done()
it 'should redirect to the canonical project url', (done) ->
expect(@res.redirect.callCount).to.equal 1
expect(@res.redirect.calledWith(302, "/project/#{@project._id}")).to.equal true
done()
describe 'when higher access is not available', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, null)
@TokenAccessHandler.findProjectWithHigherAccess =
sinon.stub()
.callsArgWith(2, null, null)
@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.calledWith(
@readAndWriteToken
)).to.equal true
done()
it 'should check if user has higher access to the token project', (done) ->
expect(
@TokenAccessHandler.findProjectWithHigherAccess.callCount
).to.equal 1
done()
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should call next with a not-found error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
describe 'when adding user to project produces an error', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_only_token'] = @readOnlyToken
@TokenAccessHandler.findProjectWithReadOnlyToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
.callsArgWith(2, new Error('woops'))
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readOnlyToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.calledWith(@readOnlyToken))
.to.equal true
done()
it 'should add the user to the project with read-only access', (done) ->
expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
.to.equal 1
expect(@TokenAccessHandler.addReadOnlyUserToProject.calledWith(
@userId.toString(), @projectId
))
.to.equal true
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should call next with an error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
describe 'anonymous', ->
beforeEach ->
@AuthenticationController.getLoggedInUserId = sinon.stub().returns(null)
@TokenAccessHandler.grantSessionTokenAccess = sinon.stub()
describe 'when all goes well', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_only_token'] = @readOnlyToken
@TokenAccessHandler.findProjectWithReadOnlyToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readOnlyToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.calledWith(@readOnlyToken))
.to.equal true
done()
it 'should give the user session read-only access', (done) ->
expect(@TokenAccessHandler.grantSessionTokenAccess.callCount)
.to.equal 1
expect(@TokenAccessHandler.grantSessionTokenAccess.calledWith(
@req, @projectId, @readOnlyToken
))
.to.equal true
done()
it 'should not add the user to the project with read-only access', (done) ->
expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
.to.equal 0
done()
it 'should pass control to loadEditor', (done) ->
expect(@req.params.Project_id).to.equal @projectId.toString()
expect(@req._anonymousAccessToken).to.equal @readOnlyToken
expect(@ProjectController.loadEditor.callCount).to.equal 1
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal true
done()
describe 'when findProject produces an error', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_only_token'] = @readOnlyToken
@TokenAccessHandler.findProjectWithReadOnlyToken = sinon.stub()
.callsArgWith(1, new Error('woops'))
@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readOnlyToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.calledWith(@readOnlyToken))
.to.equal true
done()
it 'should not give the user session read-only access', (done) ->
expect(@TokenAccessHandler.grantSessionTokenAccess.callCount)
.to.equal 0
done()
it 'should not add the user to the project with read-only access', (done) ->
expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should call next with an error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
describe 'when findProject does not find a project', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_only_token'] = @readOnlyToken
@TokenAccessHandler.findProjectWithReadOnlyToken = sinon.stub()
.callsArgWith(1, null, null)
@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readOnlyToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.calledWith(@readOnlyToken))
.to.equal true
done()
it 'should not give the user session read-only access', (done) ->
expect(@TokenAccessHandler.grantSessionTokenAccess.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should not add the user to the project with read-only access', (done) ->
expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
.to.equal 0
done()
it 'should call next with a not-found error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
should = require('chai').should()
SandboxedModule = require('sandboxed-module')
assert = require('assert')
path = require('path')
sinon = require('sinon')
modulePath = path.join __dirname, "../../../../app/js/Features/TokenAccess/TokenAccessController"
expect = require("chai").expect
ObjectId = require("mongojs").ObjectId
MockRequest = require('../helpers/MockRequest')
MockResponse = require('../helpers/MockResponse')
Errors = require "../../../../app/js/Features/Errors/Errors.js"
describe "TokenAccessController", ->
# Top-level fixture set-up shared by every test in this suite: fake
# tokens, a token-based project owned by @ownerId, and a sandboxed
# controller with stubbed collaborators.
beforeEach ->
	# NOTE(review): the original token literals were destroyed by a
	# redaction scrubber; the tests only require two distinct opaque
	# strings, so any fixture values work — confirm against upstream.
	@readOnlyToken = 'readonlytoken123'
	@readAndWriteToken = 'readandwritetoken456'
	@projectId = ObjectId()
	@ownerId = 'owner'
	@project =
		_id: @projectId
		publicAccesLevel: 'tokenBased'
		tokens:
			readOnly: @readOnlyToken
			readAndWrite: @readAndWriteToken
		owner_ref: @ownerId
	@userId = ObjectId()
	# Load the controller with all of its dependencies replaced by
	# plain objects; individual tests install sinon stubs on these.
	@TokenAccessController = SandboxedModule.require modulePath, requires:
		'../Project/ProjectController': @ProjectController = {}
		'../Authentication/AuthenticationController': @AuthenticationController = {}
		'./TokenAccessHandler': @TokenAccessHandler = {}
		'logger-sharelatex': {log: sinon.stub(), err: sinon.stub()}
	# Default to a logged-in user; the 'anonymous' suites override this.
	@AuthenticationController.getLoggedInUserId = sinon.stub().returns(@userId.toString())
describe 'readAndWriteToken', ->
beforeEach ->
# Happy path for a logged-in user presenting a valid read-and-write
# token: the project is found, the user is added as a read-write
# member, and control passes to the editor.
describe 'when all goes well', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
# Stub: token lookup succeeds with the fixture project.
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, @project)
# Stub: membership grant succeeds.
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.calledWith(@readAndWriteToken))
.to.equal true
done()
it 'should add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 1
expect(@TokenAccessHandler.addReadAndWriteUserToProject.calledWith(
@userId.toString(), @projectId
))
.to.equal true
done()
it 'should pass control to loadEditor', (done) ->
# The controller rewrites the route param to the canonical Project_id
# before delegating to the editor.
expect(@req.params.Project_id).to.equal @projectId.toString()
expect(@ProjectController.loadEditor.callCount).to.equal 1
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal true
done()
describe 'when the user is already the owner', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@project.owner_ref = @userId
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.calledWith(@readAndWriteToken))
.to.equal true
done()
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 0
done()
it 'should pass control to loadEditor', (done) ->
expect(@req.params.Project_id).to.equal @projectId.toString()
expect(@ProjectController.loadEditor.callCount).to.equal 1
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal true
done()
describe 'when there is no user', ->
beforeEach ->
@AuthenticationController.getLoggedInUserId =
sinon.stub().returns(null)
describe 'when anonymous read-write access is enabled', ->
beforeEach ->
@TokenAccessHandler.ANONYMOUS_READ_AND_WRITE_ENABLED = true
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessHandler.grantSessionTokenAccess = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 0
done()
it 'should give the user session token access', (done) ->
expect(@TokenAccessHandler.grantSessionTokenAccess.callCount)
.to.equal 1
expect(@TokenAccessHandler.grantSessionTokenAccess.calledWith(
@req, @projectId, @readAndWriteToken
))
.to.equal true
done()
it 'should pass control to loadEditor', (done) ->
expect(@req.params.Project_id).to.equal @projectId.toString()
expect(@ProjectController.loadEditor.callCount).to.equal 1
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal true
done()
describe 'when anonymous read-write access is not enabled', ->
# Set-up: anonymous request with a valid read-and-write token while
# anonymous read-write access is disabled — the controller should
# redirect to /restricted without granting any access.
beforeEach ->
	@TokenAccessHandler.ANONYMOUS_READ_AND_WRITE_ENABLED = false
	@req = new MockRequest()
	@res = new MockResponse()
	@res.redirect = sinon.stub()
	@next = sinon.stub()
	# NOTE(review): this expression was mangled by an extraction scrubber;
	# restored to @readAndWriteToken, matching every sibling set-up in
	# the readAndWriteToken suites.
	@req.params['read_and_write_token'] = @readAndWriteToken
	@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
		.callsArgWith(1, null, @project)
	@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
		.callsArgWith(2, null)
	@ProjectController.loadEditor = sinon.stub()
	@TokenAccessHandler.grantSessionTokenAccess = sinon.stub()
	@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 0
done()
it 'should give the user session token access', (done) ->
expect(@TokenAccessHandler.grantSessionTokenAccess.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should redirect to restricted page', (done) ->
expect(@res.redirect.callCount).to.equal 1
expect(@res.redirect.calledWith('/restricted')).to.equal true
done()
describe 'when findProject produces an error', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, new Error('woops'))
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.calledWith(@readAndWriteToken))
.to.equal true
done()
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should call next with an error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
describe 'when findProject does not find a project', ->
beforeEach ->
describe 'when user is present', ->
beforeEach ->
@AuthenticationController.getLoggedInUserId =
sinon.stub().returns(@userId.toString())
describe 'when token access is off, but user has higher access anyway', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@res.redirect = sinon.stub()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, null)
@TokenAccessHandler.findProjectWithHigherAccess =
sinon.stub()
.callsArgWith(2, null, @project)
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken
.calledWith(@readAndWriteToken)
).to.equal true
done()
it 'should check if user has higher access to the token project', (done) ->
expect(
@TokenAccessHandler.findProjectWithHigherAccess.callCount
).to.equal 1
done()
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should not call next with a not-found error', (done) ->
expect(@next.callCount).to.equal 0
done()
it 'should redirect to the canonical project url', (done) ->
expect(@res.redirect.callCount).to.equal 1
expect(@res.redirect.calledWith(302, "/project/#{@project._id}")).to.equal true
done()
describe 'when higher access is not available', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, null)
@TokenAccessHandler.findProjectWithHigherAccess =
sinon.stub()
.callsArgWith(2, null, null)
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.calledWith(
@readAndWriteToken
)).to.equal true
done()
it 'should check if user has higher access to the token project', (done) ->
expect(
@TokenAccessHandler.findProjectWithHigherAccess.callCount
).to.equal 1
done()
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should call next with a not-found error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
describe 'when adding user to project produces an error', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
.callsArgWith(2, new Error('woops'))
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.calledWith(@readAndWriteToken))
.to.equal true
done()
it 'should add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 1
expect(@TokenAccessHandler.addReadAndWriteUserToProject.calledWith(
@userId.toString(), @projectId
))
.to.equal true
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should call next with an error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
describe 'readOnlyToken', ->
beforeEach ->
describe 'with a user', ->
beforeEach ->
@AuthenticationController.getLoggedInUserId = sinon.stub().returns(@userId.toString())
describe 'when all goes well', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_only_token'] = @readOnlyToken
@TokenAccessHandler.findProjectWithReadOnlyToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readOnlyToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.calledWith(@readOnlyToken))
.to.equal true
done()
it 'should add the user to the project with read-only access', (done) ->
expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
.to.equal 1
expect(@TokenAccessHandler.addReadOnlyUserToProject.calledWith(
@userId.toString(), @projectId
))
.to.equal true
done()
it 'should pass control to loadEditor', (done) ->
expect(@req.params.Project_id).to.equal @projectId.toString()
expect(@ProjectController.loadEditor.callCount).to.equal 1
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal true
done()
describe 'when the user is already the owner', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_only_token'] = @readOnlyToken
@project.owner_ref = @userId
@TokenAccessHandler.findProjectWithReadOnlyToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readOnlyToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.calledWith(@readOnlyToken))
.to.equal true
done()
it 'should not add the user to the project with read-only access', (done) ->
expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
.to.equal 0
done()
it 'should pass control to loadEditor', (done) ->
expect(@req.params.Project_id).to.equal @projectId.toString()
expect(@ProjectController.loadEditor.callCount).to.equal 1
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal true
done()
describe 'when findProject produces an error', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_only_token'] = @readOnlyToken
@TokenAccessHandler.findProjectWithReadOnlyToken = sinon.stub()
.callsArgWith(1, new Error('woops'))
@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readOnlyToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.calledWith(@readOnlyToken))
.to.equal true
done()
it 'should not add the user to the project with read-only access', (done) ->
expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should call next with an error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
##
describe 'when findProject does not find a project', ->
beforeEach ->
describe 'when token access is off, but user has higher access anyway', ->
# Set-up: the read-and-write token lookup misses, but the user already
# has higher access to the project — expect a redirect to the
# canonical project URL rather than a not-found error.
beforeEach ->
	@req = new MockRequest()
	@res = new MockResponse()
	@res.redirect = sinon.stub()
	@next = sinon.stub()
	# NOTE(review): this identifier was mangled by an extraction scrubber;
	# restored to @readAndWriteToken to match the calledWith assertion in
	# the first `it` of this describe.
	@req.params['read_and_write_token'] = @readAndWriteToken
	# Token lookup finds no project...
	@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
		.callsArgWith(1, null, null)
	# ...but the higher-access lookup does.
	@TokenAccessHandler.findProjectWithHigherAccess =
		sinon.stub()
			.callsArgWith(2, null, @project)
	@TokenAccessHandler.addReadAndWriteUserToProject = sinon.stub()
		.callsArgWith(2, null)
	@ProjectController.loadEditor = sinon.stub()
	@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.calledWith(@readAndWriteToken))
.to.equal true
done()
it 'should check if user has higher access to the token project', (done) ->
expect(
@TokenAccessHandler.findProjectWithHigherAccess.callCount
).to.equal 1
done()
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadAndWriteUserToProject.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should not call next with a not-found error', (done) ->
expect(@next.callCount).to.equal 0
done()
it 'should redirect to the canonical project url', (done) ->
expect(@res.redirect.callCount).to.equal 1
expect(@res.redirect.calledWith(302, "/project/#{@project._id}")).to.equal true
done()
describe 'when higher access is not available', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_and_write_token'] = @readAndWriteToken
@TokenAccessHandler.findProjectWithReadAndWriteToken = sinon.stub()
.callsArgWith(1, null, null)
@TokenAccessHandler.findProjectWithHigherAccess =
sinon.stub()
.callsArgWith(2, null, null)
@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
.callsArgWith(2, null)
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readAndWriteToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadAndWriteToken.calledWith(
@readAndWriteToken
)).to.equal true
done()
it 'should check if user has higher access to the token project', (done) ->
expect(
@TokenAccessHandler.findProjectWithHigherAccess.callCount
).to.equal 1
done()
it 'should not add the user to the project with read-write access', (done) ->
expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
.to.equal 0
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should call next with a not-found error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
describe 'when adding user to project produces an error', ->
beforeEach ->
@req = new MockRequest()
@res = new MockResponse()
@next = sinon.stub()
@req.params['read_only_token'] = @readOnlyToken
@TokenAccessHandler.findProjectWithReadOnlyToken = sinon.stub()
.callsArgWith(1, null, @project)
@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
.callsArgWith(2, new Error('woops'))
@ProjectController.loadEditor = sinon.stub()
@TokenAccessController.readOnlyToken @req, @res, @next
it 'should try to find a project with this token', (done) ->
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.callCount)
.to.equal 1
expect(@TokenAccessHandler.findProjectWithReadOnlyToken.calledWith(@readOnlyToken))
.to.equal true
done()
it 'should add the user to the project with read-only access', (done) ->
expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
.to.equal 1
expect(@TokenAccessHandler.addReadOnlyUserToProject.calledWith(
@userId.toString(), @projectId
))
.to.equal true
done()
it 'should not pass control to loadEditor', (done) ->
expect(@ProjectController.loadEditor.callCount).to.equal 0
expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
done()
it 'should call next with an error', (done) ->
expect(@next.callCount).to.equal 1
expect(@next.lastCall.args[0]).to.be.instanceof Error
done()
		# Anonymous (not-logged-in) access via a read-only token: access should be
		# granted at the session level only, never persisted onto the project.
		describe 'anonymous', ->
			beforeEach ->
				# No logged-in user for any test in this context; stub out the
				# session-grant so each case can assert whether it was invoked.
				@AuthenticationController.getLoggedInUserId = sinon.stub().returns(null)
				@TokenAccessHandler.grantSessionTokenAccess = sinon.stub()

			# Happy path: token resolves to a project, control passes to the editor.
			describe 'when all goes well', ->
				beforeEach ->
					@req = new MockRequest()
					@res = new MockResponse()
					@next = sinon.stub()
					@req.params['read_only_token'] = @readOnlyToken
					# Token lookup succeeds and yields the project.
					@TokenAccessHandler.findProjectWithReadOnlyToken = sinon.stub()
						.callsArgWith(1, null, @project)
					@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
						.callsArgWith(2, null)
					@ProjectController.loadEditor = sinon.stub()
					@TokenAccessController.readOnlyToken @req, @res, @next

				it 'should try to find a project with this token', (done) ->
					expect(@TokenAccessHandler.findProjectWithReadOnlyToken.callCount)
						.to.equal 1
					expect(@TokenAccessHandler.findProjectWithReadOnlyToken.calledWith(@readOnlyToken))
						.to.equal true
					done()

				it 'should give the user session read-only access', (done) ->
					# Anonymous users get per-session access, keyed by request/project/token.
					expect(@TokenAccessHandler.grantSessionTokenAccess.callCount)
						.to.equal 1
					expect(@TokenAccessHandler.grantSessionTokenAccess.calledWith(
						@req, @projectId, @readOnlyToken
					))
						.to.equal true
					done()

				it 'should not add the user to the project with read-only access', (done) ->
					# Persistent project membership is reserved for logged-in users.
					expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
						.to.equal 0
					done()

				it 'should pass control to loadEditor', (done) ->
					# The controller rewrites params/flags before delegating to the editor.
					expect(@req.params.Project_id).to.equal @projectId.toString()
					expect(@req._anonymousAccessToken).to.equal @readOnlyToken
					expect(@ProjectController.loadEditor.callCount).to.equal 1
					expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal true
					done()

			# Lookup failure: the error must propagate to next() and nothing else runs.
			describe 'when findProject produces an error', ->
				beforeEach ->
					@req = new MockRequest()
					@res = new MockResponse()
					@next = sinon.stub()
					@req.params['read_only_token'] = @readOnlyToken
					# Token lookup yields an error instead of a project.
					@TokenAccessHandler.findProjectWithReadOnlyToken = sinon.stub()
						.callsArgWith(1, new Error('woops'))
					@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
						.callsArgWith(2, null)
					@ProjectController.loadEditor = sinon.stub()
					@TokenAccessController.readOnlyToken @req, @res, @next

				it 'should try to find a project with this token', (done) ->
					expect(@TokenAccessHandler.findProjectWithReadOnlyToken.callCount)
						.to.equal 1
					expect(@TokenAccessHandler.findProjectWithReadOnlyToken.calledWith(@readOnlyToken))
						.to.equal true
					done()

				it 'should not give the user session read-only access', (done) ->
					expect(@TokenAccessHandler.grantSessionTokenAccess.callCount)
						.to.equal 0
					done()

				it 'should not add the user to the project with read-only access', (done) ->
					expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
						.to.equal 0
					done()

				it 'should not pass control to loadEditor', (done) ->
					expect(@ProjectController.loadEditor.callCount).to.equal 0
					expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
					done()

				it 'should call next with an error', (done) ->
					expect(@next.callCount).to.equal 1
					expect(@next.lastCall.args[0]).to.be.instanceof Error
					done()

			# No match: lookup succeeds but returns null, which should surface as
			# a not-found error via next() with no access granted.
			describe 'when findProject does not find a project', ->
				beforeEach ->
					@req = new MockRequest()
					@res = new MockResponse()
					@next = sinon.stub()
					@req.params['read_only_token'] = @readOnlyToken
					# Token lookup completes without error but finds no project.
					@TokenAccessHandler.findProjectWithReadOnlyToken = sinon.stub()
						.callsArgWith(1, null, null)
					@TokenAccessHandler.addReadOnlyUserToProject = sinon.stub()
						.callsArgWith(2, null)
					@ProjectController.loadEditor = sinon.stub()
					@TokenAccessController.readOnlyToken @req, @res, @next

				it 'should try to find a project with this token', (done) ->
					expect(@TokenAccessHandler.findProjectWithReadOnlyToken.callCount)
						.to.equal 1
					expect(@TokenAccessHandler.findProjectWithReadOnlyToken.calledWith(@readOnlyToken))
						.to.equal true
					done()

				it 'should not give the user session read-only access', (done) ->
					expect(@TokenAccessHandler.grantSessionTokenAccess.callCount)
						.to.equal 0
					done()

				it 'should not pass control to loadEditor', (done) ->
					expect(@ProjectController.loadEditor.callCount).to.equal 0
					expect(@ProjectController.loadEditor.calledWith(@req, @res, @next)).to.equal false
					done()

				it 'should not add the user to the project with read-only access', (done) ->
					expect(@TokenAccessHandler.addReadOnlyUserToProject.callCount)
						.to.equal 0
					done()

				it 'should call next with a not-found error', (done) ->
					expect(@next.callCount).to.equal 1
					expect(@next.lastCall.args[0]).to.be.instanceof Error
					done()
|
[
{
"context": "nder 'reader', {\n empty: true\n author: 'Framesia'\n description: 'Baca artikel'\n keywords",
"end": 670,
"score": 0.9657602310180664,
"start": 662,
"tag": "NAME",
"value": "Framesia"
}
] | src/controllers/read/index.coffee | damaera/framesia-code | 1 | express = require 'express'
router = express.Router()
read = require 'node-readability'
request = require 'request'
extractor = require 'unfluff'
Link = require '../../models/link'
saveLink = require './saveLink'
router.get '/', (req, res, next) ->
{ img } = req.query
if img
urlImg = (img.split '?')[0]
# request = request.defaults { encoding: null }
# request img, (error, response, body) ->
request.get(urlImg).pipe(res)
# request img, {encoding: null}, (err, resp, body) ->
# console.log resp
# res.send body
# console.log img
# res.send img
else
res.render 'reader', {
empty: true
author: 'Framesia'
description: 'Baca artikel'
keywords: 'framesia,artikel'
}
router.post '/', saveLink
module.exports = router
| 157827 | express = require 'express'
router = express.Router()
read = require 'node-readability'
request = require 'request'
extractor = require 'unfluff'
Link = require '../../models/link'
saveLink = require './saveLink'
router.get '/', (req, res, next) ->
{ img } = req.query
if img
urlImg = (img.split '?')[0]
# request = request.defaults { encoding: null }
# request img, (error, response, body) ->
request.get(urlImg).pipe(res)
# request img, {encoding: null}, (err, resp, body) ->
# console.log resp
# res.send body
# console.log img
# res.send img
else
res.render 'reader', {
empty: true
author: '<NAME>'
description: 'Baca artikel'
keywords: 'framesia,artikel'
}
router.post '/', saveLink
module.exports = router
| true | express = require 'express'
router = express.Router()
read = require 'node-readability'
request = require 'request'
extractor = require 'unfluff'
Link = require '../../models/link'
saveLink = require './saveLink'
router.get '/', (req, res, next) ->
{ img } = req.query
if img
urlImg = (img.split '?')[0]
# request = request.defaults { encoding: null }
# request img, (error, response, body) ->
request.get(urlImg).pipe(res)
# request img, {encoding: null}, (err, resp, body) ->
# console.log resp
# res.send body
# console.log img
# res.send img
else
res.render 'reader', {
empty: true
author: 'PI:NAME:<NAME>END_PI'
description: 'Baca artikel'
keywords: 'framesia,artikel'
}
router.post '/', saveLink
module.exports = router
|
[
{
"context": "g, @attachment, index) ->\n options =\n key: \"#{@msg.ts}-attachment-#{index}\"\n msg: @msg\n parent: @\n channel: @m",
"end": 420,
"score": 0.9989128708839417,
"start": 388,
"tag": "KEY",
"value": "\"#{@msg.ts}-attachment-#{index}\""
}
] | src/models/attachment.coffee | atmos/electrogram | 19 | Message = require "./message"
AttachmentElement = require "../react/attachment"
TwitterAttachmentElement = require "../react/attachments/twitter"
YouTubeAttachmentElement = require "../react/attachments/youtube"
SoundCloudAttachmentElement = require "../react/attachments/soundcloud"
class Attachment
constructor: (@msg, @attachment, index) ->
options =
key: "#{@msg.ts}-attachment-#{index}"
msg: @msg
parent: @
channel: @msg.channel
attachment: @attachment
@reactElement = @createReactElement(options)
createReactElement: (options) ->
switch @attachment.service_name
when "twitter"
new React.createElement TwitterAttachmentElement, options
when "YouTube"
new React.createElement YouTubeAttachmentElement, options
when "SoundCloud"
new React.createElement SoundCloudAttachmentElement, options
else
console.log @attachment
console.log "Uncustomized attachment:> #{@attachment.service_name}"
new React.createElement AttachmentElement, options
module.exports = Attachment
| 138042 | Message = require "./message"
AttachmentElement = require "../react/attachment"
TwitterAttachmentElement = require "../react/attachments/twitter"
YouTubeAttachmentElement = require "../react/attachments/youtube"
SoundCloudAttachmentElement = require "../react/attachments/soundcloud"
class Attachment
constructor: (@msg, @attachment, index) ->
options =
key: <KEY>
msg: @msg
parent: @
channel: @msg.channel
attachment: @attachment
@reactElement = @createReactElement(options)
createReactElement: (options) ->
switch @attachment.service_name
when "twitter"
new React.createElement TwitterAttachmentElement, options
when "YouTube"
new React.createElement YouTubeAttachmentElement, options
when "SoundCloud"
new React.createElement SoundCloudAttachmentElement, options
else
console.log @attachment
console.log "Uncustomized attachment:> #{@attachment.service_name}"
new React.createElement AttachmentElement, options
module.exports = Attachment
| true | Message = require "./message"
AttachmentElement = require "../react/attachment"
TwitterAttachmentElement = require "../react/attachments/twitter"
YouTubeAttachmentElement = require "../react/attachments/youtube"
SoundCloudAttachmentElement = require "../react/attachments/soundcloud"
class Attachment
constructor: (@msg, @attachment, index) ->
options =
key: PI:KEY:<KEY>END_PI
msg: @msg
parent: @
channel: @msg.channel
attachment: @attachment
@reactElement = @createReactElement(options)
createReactElement: (options) ->
switch @attachment.service_name
when "twitter"
new React.createElement TwitterAttachmentElement, options
when "YouTube"
new React.createElement YouTubeAttachmentElement, options
when "SoundCloud"
new React.createElement SoundCloudAttachmentElement, options
else
console.log @attachment
console.log "Uncustomized attachment:> #{@attachment.service_name}"
new React.createElement AttachmentElement, options
module.exports = Attachment
|
[
{
"context": "ls for MongoDB\n\n@description: Error types\n@author: Christopher Thorn\n###\nclass @BongoError extends Error\n constructor",
"end": 93,
"score": 0.999864399433136,
"start": 76,
"tag": "NAME",
"value": "Christopher Thorn"
}
] | node_modules_koding/bongo/lib/errortypes.coffee | ezgikaysi/koding | 1 | ###
Bongo.js
Unfancy models for MongoDB
@description: Error types
@author: Christopher Thorn
###
class @BongoError extends Error
constructor:(@message)->
{constructor} = @
{@name} = constructor
Error.call @
Error.captureStackTrace @, constructor
class @ValidationError extends @BongoError
constructor:(message, modelConstructor, options)->
{@path, @value, @option, @errorCode} = options
@modelConstructor = modelConstructor
option = if @option then " !~ #{@option}" else ''
super \
"""
#{message}:
#{@modelConstructor?.name or 'model'}({ \"#{@path.join '.'}\": #{@value} })#{option}
"""
class @ValidationErrors extends @BongoError
constructor:(@errors, message='Errors were encountered during validation.')->
super message
class @SchemaError extends @BongoError
class @IndexError extends @BongoError
| 52035 | ###
Bongo.js
Unfancy models for MongoDB
@description: Error types
@author: <NAME>
###
class @BongoError extends Error
constructor:(@message)->
{constructor} = @
{@name} = constructor
Error.call @
Error.captureStackTrace @, constructor
class @ValidationError extends @BongoError
constructor:(message, modelConstructor, options)->
{@path, @value, @option, @errorCode} = options
@modelConstructor = modelConstructor
option = if @option then " !~ #{@option}" else ''
super \
"""
#{message}:
#{@modelConstructor?.name or 'model'}({ \"#{@path.join '.'}\": #{@value} })#{option}
"""
class @ValidationErrors extends @BongoError
constructor:(@errors, message='Errors were encountered during validation.')->
super message
class @SchemaError extends @BongoError
class @IndexError extends @BongoError
| true | ###
Bongo.js
Unfancy models for MongoDB
@description: Error types
@author: PI:NAME:<NAME>END_PI
###
class @BongoError extends Error
constructor:(@message)->
{constructor} = @
{@name} = constructor
Error.call @
Error.captureStackTrace @, constructor
class @ValidationError extends @BongoError
constructor:(message, modelConstructor, options)->
{@path, @value, @option, @errorCode} = options
@modelConstructor = modelConstructor
option = if @option then " !~ #{@option}" else ''
super \
"""
#{message}:
#{@modelConstructor?.name or 'model'}({ \"#{@path.join '.'}\": #{@value} })#{option}
"""
class @ValidationErrors extends @BongoError
constructor:(@errors, message='Errors were encountered during validation.')->
super message
class @SchemaError extends @BongoError
class @IndexError extends @BongoError
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9994418621063232,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-zlib-invalid-input.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# test uncompressing invalid input
common = require("../common.js")
assert = require("assert")
zlib = require("zlib")
nonStringInputs = [
1
true
{
a: 1
}
["a"]
]
console.error "Doing the non-strings"
nonStringInputs.forEach (input) ->
# zlib.gunzip should not throw an error when called with bad input.
assert.doesNotThrow ->
zlib.gunzip input, (err, buffer) ->
# zlib.gunzip should pass the error to the callback.
assert.ok err
return
return
return
console.error "Doing the unzips"
# zlib.Unzip classes need to get valid data, or else they'll throw.
unzips = [
zlib.Unzip()
zlib.Gunzip()
zlib.Inflate()
zlib.InflateRaw()
]
hadError = []
unzips.forEach (uz, i) ->
console.error "Error for " + uz.constructor.name
uz.on "error", (er) ->
console.error "Error event", er
hadError[i] = true
return
uz.on "end", (er) ->
throw new Error("end event should not be emitted " + uz.constructor.name)return
# this will trigger error event
uz.write "this is not valid compressed data."
return
process.on "exit", ->
assert.deepEqual hadError, [
true
true
true
true
], "expect 4 errors"
return
| 5491 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# test uncompressing invalid input
common = require("../common.js")
assert = require("assert")
zlib = require("zlib")
nonStringInputs = [
1
true
{
a: 1
}
["a"]
]
console.error "Doing the non-strings"
nonStringInputs.forEach (input) ->
# zlib.gunzip should not throw an error when called with bad input.
assert.doesNotThrow ->
zlib.gunzip input, (err, buffer) ->
# zlib.gunzip should pass the error to the callback.
assert.ok err
return
return
return
console.error "Doing the unzips"
# zlib.Unzip classes need to get valid data, or else they'll throw.
unzips = [
zlib.Unzip()
zlib.Gunzip()
zlib.Inflate()
zlib.InflateRaw()
]
hadError = []
unzips.forEach (uz, i) ->
console.error "Error for " + uz.constructor.name
uz.on "error", (er) ->
console.error "Error event", er
hadError[i] = true
return
uz.on "end", (er) ->
throw new Error("end event should not be emitted " + uz.constructor.name)return
# this will trigger error event
uz.write "this is not valid compressed data."
return
process.on "exit", ->
assert.deepEqual hadError, [
true
true
true
true
], "expect 4 errors"
return
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# test uncompressing invalid input
common = require("../common.js")
assert = require("assert")
zlib = require("zlib")
nonStringInputs = [
1
true
{
a: 1
}
["a"]
]
console.error "Doing the non-strings"
nonStringInputs.forEach (input) ->
# zlib.gunzip should not throw an error when called with bad input.
assert.doesNotThrow ->
zlib.gunzip input, (err, buffer) ->
# zlib.gunzip should pass the error to the callback.
assert.ok err
return
return
return
console.error "Doing the unzips"
# zlib.Unzip classes need to get valid data, or else they'll throw.
unzips = [
zlib.Unzip()
zlib.Gunzip()
zlib.Inflate()
zlib.InflateRaw()
]
hadError = []
unzips.forEach (uz, i) ->
console.error "Error for " + uz.constructor.name
uz.on "error", (er) ->
console.error "Error event", er
hadError[i] = true
return
uz.on "end", (er) ->
throw new Error("end event should not be emitted " + uz.constructor.name)return
# this will trigger error event
uz.write "this is not valid compressed data."
return
process.on "exit", ->
assert.deepEqual hadError, [
true
true
true
true
], "expect 4 errors"
return
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9905102849006653,
"start": 12,
"tag": "NAME",
"value": "Joyent"
},
{
"context": "sten common.PORT, ->\n console.log \"TLS server on 127.0.0.1:%d\", common.PORT\n return\n\ns... | test/disabled/test-tls-server.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# Example of new TLS API. Test with:
#
# $> openssl s_client -connect localhost:12346 \
# -key test/fixtures/agent.key -cert test/fixtures/agent.crt
#
# $> openssl s_client -connect localhost:12346
#
common = require("../common")
tls = require("tls")
fs = require("fs")
join = require("path").join
key = fs.readFileSync(join(common.fixturesDir, "agent.key")).toString()
cert = fs.readFileSync(join(common.fixturesDir, "agent.crt")).toString()
s = tls.Server(
key: key
cert: cert
ca: []
requestCert: true
rejectUnauthorized: true
)
s.listen common.PORT, ->
console.log "TLS server on 127.0.0.1:%d", common.PORT
return
s.on "authorized", (c) ->
console.log "authed connection"
c.end "bye authorized friend.\n"
return
s.on "unauthorized", (c, e) ->
console.log "unauthed connection: %s", e
c.end "bye unauthorized person.\n"
return
| 32233 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# Example of new TLS API. Test with:
#
# $> openssl s_client -connect localhost:12346 \
# -key test/fixtures/agent.key -cert test/fixtures/agent.crt
#
# $> openssl s_client -connect localhost:12346
#
common = require("../common")
tls = require("tls")
fs = require("fs")
join = require("path").join
key = fs.readFileSync(join(common.fixturesDir, "agent.key")).toString()
cert = fs.readFileSync(join(common.fixturesDir, "agent.crt")).toString()
s = tls.Server(
key: key
cert: cert
ca: []
requestCert: true
rejectUnauthorized: true
)
s.listen common.PORT, ->
console.log "TLS server on 127.0.0.1:%d", common.PORT
return
s.on "authorized", (c) ->
console.log "authed connection"
c.end "bye authorized friend.\n"
return
s.on "unauthorized", (c, e) ->
console.log "unauthed connection: %s", e
c.end "bye unauthorized person.\n"
return
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# Example of new TLS API. Test with:
#
# $> openssl s_client -connect localhost:12346 \
# -key test/fixtures/agent.key -cert test/fixtures/agent.crt
#
# $> openssl s_client -connect localhost:12346
#
common = require("../common")
tls = require("tls")
fs = require("fs")
join = require("path").join
key = fs.readFileSync(join(common.fixturesDir, "agent.key")).toString()
cert = fs.readFileSync(join(common.fixturesDir, "agent.crt")).toString()
s = tls.Server(
key: key
cert: cert
ca: []
requestCert: true
rejectUnauthorized: true
)
s.listen common.PORT, ->
console.log "TLS server on 127.0.0.1:%d", common.PORT
return
s.on "authorized", (c) ->
console.log "authed connection"
c.end "bye authorized friend.\n"
return
s.on "unauthorized", (c, e) ->
console.log "unauthed connection: %s", e
c.end "bye unauthorized person.\n"
return
|
[
{
"context": "###\n Lifecycle.js 1.0.4\n (c) 2011, 2012 Kevin Malakoff - http://kmalakoff.github.com/json-serialize/\n L",
"end": 56,
"score": 0.9996787309646606,
"start": 42,
"tag": "NAME",
"value": "Kevin Malakoff"
},
{
"context": "ackbone.js and its licensing:\n https:github.... | src/lifecycle.coffee | ax3l/lifecycle | 2 | ###
Lifecycle.js 1.0.4
(c) 2011, 2012 Kevin Malakoff - http://kmalakoff.github.com/json-serialize/
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Note: some 'extend'-related code from Backbone.js is repeated in this file.
Please see the following for details on Backbone.js and its licensing:
https:github.com/documentcloud/backbone/blob/master/LICENSE
###
# export or create Lifecycle namespace
LC = @LC = if (typeof(exports) != 'undefined') then exports else {}
LC.VERSION = "1.0.4"
################HELPERS - BEGIN#################
isArray = (obj) ->
obj.constructor is Array
copyProps = (dest, source) ->
(dest[key] = value) for key, value of source
return dest
# From Backbone.js (https:github.com/documentcloud/backbone)
`// Shared empty constructor function to aid in prototype-chain creation.
var ctor = function(){};
// Helper function to correctly set up the prototype chain, for subclasses.
// Similar to 'goog.inherits', but uses a hash of prototype properties and
// class properties to be extended.
var inherits = function(parent, protoProps, staticProps) {
var child;
// The constructor function for the new subclass is either defined by you
// (the "constructor" property in your extend definition), or defaulted
// by us to simply call the parent's constructor.
if (protoProps && protoProps.hasOwnProperty('constructor')) {
child = protoProps.constructor;
} else {
child = function(){ parent.apply(this, arguments); };
}
// Inherit class (static) properties from parent.
copyProps(child, parent);
// Set the prototype chain to inherit from parent, without calling
// parent's constructor function.
ctor.prototype = parent.prototype;
child.prototype = new ctor();
// Add prototype properties (instance properties) to the subclass,
// if supplied.
if (protoProps) copyProps(child.prototype, protoProps);
// Add static properties to the constructor function, if supplied.
if (staticProps) copyProps(child, staticProps);
// Correctly set child's 'prototype.constructor'.
child.prototype.constructor = child;
// Set a convenience property in case the parent's prototype is needed later.
child.__super__ = parent.prototype;
return child;
};
// The self-propagating extend function that BacLCone classes use.
var extend = function (protoProps, classProps) {
var child = inherits(this, protoProps, classProps);
child.extend = this.extend;
return child;
};
`
################HELPERS - END#################
# Deduces the type of ownership of an item and if available, it retains it (reference counted) or clones it.
# <br/>**Options:**<br/>
# * `properties` - used to disambigate between owning an object and owning each property.<br/>
# * `share_collection` - used to disambigate between owning a collection's items (share) and cloning a collection (don't share).
# * `prefer_clone` - used to disambigate when both retain and clone exist. By default retain is prefered (eg. sharing for lower memory footprint).
LC.own = (obj, options) ->
return obj if not obj or (typeof (obj) isnt "object")
options or (options = {})
# own each item in the array
if isArray(obj)
if options.share_collection
LC.own(value, {prefer_clone: options.prefer_clone}) for value in obj
return obj
else
clone = []
clone.push(LC.own(value, {prefer_clone: options.prefer_clone})) for value in obj
return clone
# own each property in an object
else if options.properties
if options.share_collection
LC.own(value, {prefer_clone: options.prefer_clone}) for key, value of obj
return obj
else
clone = {}
(clone[key] = LC.own(value, {prefer_clone: options.prefer_clone})) for key, value of obj
return clone
# use retain function
else if obj.retain
return if options.prefer_clone and obj.clone then obj.clone() else obj.retain()
# use clone function
else if obj.clone
return obj.clone()
return obj
# Deduces the type of ownership of an item and if available, it releases it (reference counted) or destroys it.
# <br/>**Options:**<br/>
# * `properties` - used to disambigate between owning an object and owning each property.<br/>
# * `clear_values` - used to disambigate between clearing disowned items and removing them (by default, they are removed).
# * `remove_values` - used to indicate that the values should be disowned and removed from the collections.
LC.disown = (obj, options={}) ->
return obj if not obj or (typeof (obj) isnt "object")
# disown each item in the array
if isArray(obj)
if options.clear_values
(LC.disown(value, {clear_values: options.clear_values}); obj[index]=null) for index, value of obj
else
LC.disown(value, {remove_values: options.remove_values}) for value in obj
obj.length = 0 if options.remove_values
# disown each property in an object
else if options.properties
if options.clear_values
(LC.disown(value, {clear_values: options.clear_values}); obj[key]=null) for key, value of obj
else
(LC.disown(value, {remove_values: options.remove_values}); delete obj[key]) for key, value of obj
# use release function
else if obj.release
obj.release()
# use destroy function
else if obj.destroy
obj.destroy()
return obj
# A simple reference counting class using Coffeescript class construction or JavaScript extend({}) .
# * __destroy() - override for custom cleanup when all references are released. Note: this function is __destroy instead of _destroy due to an incompatibility with a Knockout convention (https:github.com/kmalakoff/knocLCack/pull/17)
class LC.RefCountable
@extend = extend # from BacLCone non-Coffeescript inheritance (use "LC.RefCountable_RCBase.extend({})" in Javascript instead of "class MyClass extends LC.RefCountable")
constructor: ->
@__LC or= {}
@__LC.ref_count = 1
__destroy: -> # NOOP
# reference counting
retain: ->
throw "RefCountable: ref_count is corrupt: #{@__LC.ref_count}" if (@__LC.ref_count <= 0)
@__LC.ref_count++
return @
release: ->
throw "RefCountable: ref_count is corrupt: #{@__LC.ref_count}" if (@__LC.ref_count <= 0)
@__LC.ref_count--
@__destroy() unless @__LC.ref_count
return @
refCount: -> return @__LC.ref_count | 204987 | ###
Lifecycle.js 1.0.4
(c) 2011, 2012 <NAME> - http://kmalakoff.github.com/json-serialize/
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Note: some 'extend'-related code from Backbone.js is repeated in this file.
Please see the following for details on Backbone.js and its licensing:
https:github.com/documentcloud/backbone/blob/master/LICENSE
###
# export or create Lifecycle namespace
LC = @LC = if (typeof(exports) != 'undefined') then exports else {}
LC.VERSION = "1.0.4"
################HELPERS - BEGIN#################
isArray = (obj) ->
obj.constructor is Array
copyProps = (dest, source) ->
(dest[key] = value) for key, value of source
return dest
# From Backbone.js (https:github.com/documentcloud/backbone)
`// Shared empty constructor function to aid in prototype-chain creation.
var ctor = function(){};
// Helper function to correctly set up the prototype chain, for subclasses.
// Similar to 'goog.inherits', but uses a hash of prototype properties and
// class properties to be extended.
var inherits = function(parent, protoProps, staticProps) {
var child;
// The constructor function for the new subclass is either defined by you
// (the "constructor" property in your extend definition), or defaulted
// by us to simply call the parent's constructor.
if (protoProps && protoProps.hasOwnProperty('constructor')) {
child = protoProps.constructor;
} else {
child = function(){ parent.apply(this, arguments); };
}
// Inherit class (static) properties from parent.
copyProps(child, parent);
// Set the prototype chain to inherit from parent, without calling
// parent's constructor function.
ctor.prototype = parent.prototype;
child.prototype = new ctor();
// Add prototype properties (instance properties) to the subclass,
// if supplied.
if (protoProps) copyProps(child.prototype, protoProps);
// Add static properties to the constructor function, if supplied.
if (staticProps) copyProps(child, staticProps);
// Correctly set child's 'prototype.constructor'.
child.prototype.constructor = child;
// Set a convenience property in case the parent's prototype is needed later.
child.__super__ = parent.prototype;
return child;
};
// The self-propagating extend function that BacLCone classes use.
var extend = function (protoProps, classProps) {
var child = inherits(this, protoProps, classProps);
child.extend = this.extend;
return child;
};
`
################HELPERS - END#################
# Deduces the type of ownership of an item and if available, it retains it (reference counted) or clones it.
# <br/>**Options:**<br/>
# * `properties` - used to disambigate between owning an object and owning each property.<br/>
# * `share_collection` - used to disambigate between owning a collection's items (share) and cloning a collection (don't share).
# * `prefer_clone` - used to disambigate when both retain and clone exist. By default retain is prefered (eg. sharing for lower memory footprint).
LC.own = (obj, options) ->
return obj if not obj or (typeof (obj) isnt "object")
options or (options = {})
# own each item in the array
if isArray(obj)
if options.share_collection
LC.own(value, {prefer_clone: options.prefer_clone}) for value in obj
return obj
else
clone = []
clone.push(LC.own(value, {prefer_clone: options.prefer_clone})) for value in obj
return clone
# own each property in an object
else if options.properties
if options.share_collection
LC.own(value, {prefer_clone: options.prefer_clone}) for key, value of obj
return obj
else
clone = {}
(clone[key] = LC.own(value, {prefer_clone: options.prefer_clone})) for key, value of obj
return clone
# use retain function
else if obj.retain
return if options.prefer_clone and obj.clone then obj.clone() else obj.retain()
# use clone function
else if obj.clone
return obj.clone()
return obj
# Deduces the type of ownership of an item and if available, it releases it (reference counted) or destroys it.
# <br/>**Options:**<br/>
# * `properties` - used to disambigate between owning an object and owning each property.<br/>
# * `clear_values` - used to disambigate between clearing disowned items and removing them (by default, they are removed).
# * `remove_values` - used to indicate that the values should be disowned and removed from the collections.
LC.disown = (obj, options={}) ->
return obj if not obj or (typeof (obj) isnt "object")
# disown each item in the array
if isArray(obj)
if options.clear_values
(LC.disown(value, {clear_values: options.clear_values}); obj[index]=null) for index, value of obj
else
LC.disown(value, {remove_values: options.remove_values}) for value in obj
obj.length = 0 if options.remove_values
# disown each property in an object
else if options.properties
if options.clear_values
(LC.disown(value, {clear_values: options.clear_values}); obj[key]=null) for key, value of obj
else
(LC.disown(value, {remove_values: options.remove_values}); delete obj[key]) for key, value of obj
# use release function
else if obj.release
obj.release()
# use destroy function
else if obj.destroy
obj.destroy()
return obj
# A simple reference counting class using Coffeescript class construction or JavaScript extend({}) .
# * __destroy() - override for custom cleanup when all references are released. Note: this function is __destroy instead of _destroy due to an incompatibility with a Knockout convention (https:github.com/kmalakoff/knocLCack/pull/17)
class LC.RefCountable
@extend = extend # from BacLCone non-Coffeescript inheritance (use "LC.RefCountable_RCBase.extend({})" in Javascript instead of "class MyClass extends LC.RefCountable")
constructor: ->
@__LC or= {}
@__LC.ref_count = 1
__destroy: -> # NOOP
# reference counting
retain: ->
throw "RefCountable: ref_count is corrupt: #{@__LC.ref_count}" if (@__LC.ref_count <= 0)
@__LC.ref_count++
return @
release: ->
throw "RefCountable: ref_count is corrupt: #{@__LC.ref_count}" if (@__LC.ref_count <= 0)
@__LC.ref_count--
@__destroy() unless @__LC.ref_count
return @
refCount: -> return @__LC.ref_count | true | ###
Lifecycle.js 1.0.4
(c) 2011, 2012 PI:NAME:<NAME>END_PI - http://kmalakoff.github.com/json-serialize/
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Note: some 'extend'-related code from Backbone.js is repeated in this file.
Please see the following for details on Backbone.js and its licensing:
https:github.com/documentcloud/backbone/blob/master/LICENSE
###
# export or create Lifecycle namespace
LC = @LC = if (typeof(exports) != 'undefined') then exports else {}
LC.VERSION = "1.0.4"
################HELPERS - BEGIN#################
isArray = (obj) ->
obj.constructor is Array
copyProps = (dest, source) ->
(dest[key] = value) for key, value of source
return dest
# From Backbone.js (https:github.com/documentcloud/backbone)
`// Shared empty constructor function to aid in prototype-chain creation.
var ctor = function(){};
// Helper function to correctly set up the prototype chain, for subclasses.
// Similar to 'goog.inherits', but uses a hash of prototype properties and
// class properties to be extended.
var inherits = function(parent, protoProps, staticProps) {
var child;
// The constructor function for the new subclass is either defined by you
// (the "constructor" property in your extend definition), or defaulted
// by us to simply call the parent's constructor.
if (protoProps && protoProps.hasOwnProperty('constructor')) {
child = protoProps.constructor;
} else {
child = function(){ parent.apply(this, arguments); };
}
// Inherit class (static) properties from parent.
copyProps(child, parent);
// Set the prototype chain to inherit from parent, without calling
// parent's constructor function.
ctor.prototype = parent.prototype;
child.prototype = new ctor();
// Add prototype properties (instance properties) to the subclass,
// if supplied.
if (protoProps) copyProps(child.prototype, protoProps);
// Add static properties to the constructor function, if supplied.
if (staticProps) copyProps(child, staticProps);
// Correctly set child's 'prototype.constructor'.
child.prototype.constructor = child;
// Set a convenience property in case the parent's prototype is needed later.
child.__super__ = parent.prototype;
return child;
};
// The self-propagating extend function that BacLCone classes use.
var extend = function (protoProps, classProps) {
var child = inherits(this, protoProps, classProps);
child.extend = this.extend;
return child;
};
`
################HELPERS - END#################
# Deduces the type of ownership of an item and if available, it retains it (reference counted) or clones it.
# <br/>**Options:**<br/>
# * `properties` - used to disambigate between owning an object and owning each property.<br/>
# * `share_collection` - used to disambigate between owning a collection's items (share) and cloning a collection (don't share).
# * `prefer_clone` - used to disambigate when both retain and clone exist. By default retain is prefered (eg. sharing for lower memory footprint).
LC.own = (obj, options) ->
return obj if not obj or (typeof (obj) isnt "object")
options or (options = {})
# own each item in the array
if isArray(obj)
if options.share_collection
LC.own(value, {prefer_clone: options.prefer_clone}) for value in obj
return obj
else
clone = []
clone.push(LC.own(value, {prefer_clone: options.prefer_clone})) for value in obj
return clone
# own each property in an object
else if options.properties
if options.share_collection
LC.own(value, {prefer_clone: options.prefer_clone}) for key, value of obj
return obj
else
clone = {}
(clone[key] = LC.own(value, {prefer_clone: options.prefer_clone})) for key, value of obj
return clone
# use retain function
else if obj.retain
return if options.prefer_clone and obj.clone then obj.clone() else obj.retain()
# use clone function
else if obj.clone
return obj.clone()
return obj
# Deduces the type of ownership of an item and if available, it releases it (reference counted) or destroys it.
# <br/>**Options:**<br/>
# * `properties` - used to disambigate between owning an object and owning each property.<br/>
# * `clear_values` - used to disambigate between clearing disowned items and removing them (by default, they are removed).
# * `remove_values` - used to indicate that the values should be disowned and removed from the collections.
LC.disown = (obj, options={}) ->
return obj if not obj or (typeof (obj) isnt "object")
# disown each item in the array
if isArray(obj)
if options.clear_values
(LC.disown(value, {clear_values: options.clear_values}); obj[index]=null) for index, value of obj
else
LC.disown(value, {remove_values: options.remove_values}) for value in obj
obj.length = 0 if options.remove_values
# disown each property in an object
else if options.properties
if options.clear_values
(LC.disown(value, {clear_values: options.clear_values}); obj[key]=null) for key, value of obj
else
(LC.disown(value, {remove_values: options.remove_values}); delete obj[key]) for key, value of obj
# use release function
else if obj.release
obj.release()
# use destroy function
else if obj.destroy
obj.destroy()
return obj
# A simple reference counting class using Coffeescript class construction or JavaScript extend({}) .
# * __destroy() - override for custom cleanup when all references are released. Note: this function is __destroy instead of _destroy due to an incompatibility with a Knockout convention (https:github.com/kmalakoff/knocLCack/pull/17)
class LC.RefCountable
@extend = extend # from BacLCone non-Coffeescript inheritance (use "LC.RefCountable_RCBase.extend({})" in Javascript instead of "class MyClass extends LC.RefCountable")
constructor: ->
@__LC or= {}
@__LC.ref_count = 1
__destroy: -> # NOOP
# reference counting
retain: ->
throw "RefCountable: ref_count is corrupt: #{@__LC.ref_count}" if (@__LC.ref_count <= 0)
@__LC.ref_count++
return @
release: ->
throw "RefCountable: ref_count is corrupt: #{@__LC.ref_count}" if (@__LC.ref_count <= 0)
@__LC.ref_count--
@__destroy() unless @__LC.ref_count
return @
refCount: -> return @__LC.ref_count |
[
{
"context": "dhub.com/v1/messages/?username=4805706128&api_key=c067818f26b8d711672621f4dcec667ab86cdc48\"\n json: \n contacts: ['108260810164275910'",
"end": 652,
"score": 0.9997378587722778,
"start": 612,
"tag": "KEY",
"value": "c067818f26b8d711672621f4dcec667ab86cdc48"
}
] | cmdblu/bac-to-blinky.coffee | octoblu/att-hackathon | 0 | Meshblu = require './src/meshblu'
Device = require './src/device'
{spawn} = require 'child_process'
request = require 'request'
_ = require 'lodash'
device_uuid = process.env.DEVICE_UUID
device_token = process.env.DEVICE_TOKEN
payload_only = process.env.PAYLOAD_ONLY
meshblu_uri = process.env.MESHBLU_URI || 'wss://meshblu.octoblu.com'
send = (meshblu, messages) =>
_.each messages, (msg) =>
console.log 'Sending', JSON.stringify(msg)
meshblu.connection.message msg
sendSMS = =>
console.log('Sending SMS')
request.post
uri: "https://api.sendhub.com/v1/messages/?username=4805706128&api_key=c067818f26b8d711672621f4dcec667ab86cdc48"
json:
contacts: ['108260810164275910']
text: 'RESCUE ME!'
deboucedSend = _.throttle send, 1000
debouncedSMS = _.once sendSMS
face_uuid = '81d1fb81-36aa-11e4-8e5a-919063640dc3'
blinky_uuid = '4c225891-1e4a-11e4-93b0-b35ccdc222f2'
rally_uuid = 'c43462d1-1cea-11e4-861d-89322229e557/3c701ab0-2a69-11e4-ba29-b7d9779a4387'
meshblu = new Meshblu device_uuid, device_token, meshblu_uri, =>
console.log 'ready'
meshblu.connection.subscribe(uuid: device_uuid)
meshblu.onMessage (message) =>
if payload_only
console.log JSON.stringify(message.payload)
else
console.log JSON.stringify(message)
val = message.payload[1] - 500
if val < 0
val = 0
val = (val / 50)*2
if val > 9
val = 9
bac_level = parseInt(val) || 0
messages = []
blinkyMessage = {
devices: [blinky_uuid],
payload: bac_level
}
messages.push blinkyMessage
if bac_level > 4
carMessage = {
devices: [rally_uuid],
payload:
m : 'lock'
}
messages.push carMessage
debouncedSMS()
deboucedSend(meshblu, messages);
| 99218 | Meshblu = require './src/meshblu'
Device = require './src/device'
{spawn} = require 'child_process'
request = require 'request'
_ = require 'lodash'
device_uuid = process.env.DEVICE_UUID
device_token = process.env.DEVICE_TOKEN
payload_only = process.env.PAYLOAD_ONLY
meshblu_uri = process.env.MESHBLU_URI || 'wss://meshblu.octoblu.com'
send = (meshblu, messages) =>
_.each messages, (msg) =>
console.log 'Sending', JSON.stringify(msg)
meshblu.connection.message msg
sendSMS = =>
console.log('Sending SMS')
request.post
uri: "https://api.sendhub.com/v1/messages/?username=4805706128&api_key=<KEY>"
json:
contacts: ['108260810164275910']
text: 'RESCUE ME!'
deboucedSend = _.throttle send, 1000
debouncedSMS = _.once sendSMS
face_uuid = '81d1fb81-36aa-11e4-8e5a-919063640dc3'
blinky_uuid = '4c225891-1e4a-11e4-93b0-b35ccdc222f2'
rally_uuid = 'c43462d1-1cea-11e4-861d-89322229e557/3c701ab0-2a69-11e4-ba29-b7d9779a4387'
meshblu = new Meshblu device_uuid, device_token, meshblu_uri, =>
console.log 'ready'
meshblu.connection.subscribe(uuid: device_uuid)
meshblu.onMessage (message) =>
if payload_only
console.log JSON.stringify(message.payload)
else
console.log JSON.stringify(message)
val = message.payload[1] - 500
if val < 0
val = 0
val = (val / 50)*2
if val > 9
val = 9
bac_level = parseInt(val) || 0
messages = []
blinkyMessage = {
devices: [blinky_uuid],
payload: bac_level
}
messages.push blinkyMessage
if bac_level > 4
carMessage = {
devices: [rally_uuid],
payload:
m : 'lock'
}
messages.push carMessage
debouncedSMS()
deboucedSend(meshblu, messages);
| true | Meshblu = require './src/meshblu'
Device = require './src/device'
{spawn} = require 'child_process'
request = require 'request'
_ = require 'lodash'
device_uuid = process.env.DEVICE_UUID
device_token = process.env.DEVICE_TOKEN
payload_only = process.env.PAYLOAD_ONLY
meshblu_uri = process.env.MESHBLU_URI || 'wss://meshblu.octoblu.com'
send = (meshblu, messages) =>
_.each messages, (msg) =>
console.log 'Sending', JSON.stringify(msg)
meshblu.connection.message msg
sendSMS = =>
console.log('Sending SMS')
request.post
uri: "https://api.sendhub.com/v1/messages/?username=4805706128&api_key=PI:KEY:<KEY>END_PI"
json:
contacts: ['108260810164275910']
text: 'RESCUE ME!'
deboucedSend = _.throttle send, 1000
debouncedSMS = _.once sendSMS
face_uuid = '81d1fb81-36aa-11e4-8e5a-919063640dc3'
blinky_uuid = '4c225891-1e4a-11e4-93b0-b35ccdc222f2'
rally_uuid = 'c43462d1-1cea-11e4-861d-89322229e557/3c701ab0-2a69-11e4-ba29-b7d9779a4387'
meshblu = new Meshblu device_uuid, device_token, meshblu_uri, =>
console.log 'ready'
meshblu.connection.subscribe(uuid: device_uuid)
meshblu.onMessage (message) =>
if payload_only
console.log JSON.stringify(message.payload)
else
console.log JSON.stringify(message)
val = message.payload[1] - 500
if val < 0
val = 0
val = (val / 50)*2
if val > 9
val = 9
bac_level = parseInt(val) || 0
messages = []
blinkyMessage = {
devices: [blinky_uuid],
payload: bac_level
}
messages.push blinkyMessage
if bac_level > 4
carMessage = {
devices: [rally_uuid],
payload:
m : 'lock'
}
messages.push carMessage
debouncedSMS()
deboucedSend(meshblu, messages);
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.