entities listlengths 1 8.61k | max_stars_repo_path stringlengths 7 172 | max_stars_repo_name stringlengths 5 89 | max_stars_count int64 0 82k | content stringlengths 14 1.05M | id stringlengths 2 6 | new_content stringlengths 15 1.05M | modified bool 1 class | references stringlengths 29 1.05M |
|---|---|---|---|---|---|---|---|---|
[
{
"context": "irl.Definition 'user', {}, (f) ->\n f.name = 'Peter'\n\n attribute = definition.attributes.name\n\n ",
"end": 276,
"score": 0.9996064901351929,
"start": 271,
"tag": "NAME",
"value": "Peter"
},
{
"context": "eql('name')\n expect(attribute.value()).to.eql('Pe... | spec/rogue_girl/definition_spec.coffee | smolnar/rogue-girl | 0 | #= require spec_helper
describe 'RogueGirl.Definition', ->
beforeEach ->
RogueGirl.define 'role', ->
RogueGirl.define 'awesome role', type: 'role', ->
it 'defines field value', ->
definition = new RogueGirl.Definition 'user', {}, (f) ->
f.name = 'Peter'
attribute = definition.attributes.name
expect(attribute.name).to.eql('name')
expect(attribute.value()).to.eql('Peter')
it 'defines field value as function', ->
n = 0
definition = new RogueGirl.Definition 'user', {}, (f) ->
f.name = -> "Peter ##{n += 1}"
attribute = definition.attributes.name
expect(attribute.name).to.eql('name')
expect(attribute.value()).to.eql('Peter #1')
expect(attribute.value()).to.eql('Peter #2')
it 'defines field with a sequence', ->
definition = new RogueGirl.Definition 'user', {}, (f) ->
@sequence 'email', (n) -> "peter_#{n}@parker.com"
attribute = definition.attributes.email
expect(attribute.name).to.eql('email')
expect(attribute.value()).to.eql('peter_0@parker.com')
expect(attribute.value()).to.eql('peter_1@parker.com')
it 'defines field with a sequence within trait', ->
definition = new RogueGirl.Definition 'user', {}, (f) ->
@sequence 'email', (n) -> "peter_#{n}@parker.com"
@trait 'admin', ->
@sequence 'email', (n) -> "admin_#{n}@parker.com"
attribute = definition.attributes.email
expect(attribute.name).to.eql('email')
expect(attribute.value()).to.eql('peter_0@parker.com')
expect(attribute.value()).to.eql('peter_1@parker.com')
attribute = definition.traits.admin.email
expect(attribute.name).to.eql('email')
expect(attribute.value()).to.eql("admin_2@parker.com")
it 'defines an association', ->
RogueGirl.define 'role', (f) ->
f.name = 'default'
definition = new RogueGirl.Definition 'user', {}, (f) ->
@association 'role'
attribute = definition.attributes.role
expect(attribute.name).to.eql('role')
expect(attribute.parent).to.eql('role')
expect(attribute.child).to.eql('user')
expect(attribute.params[0]).to.eql('role')
expect(attribute.params.length).to.eql(1)
it 'defines an association within trait', ->
definition = new RogueGirl.Definition 'user', {}, (f) ->
@association 'role'
@trait 'admin', ->
@association 'role', name: 'admin'
attribute = definition.attributes.role
expect(attribute.name).to.eql('role')
expect(attribute.parent).to.eql('role')
expect(attribute.child).to.eql('user')
expect(attribute.params[0]).to.eql('role')
expect(attribute.params.length).to.eql(1)
attribute = definition.traits.admin.role
expect(attribute.name).to.eql('role')
expect(attribute.parent).to.eql('role')
expect(attribute.child).to.eql('user')
expect(attribute.params[0]).to.eql('role')
expect(attribute.params[1]).to.eql(name: 'admin')
expect(attribute.params.length).to.eql(2)
it 'defines an association with custom name', ->
definition = new RogueGirl.Definition 'user', {}, (f) ->
@association 'awesome role'
attribute = definition.attributes['awesome role']
expect(attribute.name).to.eql('awesome role')
expect(attribute.parent).to.eql('role')
expect(attribute.child).to.eql('user')
expect(attribute.params[0]).to.eql('awesome role')
expect(attribute.params.length).to.eql(1)
describe '#buildAttributes', ->
beforeEach ->
@factory = mock('RogueGirl.Factory', create: ->)
it 'builds attributes', ->
RogueGirl.define 'permission', (f) ->
f.name = 'basic'
definition = new RogueGirl.Definition 'user', {}, (f) ->
f.name = 'Peter'
@sequence 'email', (n) -> "peter_#{n}@parker.com"
@association 'permission'
@record = mock(get: ->)
@factory
.expects('create')
.withExactArgs('permission')
.returns(@record.object)
.once()
@record.mock
.expects('get')
.withExactArgs('id')
.returns(1)
.once()
attributes = {}
callbacks = definition.buildAttributes(attributes)
expect(attributes).to.eql(
id: 1
name: 'Peter'
email: 'peter_0@parker.com'
permission:
__association__:
parent: 'permission'
child: 'user'
record: @record.object
)
expect(callbacks.length).to.eql(1)
it 'builds attributes with traits', ->
definition = new RogueGirl.Definition 'user', {}, (f) ->
f.name = 'Peter'
f.email = 'peter@parker.com'
@trait 'as admin', (f) ->
f.name = 'Admin'
attributes = {}
definition.buildAttributes(attributes, ['as admin'])
expect(attributes).to.eql(id: 1, name: 'Admin', email: 'peter@parker.com')
| 221587 | #= require spec_helper
describe 'RogueGirl.Definition', ->
beforeEach ->
RogueGirl.define 'role', ->
RogueGirl.define 'awesome role', type: 'role', ->
it 'defines field value', ->
definition = new RogueGirl.Definition 'user', {}, (f) ->
f.name = '<NAME>'
attribute = definition.attributes.name
expect(attribute.name).to.eql('name')
expect(attribute.value()).to.eql('<NAME>')
it 'defines field value as function', ->
n = 0
definition = new RogueGirl.Definition 'user', {}, (f) ->
f.name = -> "<NAME> ##{n += 1}"
attribute = definition.attributes.name
expect(attribute.name).to.eql('name')
expect(attribute.value()).to.eql('<NAME> #1')
expect(attribute.value()).to.eql('<NAME> #2')
it 'defines field with a sequence', ->
definition = new RogueGirl.Definition 'user', {}, (f) ->
@sequence 'email', (n) -> "<EMAIL>"
attribute = definition.attributes.email
expect(attribute.name).to.eql('email')
expect(attribute.value()).to.eql('<EMAIL>')
expect(attribute.value()).to.eql('<EMAIL>')
it 'defines field with a sequence within trait', ->
definition = new RogueGirl.Definition 'user', {}, (f) ->
@sequence 'email', (n) -> "<EMAIL>"
@trait 'admin', ->
@sequence 'email', (n) -> "<EMAIL>"
attribute = definition.attributes.email
expect(attribute.name).to.eql('email')
expect(attribute.value()).to.eql('<EMAIL>')
expect(attribute.value()).to.eql('<EMAIL>')
attribute = definition.traits.admin.email
expect(attribute.name).to.eql('email')
expect(attribute.value()).to.eql("admin_<EMAIL>")
it 'defines an association', ->
RogueGirl.define 'role', (f) ->
f.name = 'default'
definition = new RogueGirl.Definition 'user', {}, (f) ->
@association 'role'
attribute = definition.attributes.role
expect(attribute.name).to.eql('role')
expect(attribute.parent).to.eql('role')
expect(attribute.child).to.eql('user')
expect(attribute.params[0]).to.eql('role')
expect(attribute.params.length).to.eql(1)
it 'defines an association within trait', ->
definition = new RogueGirl.Definition 'user', {}, (f) ->
@association 'role'
@trait 'admin', ->
@association 'role', name: 'admin'
attribute = definition.attributes.role
expect(attribute.name).to.eql('role')
expect(attribute.parent).to.eql('role')
expect(attribute.child).to.eql('user')
expect(attribute.params[0]).to.eql('role')
expect(attribute.params.length).to.eql(1)
attribute = definition.traits.admin.role
expect(attribute.name).to.eql('role')
expect(attribute.parent).to.eql('role')
expect(attribute.child).to.eql('user')
expect(attribute.params[0]).to.eql('role')
expect(attribute.params[1]).to.eql(name: 'admin')
expect(attribute.params.length).to.eql(2)
it 'defines an association with custom name', ->
definition = new RogueGirl.Definition 'user', {}, (f) ->
@association 'awesome role'
attribute = definition.attributes['awesome role']
expect(attribute.name).to.eql('awesome role')
expect(attribute.parent).to.eql('role')
expect(attribute.child).to.eql('user')
expect(attribute.params[0]).to.eql('awesome role')
expect(attribute.params.length).to.eql(1)
describe '#buildAttributes', ->
beforeEach ->
@factory = mock('RogueGirl.Factory', create: ->)
it 'builds attributes', ->
RogueGirl.define 'permission', (f) ->
f.name = 'basic'
definition = new RogueGirl.Definition 'user', {}, (f) ->
f.name = '<NAME>'
@sequence 'email', (n) -> "<EMAIL>"
@association 'permission'
@record = mock(get: ->)
@factory
.expects('create')
.withExactArgs('permission')
.returns(@record.object)
.once()
@record.mock
.expects('get')
.withExactArgs('id')
.returns(1)
.once()
attributes = {}
callbacks = definition.buildAttributes(attributes)
expect(attributes).to.eql(
id: 1
name: '<NAME>'
email: '<EMAIL>'
permission:
__association__:
parent: 'permission'
child: 'user'
record: @record.object
)
expect(callbacks.length).to.eql(1)
it 'builds attributes with traits', ->
definition = new RogueGirl.Definition 'user', {}, (f) ->
f.name = '<NAME>'
f.email = '<EMAIL>'
@trait 'as admin', (f) ->
f.name = 'Admin'
attributes = {}
definition.buildAttributes(attributes, ['as admin'])
expect(attributes).to.eql(id: 1, name: 'Admin', email: '<EMAIL>')
| true | #= require spec_helper
describe 'RogueGirl.Definition', ->
beforeEach ->
RogueGirl.define 'role', ->
RogueGirl.define 'awesome role', type: 'role', ->
it 'defines field value', ->
definition = new RogueGirl.Definition 'user', {}, (f) ->
f.name = 'PI:NAME:<NAME>END_PI'
attribute = definition.attributes.name
expect(attribute.name).to.eql('name')
expect(attribute.value()).to.eql('PI:NAME:<NAME>END_PI')
it 'defines field value as function', ->
n = 0
definition = new RogueGirl.Definition 'user', {}, (f) ->
f.name = -> "PI:NAME:<NAME>END_PI ##{n += 1}"
attribute = definition.attributes.name
expect(attribute.name).to.eql('name')
expect(attribute.value()).to.eql('PI:NAME:<NAME>END_PI #1')
expect(attribute.value()).to.eql('PI:NAME:<NAME>END_PI #2')
it 'defines field with a sequence', ->
definition = new RogueGirl.Definition 'user', {}, (f) ->
@sequence 'email', (n) -> "PI:EMAIL:<EMAIL>END_PI"
attribute = definition.attributes.email
expect(attribute.name).to.eql('email')
expect(attribute.value()).to.eql('PI:EMAIL:<EMAIL>END_PI')
expect(attribute.value()).to.eql('PI:EMAIL:<EMAIL>END_PI')
it 'defines field with a sequence within trait', ->
definition = new RogueGirl.Definition 'user', {}, (f) ->
@sequence 'email', (n) -> "PI:EMAIL:<EMAIL>END_PI"
@trait 'admin', ->
@sequence 'email', (n) -> "PI:EMAIL:<EMAIL>END_PI"
attribute = definition.attributes.email
expect(attribute.name).to.eql('email')
expect(attribute.value()).to.eql('PI:EMAIL:<EMAIL>END_PI')
expect(attribute.value()).to.eql('PI:EMAIL:<EMAIL>END_PI')
attribute = definition.traits.admin.email
expect(attribute.name).to.eql('email')
expect(attribute.value()).to.eql("admin_PI:EMAIL:<EMAIL>END_PI")
it 'defines an association', ->
RogueGirl.define 'role', (f) ->
f.name = 'default'
definition = new RogueGirl.Definition 'user', {}, (f) ->
@association 'role'
attribute = definition.attributes.role
expect(attribute.name).to.eql('role')
expect(attribute.parent).to.eql('role')
expect(attribute.child).to.eql('user')
expect(attribute.params[0]).to.eql('role')
expect(attribute.params.length).to.eql(1)
it 'defines an association within trait', ->
definition = new RogueGirl.Definition 'user', {}, (f) ->
@association 'role'
@trait 'admin', ->
@association 'role', name: 'admin'
attribute = definition.attributes.role
expect(attribute.name).to.eql('role')
expect(attribute.parent).to.eql('role')
expect(attribute.child).to.eql('user')
expect(attribute.params[0]).to.eql('role')
expect(attribute.params.length).to.eql(1)
attribute = definition.traits.admin.role
expect(attribute.name).to.eql('role')
expect(attribute.parent).to.eql('role')
expect(attribute.child).to.eql('user')
expect(attribute.params[0]).to.eql('role')
expect(attribute.params[1]).to.eql(name: 'admin')
expect(attribute.params.length).to.eql(2)
it 'defines an association with custom name', ->
definition = new RogueGirl.Definition 'user', {}, (f) ->
@association 'awesome role'
attribute = definition.attributes['awesome role']
expect(attribute.name).to.eql('awesome role')
expect(attribute.parent).to.eql('role')
expect(attribute.child).to.eql('user')
expect(attribute.params[0]).to.eql('awesome role')
expect(attribute.params.length).to.eql(1)
describe '#buildAttributes', ->
beforeEach ->
@factory = mock('RogueGirl.Factory', create: ->)
it 'builds attributes', ->
RogueGirl.define 'permission', (f) ->
f.name = 'basic'
definition = new RogueGirl.Definition 'user', {}, (f) ->
f.name = 'PI:NAME:<NAME>END_PI'
@sequence 'email', (n) -> "PI:EMAIL:<EMAIL>END_PI"
@association 'permission'
@record = mock(get: ->)
@factory
.expects('create')
.withExactArgs('permission')
.returns(@record.object)
.once()
@record.mock
.expects('get')
.withExactArgs('id')
.returns(1)
.once()
attributes = {}
callbacks = definition.buildAttributes(attributes)
expect(attributes).to.eql(
id: 1
name: 'PI:NAME:<NAME>END_PI'
email: 'PI:EMAIL:<EMAIL>END_PI'
permission:
__association__:
parent: 'permission'
child: 'user'
record: @record.object
)
expect(callbacks.length).to.eql(1)
it 'builds attributes with traits', ->
definition = new RogueGirl.Definition 'user', {}, (f) ->
f.name = 'PI:NAME:<NAME>END_PI'
f.email = 'PI:EMAIL:<EMAIL>END_PI'
@trait 'as admin', (f) ->
f.name = 'Admin'
attributes = {}
definition.buildAttributes(attributes, ['as admin'])
expect(attributes).to.eql(id: 1, name: 'Admin', email: 'PI:EMAIL:<EMAIL>END_PI')
|
[
{
"context": "est\n \t\n Updates an existing manufacturer\n@author Nathan Klick\n@copyright QRef 2012\n###\nclass AircraftManufactur",
"end": 626,
"score": 0.9998850226402283,
"start": 614,
"tag": "NAME",
"value": "Nathan Klick"
}
] | Workspace/QRef/NodeServer/src/router/routes/ajax/aircraft/AircraftManufacturerRoute.coffee | qrefdev/qref | 0 | AjaxRoute = require('../../../AjaxRoute')
AjaxResponse = require('../../../../serialization/AjaxResponse')
UserAuth = require('../../../../security/UserAuth')
QRefDatabase = require('../../../../db/QRefDatabase')
###
Service route that allows the retrieval of all manufacturers and the creation of new manufacturers.
@example Service Methods (see {CreateAircraftManufacturerAjaxRequest})
Request Format: application/json
Response Format: application/json
POST /services/ajax/aircraft/manufacturers
@BODY - (Required) CreateAircraftManufacturerAjaxRequest
Updates an existing manufacturer
@author Nathan Klick
@copyright QRef 2012
###
class AircraftManufacturerRoute extends AjaxRoute
constructor: () ->
super [{ method: 'POST', path: '/manufacturer' }, { method: 'GET', path: '/manufacturer' }]
post: (req, res) =>
if not @.isValidRequest(req)
resp = new AjaxResponse()
resp.failure('Bad Request', 400)
res.json(resp, 200)
return
db = QRefDatabase.instance()
token = req.param('token')
manufacturerId = req.body.id
UserAuth.validateToken(token, (err, isTokenValid) =>
if err? or not isTokenValid == true
resp = new AjaxResponse()
resp.failure('Not Authorized', 403)
res.json(resp, 200)
return
UserAuth.isInAnyRole(token, ['Administrators'], (err, success) =>
if err?
resp = new AjaxResponse()
resp.failure(err, 403)
res.json(resp, 200)
return
if success
@.getManufacturer(manufacturerId, (err, manufacturer) =>
if err?
resp = new AjaxResponse()
resp.failure(err, 403)
res.json(resp, 200)
return
manufacturer.name = req.body.name
manufacturer.description = req.body.description
manufacturer.save((err) =>
if err?
resp = new AjaxResponse()
resp.failure(err, 403)
res.json(resp, 200)
return
resp = new AjaxResponse();
resp.addRecords([manufacturer])
res.json(resp, 200)
return
)
)
else
resp = new AjaxResponse()
resp.failure('You do not have the proper permissions')
res.json(resp, 200)
return
)
)
getManufacturer: (id, callback) =>
db = QRefDatabase.instance()
db.AircraftManufacturer.findOne({ _id: id}, (err, manufacturer) =>
if err?
callback(err, null)
return
if manufacturer?
callback(null, manufacturer)
return
else
callback(new Error('No manufacturer found for that id'), null)
return
)
isValidRequest: (req) ->
if (req.query? and req.query?.token?) or
(req.body? and req.body?.token? and req.body?.mode? and req.body.mode == 'ajax' && req.body?.id? && req.body?.name? && req.body?.description?)
true
else
false
module.exports = new AircraftManufacturerRoute() | 124450 | AjaxRoute = require('../../../AjaxRoute')
AjaxResponse = require('../../../../serialization/AjaxResponse')
UserAuth = require('../../../../security/UserAuth')
QRefDatabase = require('../../../../db/QRefDatabase')
###
Service route that allows the retrieval of all manufacturers and the creation of new manufacturers.
@example Service Methods (see {CreateAircraftManufacturerAjaxRequest})
Request Format: application/json
Response Format: application/json
POST /services/ajax/aircraft/manufacturers
@BODY - (Required) CreateAircraftManufacturerAjaxRequest
Updates an existing manufacturer
@author <NAME>
@copyright QRef 2012
###
class AircraftManufacturerRoute extends AjaxRoute
constructor: () ->
super [{ method: 'POST', path: '/manufacturer' }, { method: 'GET', path: '/manufacturer' }]
post: (req, res) =>
if not @.isValidRequest(req)
resp = new AjaxResponse()
resp.failure('Bad Request', 400)
res.json(resp, 200)
return
db = QRefDatabase.instance()
token = req.param('token')
manufacturerId = req.body.id
UserAuth.validateToken(token, (err, isTokenValid) =>
if err? or not isTokenValid == true
resp = new AjaxResponse()
resp.failure('Not Authorized', 403)
res.json(resp, 200)
return
UserAuth.isInAnyRole(token, ['Administrators'], (err, success) =>
if err?
resp = new AjaxResponse()
resp.failure(err, 403)
res.json(resp, 200)
return
if success
@.getManufacturer(manufacturerId, (err, manufacturer) =>
if err?
resp = new AjaxResponse()
resp.failure(err, 403)
res.json(resp, 200)
return
manufacturer.name = req.body.name
manufacturer.description = req.body.description
manufacturer.save((err) =>
if err?
resp = new AjaxResponse()
resp.failure(err, 403)
res.json(resp, 200)
return
resp = new AjaxResponse();
resp.addRecords([manufacturer])
res.json(resp, 200)
return
)
)
else
resp = new AjaxResponse()
resp.failure('You do not have the proper permissions')
res.json(resp, 200)
return
)
)
getManufacturer: (id, callback) =>
db = QRefDatabase.instance()
db.AircraftManufacturer.findOne({ _id: id}, (err, manufacturer) =>
if err?
callback(err, null)
return
if manufacturer?
callback(null, manufacturer)
return
else
callback(new Error('No manufacturer found for that id'), null)
return
)
isValidRequest: (req) ->
if (req.query? and req.query?.token?) or
(req.body? and req.body?.token? and req.body?.mode? and req.body.mode == 'ajax' && req.body?.id? && req.body?.name? && req.body?.description?)
true
else
false
module.exports = new AircraftManufacturerRoute() | true | AjaxRoute = require('../../../AjaxRoute')
AjaxResponse = require('../../../../serialization/AjaxResponse')
UserAuth = require('../../../../security/UserAuth')
QRefDatabase = require('../../../../db/QRefDatabase')
###
Service route that allows the retrieval of all manufacturers and the creation of new manufacturers.
@example Service Methods (see {CreateAircraftManufacturerAjaxRequest})
Request Format: application/json
Response Format: application/json
POST /services/ajax/aircraft/manufacturers
@BODY - (Required) CreateAircraftManufacturerAjaxRequest
Updates an existing manufacturer
@author PI:NAME:<NAME>END_PI
@copyright QRef 2012
###
class AircraftManufacturerRoute extends AjaxRoute
constructor: () ->
super [{ method: 'POST', path: '/manufacturer' }, { method: 'GET', path: '/manufacturer' }]
post: (req, res) =>
if not @.isValidRequest(req)
resp = new AjaxResponse()
resp.failure('Bad Request', 400)
res.json(resp, 200)
return
db = QRefDatabase.instance()
token = req.param('token')
manufacturerId = req.body.id
UserAuth.validateToken(token, (err, isTokenValid) =>
if err? or not isTokenValid == true
resp = new AjaxResponse()
resp.failure('Not Authorized', 403)
res.json(resp, 200)
return
UserAuth.isInAnyRole(token, ['Administrators'], (err, success) =>
if err?
resp = new AjaxResponse()
resp.failure(err, 403)
res.json(resp, 200)
return
if success
@.getManufacturer(manufacturerId, (err, manufacturer) =>
if err?
resp = new AjaxResponse()
resp.failure(err, 403)
res.json(resp, 200)
return
manufacturer.name = req.body.name
manufacturer.description = req.body.description
manufacturer.save((err) =>
if err?
resp = new AjaxResponse()
resp.failure(err, 403)
res.json(resp, 200)
return
resp = new AjaxResponse();
resp.addRecords([manufacturer])
res.json(resp, 200)
return
)
)
else
resp = new AjaxResponse()
resp.failure('You do not have the proper permissions')
res.json(resp, 200)
return
)
)
getManufacturer: (id, callback) =>
db = QRefDatabase.instance()
db.AircraftManufacturer.findOne({ _id: id}, (err, manufacturer) =>
if err?
callback(err, null)
return
if manufacturer?
callback(null, manufacturer)
return
else
callback(new Error('No manufacturer found for that id'), null)
return
)
isValidRequest: (req) ->
if (req.query? and req.query?.token?) or
(req.body? and req.body?.token? and req.body?.mode? and req.body.mode == 'ajax' && req.body?.id? && req.body?.name? && req.body?.description?)
true
else
false
module.exports = new AircraftManufacturerRoute() |
[
{
"context": "elected: true })\n\n copyCurrentUrl: ->\n # TODO(ilya): When the following bug is fixed, revisit this a",
"end": 9755,
"score": 0.7520865201950073,
"start": 9751,
"tag": "NAME",
"value": "ilya"
},
{
"context": ".altKey) && event.keyCode > 31) || (\n # TODO(phil... | content_scripts/vimium_frontend.coffee | scanny/vimium | 1 | #
# This content script takes input from its webpage and executes commands locally on behalf of the background
# page. It must be run prior to domReady so that we perform some operations very early. We tell the
# background page that we're in domReady and ready to accept normal commands by connectiong to a port named
# "domReady".
#
window.handlerStack = new HandlerStack
insertModeLock = null
findMode = false
findModeQuery = { rawQuery: "", matchCount: 0 }
findModeQueryHasResults = false
findModeAnchorNode = null
isShowingHelpDialog = false
keyPort = null
# Users can disable Vimium on URL patterns via the settings page.
isEnabledForUrl = true
# The user's operating system.
currentCompletionKeys = null
validFirstKeys = null
# The types in <input type="..."> that we consider for focusInput command. Right now this is recalculated in
# each content script. Alternatively we could calculate it once in the background page and use a request to
# fetch it each time.
# Should we include the HTML5 date pickers here?
# The corresponding XPath for such elements.
textInputXPath = (->
textInputTypes = ["text", "search", "email", "url", "number", "password"]
inputElements = ["input[" +
"(" + textInputTypes.map((type) -> '@type="' + type + '"').join(" or ") + "or not(@type))" +
" and not(@disabled or @readonly)]",
"textarea", "*[@contenteditable='' or translate(@contenteditable, 'TRUE', 'true')='true']"]
DomUtils.makeXPath(inputElements)
)()
#
# settings provides a browser-global localStorage-backed dict. get() and set() are synchronous, but load()
# must be called beforehand to ensure get() will return up-to-date values.
#
settings =
port: null
values: {}
loadedValues: 0
valuesToLoad: ["scrollStepSize", "linkHintCharacters", "linkHintNumbers", "filterLinkHints", "hideHud",
"previousPatterns", "nextPatterns", "findModeRawQuery", "regexFindMode", "userDefinedLinkHintCss",
"helpDialog_showAdvancedCommands"]
isLoaded: false
eventListeners: {}
init: ->
@port = chrome.runtime.connect({ name: "settings" })
@port.onMessage.addListener(@receiveMessage)
get: (key) -> @values[key]
set: (key, value) ->
@init() unless @port
@values[key] = value
@port.postMessage({ operation: "set", key: key, value: value })
load: ->
@init() unless @port
for i of @valuesToLoad
@port.postMessage({ operation: "get", key: @valuesToLoad[i] })
receiveMessage: (args) ->
# not using 'this' due to issues with binding on callback
settings.values[args.key] = args.value
# since load() can be called more than once, loadedValues can be greater than valuesToLoad, but we test
# for equality so initializeOnReady only runs once
if (++settings.loadedValues == settings.valuesToLoad.length)
settings.isLoaded = true
listener = null
while (listener = settings.eventListeners["load"].pop())
listener()
addEventListener: (eventName, callback) ->
if (!(eventName of @eventListeners))
@eventListeners[eventName] = []
@eventListeners[eventName].push(callback)
#
# Give this frame a unique id.
#
frameId = Math.floor(Math.random()*999999999)
hasModifiersRegex = /^<([amc]-)+.>/
#
# Complete initialization work that sould be done prior to DOMReady.
#
initializePreDomReady = ->
settings.addEventListener("load", LinkHints.init.bind(LinkHints))
settings.load()
Scroller.init()
checkIfEnabledForUrl()
refreshCompletionKeys()
# Send the key to the key handler in the background page.
keyPort = chrome.runtime.connect({ name: "keyDown" })
requestHandlers =
hideUpgradeNotification: -> HUD.hideUpgradeNotification()
showUpgradeNotification: (request) -> HUD.showUpgradeNotification(request.version)
showHUDforDuration: (request) -> HUD.showForDuration request.text, request.duration
toggleHelpDialog: (request) -> toggleHelpDialog(request.dialogHtml, request.frameId)
focusFrame: (request) -> if (frameId == request.frameId) then focusThisFrame(request.highlight)
refreshCompletionKeys: refreshCompletionKeys
getScrollPosition: -> scrollX: window.scrollX, scrollY: window.scrollY
setScrollPosition: (request) -> setScrollPosition request.scrollX, request.scrollY
executePageCommand: executePageCommand
getActiveState: -> { enabled: isEnabledForUrl }
disableVimium: disableVimium
chrome.runtime.onMessage.addListener (request, sender, sendResponse) ->
# in the options page, we will receive requests from both content and background scripts. ignore those
# from the former.
return if sender.tab and not sender.tab.url.startsWith 'chrome-extension://'
return unless isEnabledForUrl or request.name == 'getActiveState'
sendResponse requestHandlers[request.name](request, sender)
# Ensure the sendResponse callback is freed.
false
#
# This is called once the background page has told us that Vimium should be enabled for the current URL.
#
initializeWhenEnabled = ->
document.addEventListener("keydown", onKeydown, true)
document.addEventListener("keypress", onKeypress, true)
document.addEventListener("keyup", onKeyup, true)
document.addEventListener("focus", onFocusCapturePhase, true)
document.addEventListener("blur", onBlurCapturePhase, true)
document.addEventListener("DOMActivate", onDOMActivate, true)
enterInsertModeIfElementIsFocused()
#
# Used to disable Vimium without needing to reload the page.
# This is called if the current page's url is blacklisted using the popup UI.
#
disableVimium = ->
document.removeEventListener("keydown", onKeydown, true)
document.removeEventListener("keypress", onKeypress, true)
document.removeEventListener("keyup", onKeyup, true)
document.removeEventListener("focus", onFocusCapturePhase, true)
document.removeEventListener("blur", onBlurCapturePhase, true)
document.removeEventListener("DOMActivate", onDOMActivate, true)
isEnabledForUrl = false
#
# The backend needs to know which frame has focus.
#
window.addEventListener "focus", ->
# settings may have changed since the frame last had focus
settings.load()
chrome.runtime.sendMessage({ handler: "frameFocused", frameId: frameId })
#
# Initialization tasks that must wait for the document to be ready.
#
initializeOnDomReady = ->
registerFrameIfSizeAvailable(window.top == window.self)
enterInsertModeIfElementIsFocused() if isEnabledForUrl
# Tell the background page we're in the dom ready state.
chrome.runtime.connect({ name: "domReady" })
# This is a little hacky but sometimes the size wasn't available on domReady?
registerFrameIfSizeAvailable = (is_top) ->
if (innerWidth != undefined && innerWidth != 0 && innerHeight != undefined && innerHeight != 0)
chrome.runtime.sendMessage(
handler: "registerFrame"
frameId: frameId
area: innerWidth * innerHeight
is_top: is_top
total: frames.length + 1)
else
setTimeout((-> registerFrameIfSizeAvailable(is_top)), 100)
#
# Enters insert mode if the currently focused element in the DOM is focusable.
#
enterInsertModeIfElementIsFocused = ->
if (document.activeElement && isEditable(document.activeElement) && !findMode)
enterInsertModeWithoutShowingIndicator(document.activeElement)
onDOMActivate = (event) -> handlerStack.bubbleEvent 'DOMActivate', event
executePageCommand = (request) ->
return unless frameId == request.frameId
if (request.passCountToFunction)
Utils.invokeCommandString(request.command, [request.count])
else
Utils.invokeCommandString(request.command) for i in [0...request.count]
refreshCompletionKeys(request)
setScrollPosition = (scrollX, scrollY) ->
if (scrollX > 0 || scrollY > 0)
DomUtils.documentReady(-> window.scrollTo(scrollX, scrollY))
#
# Called from the backend in order to change frame focus.
#
window.focusThisFrame = (shouldHighlight) ->
window.focus()
if (document.body && shouldHighlight)
borderWas = document.body.style.border
document.body.style.border = '5px solid yellow'
setTimeout((-> document.body.style.border = borderWas), 200)
extend window,
scrollToBottom: -> Scroller.scrollTo "y", "max"
scrollToTop: -> Scroller.scrollTo "y", 0
scrollToLeft: -> Scroller.scrollTo "x", 0
scrollToRight: -> Scroller.scrollTo "x", "max"
scrollUp: -> Scroller.scrollBy "y", -1 * settings.get("scrollStepSize")
scrollDown: -> Scroller.scrollBy "y", settings.get("scrollStepSize")
scrollPageUp: -> Scroller.scrollBy "y", "viewSize", -1/2
scrollPageDown: -> Scroller.scrollBy "y", "viewSize", 1/2
scrollFullPageUp: -> Scroller.scrollBy "y", "viewSize", -1
scrollFullPageDown: -> Scroller.scrollBy "y", "viewSize"
scrollLeft: -> Scroller.scrollBy "x", -1 * settings.get("scrollStepSize")
scrollRight: -> Scroller.scrollBy "x", settings.get("scrollStepSize")
extend window,
reload: -> window.location.reload()
goBack: (count) -> history.go(-count)
goForward: (count) -> history.go(count)
goUp: (count) ->
url = window.location.href
if (url[url.length - 1] == "/")
url = url.substring(0, url.length - 1)
urlsplit = url.split("/")
# make sure we haven't hit the base domain yet
if (urlsplit.length > 3)
urlsplit = urlsplit.slice(0, Math.max(3, urlsplit.length - count))
window.location.href = urlsplit.join('/')
goToRoot: () ->
window.location.href = window.location.origin
toggleViewSource: ->
chrome.runtime.sendMessage { handler: "getCurrentTabUrl" }, (url) ->
if (url.substr(0, 12) == "view-source:")
url = url.substr(12, url.length - 12)
else
url = "view-source:" + url
chrome.runtime.sendMessage({ handler: "openUrlInNewTab", url: url, selected: true })
copyCurrentUrl: ->
# TODO(ilya): When the following bug is fixed, revisit this approach of sending back to the background
# page to copy.
# http://code.google.com/p/chromium/issues/detail?id=55188
chrome.runtime.sendMessage { handler: "getCurrentTabUrl" }, (url) ->
chrome.runtime.sendMessage { handler: "copyToClipboard", data: url }
HUD.showForDuration("Yanked URL", 1000)
focusInput: (count) ->
  # Focus the first input element on the page, and create overlays to highlight all the input elements, with
  # the currently-focused element highlighted specially. Tabbing will shift focus to the next input element.
  # Pressing any other key will remove the overlays and the special tab behavior.
  resultSet = DomUtils.evaluateXPath(textInputXPath, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE)

  # Keep only the inputs that currently have a visible client rect.
  visibleInputs =
    for i in [0...resultSet.snapshotLength] by 1
      element = resultSet.snapshotItem(i)
      rect = DomUtils.getVisibleClientRect(element)
      continue if rect == null
      { element: element, rect: rect }

  return if visibleInputs.length == 0

  # count is 1-based; clamp it so an over-large count selects the last visible input.
  selectedInputIndex = Math.min(count - 1, visibleInputs.length - 1)

  visibleInputs[selectedInputIndex].element.focus()

  # With a single input there is nothing to cycle through, so skip the overlay machinery.
  return if visibleInputs.length == 1

  # Build one absolutely-positioned hint overlay per visible input.
  hints = for tuple in visibleInputs
    hint = document.createElement("div")
    hint.className = "vimiumReset internalVimiumInputHint vimiumInputHint"

    # minus 1 for the border
    hint.style.left = (tuple.rect.left - 1) + window.scrollX + "px"
    hint.style.top = (tuple.rect.top - 1) + window.scrollY + "px"
    hint.style.width = tuple.rect.width + "px"
    hint.style.height = tuple.rect.height + "px"

    hint

  hints[selectedInputIndex].classList.add 'internalVimiumSelectedInputHint'
  hintContainingDiv = DomUtils.addElementList(hints,
    { id: "vimiumInputMarkerContainer", className: "vimiumReset" })

  # Temporary keydown handler: Tab / Shift-Tab cycles the selection through the inputs; any key
  # other than Tab or a bare Shift removes the overlays and uninstalls this handler.
  # NOTE(review): the true/false return values follow handlerStack's suppression convention —
  # confirm against handler_stack's implementation.
  handlerStack.push keydown: (event) ->
    if event.keyCode == KeyboardUtils.keyCodes.tab
      hints[selectedInputIndex].classList.remove 'internalVimiumSelectedInputHint'
      if event.shiftKey
        if --selectedInputIndex == -1
          selectedInputIndex = hints.length - 1
      else
        if ++selectedInputIndex == hints.length
          selectedInputIndex = 0
      hints[selectedInputIndex].classList.add 'internalVimiumSelectedInputHint'
      visibleInputs[selectedInputIndex].element.focus()
    else unless event.keyCode == KeyboardUtils.keyCodes.shiftKey
      DomUtils.removeElement hintContainingDiv
      @remove()
      return true

    false
#
# Sends everything except i & ESC to the handler in the background page. i & ESC are special because they
# control insert mode, which is local state to the page. The key will be either a single ascii letter or a
# key-modifier pair, e.g. <c-a> for control a.
#
# Note that some keys will only register keydown events and not keystroke events, e.g. ESC.
#
onKeypress = (event) ->
  # Give the handler stack first refusal; it may consume the event entirely.
  return unless handlerStack.bubbleEvent('keypress', event)

  keyChar = ""

  # Ignore modifier keys by themselves.
  if (event.keyCode > 31)
    keyChar = String.fromCharCode(event.charCode)

    # Enter insert mode when the user enables the native find interface.
    if (keyChar == "f" && KeyboardUtils.isPrimaryModifierKey(event))
      enterInsertModeWithoutShowingIndicator()
      return

    if (keyChar)
      if (findMode)
        handleKeyCharForFindMode(keyChar)
        DomUtils.suppressEvent(event)
      else if (!isInsertMode() && !findMode)
        # Suppress the event when the key continues a partially-typed Vimium command.
        if (currentCompletionKeys.indexOf(keyChar) != -1)
          DomUtils.suppressEvent(event)

        # Forward the keystroke to the background page's key handler.
        keyPort.postMessage({ keyChar:keyChar, frameId:frameId })
# Keydown dispatcher: derives a <modifier->key notation for special keys, then routes the event
# depending on the current mode (insert mode, find mode, help dialog, or normal mode).
onKeydown = (event) ->
  # Give the handler stack first refusal; it may consume the event entirely.
  return unless handlerStack.bubbleEvent('keydown', event)

  keyChar = ""

  # handle special keys, and normal input keys with modifiers being pressed. don't handle shiftKey alone (to
  # avoid / being interpreted as ?)
  if (((event.metaKey || event.ctrlKey || event.altKey) && event.keyCode > 31) || (
      # TODO(philc): some events don't have a keyidentifier. How is that possible?
      event.keyIdentifier && event.keyIdentifier.slice(0, 2) != "U+"))
    keyChar = KeyboardUtils.getKeyChar(event)
    # Again, ignore just modifiers. Maybe this should replace the keyCode>31 condition.
    if (keyChar != "")
      modifiers = []

      if (event.shiftKey)
        keyChar = keyChar.toUpperCase()
      if (event.metaKey)
        modifiers.push("m")
      if (event.ctrlKey)
        modifiers.push("c")
      if (event.altKey)
        modifiers.push("a")

      # Prepend each modifier, e.g. "a" with ctrl+alt becomes "c-a-a" before bracketing.
      for i of modifiers
        keyChar = modifiers[i] + "-" + keyChar

      if (modifiers.length > 0 || keyChar.length > 1)
        keyChar = "<" + keyChar + ">"

  if (isInsertMode() && KeyboardUtils.isEscape(event))
    # Note that we can't programmatically blur out of Flash embeds from Javascript.
    if (!isEmbed(event.srcElement))
      # Remove focus so the user can't just get himself back into insert mode by typing in the same input
      # box.
      if (isEditable(event.srcElement))
        event.srcElement.blur()
      exitInsertMode()
      DomUtils.suppressEvent(event)

  else if (findMode)
    if (KeyboardUtils.isEscape(event))
      handleEscapeForFindMode()
      DomUtils.suppressEvent(event)

    else if (event.keyCode == keyCodes.backspace || event.keyCode == keyCodes.deleteKey)
      handleDeleteForFindMode()
      DomUtils.suppressEvent(event)

    else if (event.keyCode == keyCodes.enter)
      handleEnterForFindMode()
      DomUtils.suppressEvent(event)

    # NOTE(review): modifiers is only assigned in the special-key branch above, so this is
    # "no modified key was derived"; it is undefined (hence truthy when negated) for plain keys.
    else if (!modifiers)
      event.stopPropagation()

  else if (isShowingHelpDialog && KeyboardUtils.isEscape(event))
    hideHelpDialog()

  else if (!isInsertMode() && !findMode)
    if (keyChar)
      if (currentCompletionKeys.indexOf(keyChar) != -1)
        DomUtils.suppressEvent(event)

      keyPort.postMessage({ keyChar:keyChar, frameId:frameId })

    else if (KeyboardUtils.isEscape(event))
      keyPort.postMessage({ keyChar:"<ESC>", frameId:frameId })

  # Added to prevent propagating this event to other listeners if it's one that'll trigger a Vimium command.
  # The goal is to avoid the scenario where Google Instant Search uses every keydown event to dump us
  # back into the search box. As a side effect, this should also prevent overriding by other sites.
  #
  # Subject to internationalization issues since we're using keyIdentifier instead of charCode (in keypress).
  #
  # TODO(ilya): Revisit this. Not sure it's the absolute best approach.
  if (keyChar == "" && !isInsertMode() &&
     (currentCompletionKeys.indexOf(KeyboardUtils.getKeyChar(event)) != -1 ||
      isValidFirstKey(KeyboardUtils.getKeyChar(event))))
    event.stopPropagation()
# Keyup events have no frame-level behavior of their own; we simply give the handler stack a
# chance to observe or suppress them.
onKeyup = (event) ->
  handlerStack.bubbleEvent 'keyup', event
  return
# Ask the background page whether Vimium is enabled for this page's URL; if so, complete the
# frame's initialization, otherwise hide any HUD that may already be showing.
checkIfEnabledForUrl = ->
  chrome.runtime.sendMessage { handler: "isEnabledForUrl", url: window.location.toString() }, (response) ->
    isEnabledForUrl = response.isEnabledForUrl
    if isEnabledForUrl
      initializeWhenEnabled()
    else if HUD.isReady()
      # Quickly hide any HUD we might already be showing, e.g. if we entered insert mode on page load.
      HUD.hide()
# Cache the completion-key state pushed from the background page. When invoked without a
# response (e.g. during startup), request the current completion keys instead; the reply is
# routed back into this same function.
refreshCompletionKeys = (response) ->
  if response
    currentCompletionKeys = response.completionKeys
    validFirstKeys = response.validFirstKeys if response.validFirstKeys
  else
    chrome.runtime.sendMessage { handler: "getCompletionKeys" }, refreshCompletionKeys
# True when keyChar can begin a Vimium command: either the background page lists it as a valid
# first key, or it is a count-prefix digit (1-9).
isValidFirstKey = (keyChar) ->
  validFirstKeys[keyChar] or /[1-9]/.test keyChar
# Capture-phase focus handler: focusing an editable element (or a focus-stealing embed) silently
# enters insert mode — except while find mode is driving the focus changes that find() induces.
onFocusCapturePhase = (event) ->
  target = event.target
  enterInsertModeWithoutShowingIndicator(target) if isFocusable(target) and not findMode

# Capture-phase blur handler: blurring a focusable element leaves insert mode (provided that
# element is the one holding the insert-mode lock).
onBlurCapturePhase = (event) ->
  target = event.target
  exitInsertMode(target) if isFocusable(target)
#
# Returns true if the element is focusable. This covers both text-editable elements and embeds
# like Flash, which steal the keyboard focus.
#
isFocusable = (element) ->
  isEditable(element) or isEmbed(element)
#
# Embedded elements like Flash and quicktime players can obtain focus but cannot be programmatically
# unfocused.
#
# Fix: the previous test used `indexOf(...) > 0`, which can never match "embed" (index 0), so
# <embed> elements were never recognized. Use `>= 0` so both "embed" and "object" match.
isEmbed = (element) -> ["embed", "object"].indexOf(element.nodeName.toLowerCase()) >= 0
#
# Input or text elements are considered focusable and able to receive their own keyboard events,
# and will enter insert mode if focused. Also note that the "contentEditable" attribute can be
# set on any element, which makes it a rich text editor, like the notes on jjot.com.
#
isEditable = (target) ->
  return true if target.isContentEditable
  nodeName = target.nodeName.toLowerCase()
  # Use a blacklist rather than a whitelist, because new form controls are still being added to HTML5.
  return true if nodeName == "input" and target.type not in ["radio", "checkbox"]
  nodeName in ["textarea", "select"]
#
# Enters insert mode and shows the "Insert mode" HUD message. Showing the UI is only useful when
# entering insert mode manually (by pressing "i"); in most cases insert mode is entered silently
# via enterInsertModeWithoutShowingIndicator.
#
window.enterInsertMode = (target) ->
  enterInsertModeWithoutShowingIndicator target
  HUD.show "Insert mode"

#
# 'focus' and 'blur' events are not guaranteed to arrive sequentially. For example, if blurring
# element A causes element B to come into focus, we may see "B focus" before "A blur". So we only
# leave insert mode once the last editable element that came into focus — tracked by
# insertModeLock — has been blurred. If insert mode was entered manually (by pressing 'i'),
# insertModeLock is 'undefined' and only <ESC> leaves insert mode.
#
enterInsertModeWithoutShowingIndicator = (target) ->
  insertModeLock = target

exitInsertMode = (target) ->
  if target is undefined or insertModeLock is target
    insertModeLock = null
    HUD.hide()

# Not in insert mode exactly when the lock is null (note: 'undefined' means manual insert mode).
isInsertMode = ->
  insertModeLock isnt null
# Re-parse findModeQuery.rawQuery into parsedQuery/isRegex/ignoreCase and recompute the match
# count. Should be called whenever rawQuery is modified.
updateFindModeQuery = ->
  # the query can be treated differently (e.g. as a plain string versus regex) depending on the presence of
  # escape sequences. '\' is the escape character and needs to be escaped itself to be used as a normal
  # character. here we grep for the relevant escape sequences.
  # \r forces regex mode, \R forces plain-string mode, \I disables smartcase, \\ is a literal backslash.
  findModeQuery.isRegex = settings.get 'regexFindMode'
  hasNoIgnoreCaseFlag = false
  findModeQuery.parsedQuery = findModeQuery.rawQuery.replace /\\./g, (match) ->
    switch (match)
      when "\\r"
        findModeQuery.isRegex = true
        return ""
      when "\\R"
        findModeQuery.isRegex = false
        return ""
      when "\\I"
        hasNoIgnoreCaseFlag = true
        return ""
      when "\\\\"
        return "\\"
      else
        return match

  # default to 'smartcase' mode, unless noIgnoreCase is explicitly specified
  findModeQuery.ignoreCase = !hasNoIgnoreCaseFlag && !Utils.hasUpperCase(findModeQuery.parsedQuery)

  # if we are dealing with a regex, grep for all matches in the text, and then call window.find() on them
  # sequentially so the browser handles the scrolling / text selection.
  if findModeQuery.isRegex
    try
      pattern = new RegExp(findModeQuery.parsedQuery, "g" + (if findModeQuery.ignoreCase then "i" else ""))
    catch error
      # if we catch a SyntaxError, assume the user is not done typing yet and return quietly
      return
    # innerText will not return the text of hidden elements, and strip out tags while preserving newlines
    text = document.body.innerText
    findModeQuery.regexMatches = text.match(pattern)
    findModeQuery.activeRegexIndex = 0
    findModeQuery.matchCount = findModeQuery.regexMatches?.length
  # if we are doing a basic plain string match, we still want to grep for matches of the string, so we can
  # show a the number of results. We can grep on document.body.innerText, as it should be indistinguishable
  # from the internal representation used by window.find.
  else
    # escape all special characters, so RegExp just parses the string 'as is'.
    # Taken from http://stackoverflow.com/questions/3446170/escape-string-for-use-in-javascript-regex
    escapeRegExp = /[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g
    parsedNonRegexQuery = findModeQuery.parsedQuery.replace(escapeRegExp, (char) -> "\\" + char)
    pattern = new RegExp(parsedNonRegexQuery, "g" + (if findModeQuery.ignoreCase then "i" else ""))
    text = document.body.innerText
    findModeQuery.matchCount = text.match(pattern)?.length
# Append a freshly-typed character to the find-mode query, then re-run the search in place and
# refresh the HUD with the updated query and match count.
handleKeyCharForFindMode = (keyChar) ->
  findModeQuery.rawQuery += keyChar
  updateFindModeQuery()
  performFindInPlace()
  showFindModeHUDForQuery()
# Leave find mode via <esc>: drop the find-mode styling, re-apply the current selection so it
# reverts to the default highlight color, and focus the match (falling back to selecting a
# matched input element).
handleEscapeForFindMode = ->
  exitFindMode()
  document.body.classList.remove("vimiumFindMode")
  # Removing the class does not re-color an existing selection, so recreate the selection to
  # restore the default color.
  selection = window.getSelection()
  unless selection.isCollapsed
    range = selection.getRangeAt(0)
    selection.removeAllRanges()
    selection.addRange(range)
  focusFoundLink() || selectFoundInputElement()
# Handle backspace/delete in find mode: remove the last character of the query and re-search;
# if the query is already empty, leave find mode instead.
handleDeleteForFindMode = ->
  if findModeQuery.rawQuery.length == 0
    exitFindMode()
    performFindInPlace()
  else
    findModeQuery.rawQuery = findModeQuery.rawQuery.slice(0, -1)
    updateFindModeQuery()
    performFindInPlace()
    showFindModeHUDForQuery()
# <esc> sends us into insert mode if possible, but <cr> does not.
# <esc> corresponds approximately to 'nevermind, I have found it already' while <cr> means 'I want to save
# this query and do more searches with it'
handleEnterForFindMode = ->
  exitFindMode()
  focusFoundLink()
  # Keep the find-mode highlight styling in place and persist the query so 'n'/'N' (including in
  # other frames) can reuse it.
  document.body.classList.add("vimiumFindMode")
  settings.set("findModeRawQuery", findModeQuery.rawQuery)
# Re-run the search for the current query without advancing past the current match; used while
# the query is still being typed so the selection grows with the query.
performFindInPlace = ->
  cachedScrollX = window.scrollX
  cachedScrollY = window.scrollY

  query = if findModeQuery.isRegex then getNextQueryFromRegexMatches(0) else findModeQuery.parsedQuery

  # Search backwards first to "free up" the current word as eligible for the real forward search. This allows
  # us to search in place without jumping around between matches as the query grows.
  executeFind(query, { backwards: true, caseSensitive: !findModeQuery.ignoreCase })

  # We need to restore the scroll position because we might've lost the right position by searching
  # backwards.
  window.scrollTo(cachedScrollX, cachedScrollY)

  findModeQueryHasResults = executeFind(query, { caseSensitive: !findModeQuery.ignoreCase })
# Performs the actual browser search via the non-standard window.find() API and returns its
# boolean result. :options is an optional dict. valid parameters are 'caseSensitive' and 'backwards'.
executeFind = (query, options) ->
  options = options || {}

  # rather hacky, but this is our way of signalling to the insertMode listener not to react to the focus
  # changes that find() induces.
  oldFindMode = findMode
  findMode = true

  document.body.classList.add("vimiumFindMode")

  # prevent find from matching its own search query in the HUD
  HUD.hide(true)
  # ignore the selectionchange event generated by find()
  document.removeEventListener("selectionchange",restoreDefaultSelectionHighlight, true)
  result = window.find(query, options.caseSensitive, options.backwards, true, false, true, false)
  # Re-attach the listener asynchronously, after the selectionchange event from find() has been dispatched.
  setTimeout(
    -> document.addEventListener("selectionchange", restoreDefaultSelectionHighlight, true)
    0)

  findMode = oldFindMode
  # we need to save the anchor node here because <esc> seems to nullify it, regardless of whether we do
  # preventDefault()
  findModeAnchorNode = document.getSelection().anchorNode
  result

# Invoked on user-initiated selection changes so ordinary selections shed the find-mode styling.
restoreDefaultSelectionHighlight = -> document.body.classList.remove("vimiumFindMode")
# If the last search succeeded and the match lies inside a link, give that link the focus.
focusFoundLink = ->
  return unless findModeQueryHasResults
  getLinkFromSelection()?.focus()
# True when `child` is `parent` itself or lies anywhere beneath it in the DOM tree, determined
# by walking parentNode pointers upwards from `child`.
isDOMDescendant = (parent, child) ->
  node = child
  until node is null
    return true if node is parent
    node = node.parentNode
  false
# When the found text is inside an input element, getSelection().anchorNode is null, so we fall
# back on document.activeElement. However, the last-focused element might not be the one find()
# currently points at (e.g. the current one may be disabled and unable to receive focus), so we
# apply the approximate heuristic of requiring the last anchor node to be an ancestor of the
# active element.
selectFoundInputElement = ->
  active = document.activeElement
  if findModeQueryHasResults and active and DomUtils.isSelectable(active) and
      isDOMDescendant(findModeAnchorNode, active)
    DomUtils.simulateSelect(active)
    # The element already received focus via find(), so invoke insert mode manually.
    enterInsertModeWithoutShowingIndicator(active)
# Advance the active regex match by stepSize (which may be negative or zero) with wrap-around,
# and return the matched text. Returns "" when there are no matches, since find()ing an empty
# query always returns false.
getNextQueryFromRegexMatches = (stepSize) ->
  matches = findModeQuery.regexMatches
  return "" unless matches
  count = matches.length
  findModeQuery.activeRegexIndex = (findModeQuery.activeRegexIndex + stepSize + count) % count
  matches[findModeQuery.activeRegexIndex]
# Jump to the next (or previous) occurrence of the current find-mode query, then focus the match.
findAndFocus = (backwards) ->
  # check if the query has been changed by a script in another frame
  mostRecentQuery = settings.get("findModeRawQuery") || ""
  if (mostRecentQuery != findModeQuery.rawQuery)
    findModeQuery.rawQuery = mostRecentQuery
    updateFindModeQuery()

  query =
    if findModeQuery.isRegex
      getNextQueryFromRegexMatches(if backwards then -1 else 1)
    else
      findModeQuery.parsedQuery

  findModeQueryHasResults =
    executeFind(query, { backwards: backwards, caseSensitive: !findModeQuery.ignoreCase })

  if (!findModeQueryHasResults)
    HUD.showForDuration("No matches for '" + findModeQuery.rawQuery + "'", 1000)
    return

  # if we have found an input element via 'n', pressing <esc> immediately afterwards sends us into insert
  # mode
  elementCanTakeInput = document.activeElement &&
    DomUtils.isSelectable(document.activeElement) &&
    isDOMDescendant(findModeAnchorNode, document.activeElement)
  if (elementCanTakeInput)
    # One-shot keydown handler: it removes itself on the very next key press.
    handlerStack.push({
      keydown: (event) ->
        @remove()
        if (KeyboardUtils.isEscape(event))
          DomUtils.simulateSelect(document.activeElement)
          enterInsertModeWithoutShowingIndicator(document.activeElement)
          return false # we have "consumed" this event, so do not propagate
        return true
    })

  focusFoundLink()
# Jump to the next occurrence of the saved find-mode query (like vim's "n").
window.performFind = -> findAndFocus()

# Jump to the previous occurrence (like vim's "N").
window.performBackwardsFind = -> findAndFocus(true)
# Walk upward from the selection's anchor node looking for an enclosing <a> element; returns
# null when the selection is not inside a link (or the walk reaches document.body).
getLinkFromSelection = ->
  node = window.getSelection().anchorNode
  while node and node isnt document.body
    return node if node.nodeName.toLowerCase() is "a"
    node = node.parentNode
  null
# Used by the findAndFollow* functions to activate the link they located.
followLink = (linkElement) ->
  # <link> elements are not clickable; navigate to their href directly.
  return window.location.href = linkElement.href if linkElement.nodeName.toLowerCase() == "link"
  # Otherwise prefer a simulated click over setting location.href: some next/prev links are meant
  # to trigger AJAX calls, like the 'more' button on GitHub's newsfeed.
  linkElement.scrollIntoView()
  linkElement.focus()
  DomUtils.simulateClick(linkElement)
#
# Find and follow a link which matches any one of a list of strings. If there are multiple such links, they
# are prioritized for shortness, by their position in :linkStrings, how far down the page they are located,
# and finally by whether the match is exact. Practically speaking, this means we favor 'next page' over 'the
# next big thing', and 'more' over 'nextcompany', even if 'next' occurs before 'more' in :linkStrings.
#
findAndFollowLink = (linkStrings) ->
  linksXPath = DomUtils.makeXPath(["a", "*[@onclick or @role='link' or contains(@class, 'button')]"])
  links = DomUtils.evaluateXPath(linksXPath, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE)
  candidateLinks = []

  # at the end of this loop, candidateLinks will contain all visible links that match our patterns
  # links lower in the page are more likely to be the ones we want, so we loop through the snapshot backwards
  for i in [(links.snapshotLength - 1)..0] by -1
    link = links.snapshotItem(i)

    # ensure link is visible (we don't mind if it is scrolled offscreen)
    boundingClientRect = link.getBoundingClientRect()
    if (boundingClientRect.width == 0 || boundingClientRect.height == 0)
      continue
    computedStyle = window.getComputedStyle(link, null)
    if (computedStyle.getPropertyValue("visibility") != "visible" ||
        computedStyle.getPropertyValue("display") == "none")
      continue

    # Substring match against any of the (lowercase) pattern strings.
    linkMatches = false
    for linkString in linkStrings
      if (link.innerText.toLowerCase().indexOf(linkString) != -1)
        linkMatches = true
        break
    continue unless linkMatches

    candidateLinks.push(link)

  return if (candidateLinks.length == 0)

  for link in candidateLinks
    link.wordCount = link.innerText.trim().split(/\s+/).length

  # We can use this trick to ensure that Array.sort is stable. We need this property to retain the reverse
  # in-page order of the links.
  candidateLinks.forEach((a,i) -> a.originalIndex = i)

  # favor shorter links, and ignore those that are more than one word longer than the shortest link
  candidateLinks =
    candidateLinks
      .sort((a, b) ->
        if (a.wordCount == b.wordCount) then a.originalIndex - b.originalIndex else a.wordCount - b.wordCount
      )
      .filter((a) -> a.wordCount <= candidateLinks[0].wordCount + 1)

  # Try each pattern in priority order; prefer whole-word matches when the pattern starts/ends on
  # a word character.
  for linkString in linkStrings
    exactWordRegex =
      if /\b/.test(linkString[0]) or /\b/.test(linkString[linkString.length - 1])
        new RegExp "\\b" + linkString + "\\b", "i"
      else
        new RegExp linkString, "i"
    for candidateLink in candidateLinks
      if (exactWordRegex.test(candidateLink.innerText))
        followLink(candidateLink)
        return true
  false
# Follow the first <link>/<a>/<area> element whose rel attribute equals :value (e.g. "next" or
# "prev"). Returns true if such an element was found and followed, false otherwise.
#
# Fix: the function previously ended with the `for` loop, so CoffeeScript's implicit
# comprehension return produced a (truthy) array whenever no match was found; that defeated the
# `findAndFollowRel(...) || findAndFollowLink(...)` fallback in goPrevious/goNext. Return an
# explicit false instead.
findAndFollowRel = (value) ->
  relTags = ["link", "a", "area"]
  for tag in relTags
    elements = document.getElementsByTagName(tag)
    for element in elements
      if (element.hasAttribute("rel") && element.rel == value)
        followLink(element)
        return true
  false
# Navigate to the previous page: prefer an explicit rel="prev" element, then fall back on
# fuzzy-matching visible link text against the user's previousPatterns setting.
window.goPrevious = ->
  patterns = settings.get("previousPatterns") || ""
  strings = (s for s in patterns.split(",") when s.trim().length)
  findAndFollowRel("prev") || findAndFollowLink(strings)

# Navigate to the next page, analogously to goPrevious, using the nextPatterns setting.
window.goNext = ->
  patterns = settings.get("nextPatterns") || ""
  strings = (s for s in patterns.split(",") when s.trim().length)
  findAndFollowRel("next") || findAndFollowLink(strings)
# Display the current find-mode query in the HUD, annotated with its match count (or a
# "No Matches" notice when the query found nothing).
showFindModeHUDForQuery = ->
  suffix =
    if findModeQueryHasResults || findModeQuery.parsedQuery.length == 0
      " (" + findModeQuery.matchCount + " Matches)"
    else
      " (No Matches)"
  HUD.show("/" + findModeQuery.rawQuery + suffix)
# Begin an interactive "/" search with a fresh, empty query.
window.enterFindMode = ->
  findModeQuery = { rawQuery: "" }
  findMode = true
  HUD.show("/")

exitFindMode = ->
  findMode = false
  HUD.hide()
# Build and display the help dialog from HTML supplied by the background page. Only the frame
# whose id matches :fid shows the dialog, so it appears once per tab.
window.showHelpDialog = (html, fid) ->
  return if (isShowingHelpDialog || !document.body || fid != frameId)
  isShowingHelpDialog = true
  container = document.createElement("div")
  container.id = "vimiumHelpDialogContainer"
  container.className = "vimiumReset"

  document.body.appendChild(container)

  container.innerHTML = html
  container.getElementsByClassName("closeButton")[0].addEventListener("click", hideHelpDialog, false)

  # Controller object for the "advanced commands" toggle inside the dialog.
  VimiumHelpDialog =
    # This setting is pulled out of local storage. It's false by default.
    getShowAdvancedCommands: -> settings.get("helpDialog_showAdvancedCommands")

    init: () ->
      this.dialogElement = document.getElementById("vimiumHelpDialog")
      this.dialogElement.getElementsByClassName("toggleAdvancedCommands")[0].addEventListener("click",
        VimiumHelpDialog.toggleAdvancedCommands, false)
      this.dialogElement.style.maxHeight = window.innerHeight - 80
      this.showAdvancedCommands(this.getShowAdvancedCommands())

    #
    # Advanced commands are hidden by default so they don't overwhelm new and casual users.
    #
    toggleAdvancedCommands: (event) ->
      event.preventDefault()
      showAdvanced = VimiumHelpDialog.getShowAdvancedCommands()
      VimiumHelpDialog.showAdvancedCommands(!showAdvanced)
      settings.set("helpDialog_showAdvancedCommands", !showAdvanced)

    # Show or hide the rows marked "advanced", and update the toggle-link label to match.
    showAdvancedCommands: (visible) ->
      VimiumHelpDialog.dialogElement.getElementsByClassName("toggleAdvancedCommands")[0].innerHTML =
        if visible then "Hide advanced commands" else "Show advanced commands"
      advancedEls = VimiumHelpDialog.dialogElement.getElementsByClassName("advanced")
      for el in advancedEls
        el.style.display = if visible then "table-row" else "none"

  VimiumHelpDialog.init()

  container.getElementsByClassName("optionsPage")[0].addEventListener("click", (clickEvent) ->
    clickEvent.preventDefault()
    chrome.runtime.sendMessage({handler: "openOptionsPageInNewTab"})
    false)
# Tear down the help dialog (if present). When triggered by a click on the close button, also
# cancel the click's default action.
hideHelpDialog = (clickEvent) ->
  isShowingHelpDialog = false
  dialog = document.getElementById("vimiumHelpDialogContainer")
  dialog.parentNode.removeChild(dialog) if dialog
  clickEvent.preventDefault() if clickEvent

# Show the help dialog if it is hidden, and hide it if it is showing.
toggleHelpDialog = (html, fid) ->
  if isShowingHelpDialog then hideHelpDialog() else showHelpDialog(html, fid)
#
# A heads-up-display (HUD) for showing Vimium page operations.
# Note: you cannot interact with the HUD until document.body is available.
#
HUD =
  _tweenId: -1
  # Lazily-created div for normal HUD messages (see displayElement()).
  _displayElement: null
  # Lazily-created div for the upgrade notification (see upgradeNotificationElement()).
  _upgradeNotificationElement: null

  # This HUD is styled to precisely mimick the chrome HUD on Mac. Use the "has_popup_and_link_hud.html"
  # test harness to tweak these styles to match Chrome's. One limitation of our HUD display is that
  # it doesn't sit on top of horizontal scrollbars like Chrome's HUD does.

  # Show :text and then fade the HUD away after :duration milliseconds.
  showForDuration: (text, duration) ->
    HUD.show(text)
    HUD._showForDurationTimerId = setTimeout((-> HUD.hide()), duration)

  show: (text) ->
    return unless HUD.enabled()
    # Cancel any pending auto-hide so this message is not hidden prematurely.
    clearTimeout(HUD._showForDurationTimerId)
    HUD.displayElement().innerText = text
    clearInterval(HUD._tweenId)
    HUD._tweenId = Tween.fade(HUD.displayElement(), 1.0, 150)
    HUD.displayElement().style.display = ""

  showUpgradeNotification: (version) ->
    HUD.upgradeNotificationElement().innerHTML = "Vimium has been updated to
      <a class='vimiumReset'
      href='https://chrome.google.com/extensions/detail/dbepggeogbaibhgnhhndojpepiihcmeb'>
      #{version}</a>.<a class='vimiumReset close-button' href='#'>x</a>"
    links = HUD.upgradeNotificationElement().getElementsByTagName("a")
    # links[0] is the version link, links[1] is the close button; both dismiss the notification.
    links[0].addEventListener("click", HUD.onUpdateLinkClicked, false)
    links[1].addEventListener "click", (event) ->
      event.preventDefault()
      HUD.onUpdateLinkClicked()
    Tween.fade(HUD.upgradeNotificationElement(), 1.0, 150)

  onUpdateLinkClicked: (event) ->
    HUD.hideUpgradeNotification()
    chrome.runtime.sendMessage({ handler: "upgradeNotificationClosed" })

  hideUpgradeNotification: (clickEvent) ->
    Tween.fade(HUD.upgradeNotificationElement(), 0, 150,
      -> HUD.upgradeNotificationElement().style.display = "none")

  #
  # Retrieves the HUD HTML element, creating it on first use.
  #
  displayElement: ->
    if (!HUD._displayElement)
      HUD._displayElement = HUD.createHudElement()
      # Keep this far enough to the right so that it doesn't collide with the "popups blocked" chrome HUD.
      HUD._displayElement.style.right = "150px"
    HUD._displayElement

  upgradeNotificationElement: ->
    if (!HUD._upgradeNotificationElement)
      HUD._upgradeNotificationElement = HUD.createHudElement()
      # Position this just to the left of our normal HUD.
      HUD._upgradeNotificationElement.style.right = "315px"
    HUD._upgradeNotificationElement

  createHudElement: ->
    element = document.createElement("div")
    element.className = "vimiumReset vimiumHUD"
    document.body.appendChild(element)
    element

  # Hide the HUD, either instantly (:immediate) or with a short fade-out.
  hide: (immediate) ->
    clearInterval(HUD._tweenId)
    if (immediate)
      HUD.displayElement().style.display = "none"
    else
      HUD._tweenId = Tween.fade(HUD.displayElement(), 0, 150,
        -> HUD.displayElement().style.display = "none")

  isReady: -> document.body != null

  # A preference which can be toggled in the Options page.
  enabled: -> !settings.get("hideHud")
Tween =
  #
  # Fades an element's alpha. Returns a timer ID which can be used to stop the tween via clearInterval.
  #
  fade: (element, toAlpha, duration, onComplete) ->
    state =
      duration: duration
      startTime: (new Date()).getTime()
      from: parseInt(element.style.opacity) || 0
      to: toAlpha
    state.onUpdate = (value) ->
      element.style.opacity = value
      # Fire the completion callback exactly when the final opacity value has been applied.
      onComplete() if value == state.to and onComplete
    state.timerId = setInterval((-> Tween.performTweenStep(state)), 50)
    state.timerId

  # Advance one animation step: interpolate linearly between state.from and state.to, and stop
  # the timer (applying the final value) once state.duration has elapsed.
  performTweenStep: (state) ->
    elapsed = (new Date()).getTime() - state.startTime
    if elapsed >= state.duration
      clearInterval(state.timerId)
      state.onUpdate(state.to)
    else
      fraction = elapsed / state.duration
      state.onUpdate(state.from + fraction * (state.to - state.from))
# Kick off the early (pre-DOMReady) initialization as soon as this content script loads.
initializePreDomReady()
window.addEventListener("DOMContentLoaded", initializeOnDomReady)

# Persist the scroll position before the page unloads so it can be restored later.
window.onbeforeunload = ->
  chrome.runtime.sendMessage(
    handler: "updateScrollPosition"
    scrollX: window.scrollX
    scrollY: window.scrollY)

# Export this frame's page-global objects (for other content scripts and for tests).
root = exports ? window
root.settings = settings
root.HUD = HUD
root.handlerStack = handlerStack
root.frameId = frameId
#
# This content script takes input from its webpage and executes commands locally on behalf of the background
# page. It must be run prior to domReady so that we perform some operations very early. We tell the
# background page that we're in domReady and ready to accept normal commands by connectiong to a port named
# "domReady".
#
window.handlerStack = new HandlerStack

# The element currently holding the insert-mode lock, or null when not in insert mode
# (see enterInsertModeWithoutShowingIndicator / exitInsertMode).
insertModeLock = null
findMode = false
# The current find-mode state; rawQuery is what the user typed, the other fields are derived by
# updateFindModeQuery().
findModeQuery = { rawQuery: "", matchCount: 0 }
findModeQueryHasResults = false
# Anchor node of the last find() selection; used to decide whether the active element holds the match.
findModeAnchorNode = null
isShowingHelpDialog = false
# Port used to forward keystrokes to the background page's key handler.
keyPort = null
# Users can disable Vimium on URL patterns via the settings page.
isEnabledForUrl = true
# Completion-key state pushed from the background page (see refreshCompletionKeys).
currentCompletionKeys = null
validFirstKeys = null
# The types in <input type="..."> that we consider for the focusInput command. Right now this is
# recalculated in each content script. Alternatively we could calculate it once in the background page
# and use a request to fetch it each time.
# TODO: should we include the HTML5 date pickers here?
#
# The corresponding XPath for such elements: inputs of the listed types (or with no type at all)
# that are neither disabled nor readonly, plus textareas and contenteditable elements.
textInputXPath = (->
  textInputTypes = ["text", "search", "email", "url", "number", "password"]
  # Fix: a space is required before "or" so the expression tokenizes as
  # `@type="password" or not(@type)` rather than `@type="password"or not(@type)`.
  inputElements = ["input[" +
    "(" + textInputTypes.map((type) -> '@type="' + type + '"').join(" or ") + " or not(@type))" +
    " and not(@disabled or @readonly)]",
    "textarea", "*[@contenteditable='' or translate(@contenteditable, 'TRUE', 'true')='true']"]
  DomUtils.makeXPath(inputElements)
)()
#
# settings provides a browser-global localStorage-backed dict. get() and set() are synchronous, but load()
# must be called beforehand to ensure get() will return up-to-date values.
#
settings =
  port: null
  values: {}
  loadedValues: 0
  # The settings requested from the background page by load().
  valuesToLoad: ["scrollStepSize", "linkHintCharacters", "linkHintNumbers", "filterLinkHints", "hideHud",
    "previousPatterns", "nextPatterns", "findModeRawQuery", "regexFindMode", "userDefinedLinkHintCss",
    "helpDialog_showAdvancedCommands"]
  isLoaded: false
  eventListeners: {}

  # Lazily open the long-lived "settings" port to the background page.
  init: ->
    @port = chrome.runtime.connect({ name: "settings" })
    @port.onMessage.addListener(@receiveMessage)

  get: (key) -> @values[key]

  # Update the local cache and push the new value to the background page.
  set: (key, value) ->
    @init() unless @port
    @values[key] = value
    @port.postMessage({ operation: "set", key: key, value: value })

  # Request every setting in valuesToLoad; responses arrive asynchronously via receiveMessage.
  load: ->
    @init() unless @port
    for i of @valuesToLoad
      @port.postMessage({ operation: "get", key: @valuesToLoad[i] })

  receiveMessage: (args) ->
    # not using 'this' due to issues with binding on callback
    settings.values[args.key] = args.value
    # since load() can be called more than once, loadedValues can be greater than valuesToLoad, but we test
    # for equality so initializeOnReady only runs once
    if (++settings.loadedValues == settings.valuesToLoad.length)
      settings.isLoaded = true
      listener = null
      while (listener = settings.eventListeners["load"].pop())
        listener()

  # Register a callback for a settings event; currently only "load" is fired.
  # NOTE(review): receiveMessage pops eventListeners["load"] unconditionally, so a "load"
  # listener is assumed to be registered before load() completes — confirm callers uphold this.
  addEventListener: (eventName, callback) ->
    if (!(eventName of @eventListeners))
      @eventListeners[eventName] = []
    @eventListeners[eventName].push(callback)
#
# Give this frame a unique id.
#
frameId = Math.floor(Math.random()*999999999)
hasModifiersRegex = /^<([amc]-)+.>/
#
# Complete initialization work that should be done prior to DOMReady.
#
initializePreDomReady = ->
  # LinkHints can only initialize once the settings it reads have loaded.
  settings.addEventListener("load", LinkHints.init.bind(LinkHints))
  settings.load()
  Scroller.init()
  checkIfEnabledForUrl()
  refreshCompletionKeys()
  # Send the key to the key handler in the background page.
  keyPort = chrome.runtime.connect({ name: "keyDown" })
  # Dispatch table for messages from the background page, keyed by request.name.
  requestHandlers =
    hideUpgradeNotification: -> HUD.hideUpgradeNotification()
    showUpgradeNotification: (request) -> HUD.showUpgradeNotification(request.version)
    showHUDforDuration: (request) -> HUD.showForDuration request.text, request.duration
    toggleHelpDialog: (request) -> toggleHelpDialog(request.dialogHtml, request.frameId)
    focusFrame: (request) -> if (frameId == request.frameId) then focusThisFrame(request.highlight)
    refreshCompletionKeys: refreshCompletionKeys
    getScrollPosition: -> scrollX: window.scrollX, scrollY: window.scrollY
    setScrollPosition: (request) -> setScrollPosition request.scrollX, request.scrollY
    executePageCommand: executePageCommand
    getActiveState: -> { enabled: isEnabledForUrl }
    disableVimium: disableVimium
  chrome.runtime.onMessage.addListener (request, sender, sendResponse) ->
    # in the options page, we will receive requests from both content and background scripts. ignore those
    # from the former.
    return if sender.tab and not sender.tab.url.startsWith 'chrome-extension://'
    # getActiveState must work even when Vimium is disabled, so the popup can reflect the state.
    return unless isEnabledForUrl or request.name == 'getActiveState'
    sendResponse requestHandlers[request.name](request, sender)
    # Ensure the sendResponse callback is freed.
    false
#
# This is called once the background page has told us that Vimium should be enabled for the current URL.
#
initializeWhenEnabled = ->
  # Register the full set of capturing listeners; disableVimium removes this same set.
  listenerPairs = [
    ["keydown", onKeydown]
    ["keypress", onKeypress]
    ["keyup", onKeyup]
    ["focus", onFocusCapturePhase]
    ["blur", onBlurCapturePhase]
    ["DOMActivate", onDOMActivate]
  ]
  # All listeners use the capture phase (third argument true) so Vimium sees events first.
  document.addEventListener(name, handler, true) for [name, handler] in listenerPairs
  # If an editable element already holds focus, we should start out in insert mode.
  enterInsertModeIfElementIsFocused()
#
# Used to disable Vimium without needing to reload the page.
# This is called if the current page's url is blacklisted using the popup UI.
#
disableVimium = ->
  # Unregister exactly the listeners installed by initializeWhenEnabled.
  listenerPairs = [
    ["keydown", onKeydown]
    ["keypress", onKeypress]
    ["keyup", onKeyup]
    ["focus", onFocusCapturePhase]
    ["blur", onBlurCapturePhase]
    ["DOMActivate", onDOMActivate]
  ]
  document.removeEventListener(name, handler, true) for [name, handler] in listenerPairs
  isEnabledForUrl = false
#
# The backend needs to know which frame has focus.
#
window.addEventListener "focus", ->
  # settings may have changed since the frame last had focus
  settings.load()
  chrome.runtime.sendMessage({ handler: "frameFocused", frameId: frameId })
#
# Initialization tasks that must wait for the document to be ready.
#
initializeOnDomReady = ->
  # window.top == window.self is true only for the top-level frame.
  registerFrameIfSizeAvailable(window.top == window.self)
  enterInsertModeIfElementIsFocused() if isEnabledForUrl
  # Tell the background page we're in the dom ready state.
  chrome.runtime.connect({ name: "domReady" })
# This is a little hacky but sometimes the size wasn't available on domReady?
registerFrameIfSizeAvailable = (is_top) ->
  # The window dimensions can be 0 or undefined early in the page's life.
  sizeIsKnown = innerWidth != undefined && innerWidth != 0 &&
    innerHeight != undefined && innerHeight != 0
  unless sizeIsKnown
    # Poll again shortly; the background page needs every frame's area eventually.
    setTimeout((-> registerFrameIfSizeAvailable(is_top)), 100)
    return
  chrome.runtime.sendMessage(
    handler: "registerFrame"
    frameId: frameId
    area: innerWidth * innerHeight
    is_top: is_top
    total: frames.length + 1)
#
# Enters insert mode if the currently focused element in the DOM is focusable.
#
enterInsertModeIfElementIsFocused = ->
  # Skipped during find mode, because executeFind() deliberately moves focus around.
  if (document.activeElement && isEditable(document.activeElement) && !findMode)
    enterInsertModeWithoutShowingIndicator(document.activeElement)
# Offer DOMActivate events to whichever handler on the stack wants them.
onDOMActivate = (event) -> handlerStack.bubbleEvent 'DOMActivate', event
executePageCommand = (request) ->
  # Commands are addressed to a specific frame; ignore those meant for another one.
  return unless frameId == request.frameId
  if request.passCountToFunction
    # The command implementation accepts the repeat count as its argument.
    Utils.invokeCommandString(request.command, [request.count])
  else
    # Otherwise repeat the invocation `count` times.
    remaining = request.count
    while remaining > 0
      Utils.invokeCommandString(request.command)
      remaining--
  refreshCompletionKeys(request)
# Restore a previously-saved scroll position (sent by the background page).
setScrollPosition = (scrollX, scrollY) ->
  if (scrollX > 0 || scrollY > 0)
    DomUtils.documentReady(-> window.scrollTo(scrollX, scrollY))
#
# Called from the backend in order to change frame focus.
#
window.focusThisFrame = (shouldHighlight) ->
  window.focus()
  if (document.body && shouldHighlight)
    # Flash a yellow border for 200ms so the user can see which frame gained focus.
    borderWas = document.body.style.border
    document.body.style.border = '5px solid yellow'
    setTimeout((-> document.body.style.border = borderWas), 200)
# Scrolling commands, exposed on window so the background page can invoke them by name.
extend window,
  scrollToBottom: -> Scroller.scrollTo "y", "max"
  scrollToTop: -> Scroller.scrollTo "y", 0
  scrollToLeft: -> Scroller.scrollTo "x", 0
  scrollToRight: -> Scroller.scrollTo "x", "max"
  scrollUp: -> Scroller.scrollBy "y", -1 * settings.get("scrollStepSize")
  scrollDown: -> Scroller.scrollBy "y", settings.get("scrollStepSize")
  scrollPageUp: -> Scroller.scrollBy "y", "viewSize", -1/2
  scrollPageDown: -> Scroller.scrollBy "y", "viewSize", 1/2
  scrollFullPageUp: -> Scroller.scrollBy "y", "viewSize", -1
  scrollFullPageDown: -> Scroller.scrollBy "y", "viewSize"
  scrollLeft: -> Scroller.scrollBy "x", -1 * settings.get("scrollStepSize")
  scrollRight: -> Scroller.scrollBy "x", settings.get("scrollStepSize")
# Navigation and page-level commands, exposed on window for the background page to invoke.
extend window,
  reload: -> window.location.reload()
  goBack: (count) -> history.go(-count)
  goForward: (count) -> history.go(count)
  # Strip `count` trailing path components from the URL, but never go above the domain root.
  goUp: (count) ->
    url = window.location.href
    if (url[url.length - 1] == "/")
      url = url.substring(0, url.length - 1)
    urlsplit = url.split("/")
    # make sure we haven't hit the base domain yet
    if (urlsplit.length > 3)
      urlsplit = urlsplit.slice(0, Math.max(3, urlsplit.length - count))
      window.location.href = urlsplit.join('/')
  goToRoot: () ->
    window.location.href = window.location.origin
  # Open the view-source: counterpart of the current URL (or the rendered page, if already
  # viewing source) in a new tab.
  toggleViewSource: ->
    chrome.runtime.sendMessage { handler: "getCurrentTabUrl" }, (url) ->
      if (url.substr(0, 12) == "view-source:")
        url = url.substr(12, url.length - 12)
      else
        url = "view-source:" + url
      chrome.runtime.sendMessage({ handler: "openUrlInNewTab", url: url, selected: true })
  copyCurrentUrl: ->
    # TODO: When the following bug is fixed, revisit this approach of sending back to the background
    # page to copy.
    # http://code.google.com/p/chromium/issues/detail?id=55188
    chrome.runtime.sendMessage { handler: "getCurrentTabUrl" }, (url) ->
      chrome.runtime.sendMessage { handler: "copyToClipboard", data: url }
    HUD.showForDuration("Yanked URL", 1000)
  focusInput: (count) ->
    # Focus the first input element on the page, and create overlays to highlight all the input elements, with
    # the currently-focused element highlighted specially. Tabbing will shift focus to the next input element.
    # Pressing any other key will remove the overlays and the special tab behavior.
    resultSet = DomUtils.evaluateXPath(textInputXPath, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE)
    # Collect only visible inputs; getVisibleClientRect returns null for hidden elements.
    visibleInputs =
      for i in [0...resultSet.snapshotLength] by 1
        element = resultSet.snapshotItem(i)
        rect = DomUtils.getVisibleClientRect(element)
        continue if rect == null
        { element: element, rect: rect }
    return if visibleInputs.length == 0
    # count is 1-based; clamp it to the number of inputs found.
    selectedInputIndex = Math.min(count - 1, visibleInputs.length - 1)
    visibleInputs[selectedInputIndex].element.focus()
    # With a single input there is nothing to cycle through, so skip the overlays.
    return if visibleInputs.length == 1
    hints = for tuple in visibleInputs
      hint = document.createElement("div")
      hint.className = "vimiumReset internalVimiumInputHint vimiumInputHint"
      # minus 1 for the border
      hint.style.left = (tuple.rect.left - 1) + window.scrollX + "px"
      hint.style.top = (tuple.rect.top - 1) + window.scrollY + "px"
      hint.style.width = tuple.rect.width + "px"
      hint.style.height = tuple.rect.height + "px"
      hint
    hints[selectedInputIndex].classList.add 'internalVimiumSelectedInputHint'
    hintContainingDiv = DomUtils.addElementList(hints,
      { id: "vimiumInputMarkerContainer", className: "vimiumReset" })
    # Tab / shift-tab cycle through the inputs; any other key (except shift itself) dismisses
    # the overlays and lets the event through.
    handlerStack.push keydown: (event) ->
      if event.keyCode == KeyboardUtils.keyCodes.tab
        hints[selectedInputIndex].classList.remove 'internalVimiumSelectedInputHint'
        if event.shiftKey
          if --selectedInputIndex == -1
            selectedInputIndex = hints.length - 1
        else
          if ++selectedInputIndex == hints.length
            selectedInputIndex = 0
        hints[selectedInputIndex].classList.add 'internalVimiumSelectedInputHint'
        visibleInputs[selectedInputIndex].element.focus()
      else unless event.keyCode == KeyboardUtils.keyCodes.shiftKey
        DomUtils.removeElement hintContainingDiv
        @remove()
        return true
      false
#
# Sends everything except i & ESC to the handler in background_page. i & ESC are special because they control
# insert mode which is local state to the page. The key will be either a single ascii letter or a
# key-modifier pair, e.g. <c-a> for control a.
#
# Note that some keys will only register keydown events and not keystroke events, e.g. ESC.
#
onKeypress = (event) ->
  # Handlers on the stack (e.g. focusInput's tab-cycler) get first refusal.
  return unless handlerStack.bubbleEvent('keypress', event)
  keyChar = ""
  # Ignore modifier keys by themselves.
  if (event.keyCode > 31)
    keyChar = String.fromCharCode(event.charCode)
    # Enter insert mode when the user enables the native find interface.
    if (keyChar == "f" && KeyboardUtils.isPrimaryModifierKey(event))
      enterInsertModeWithoutShowingIndicator()
      return
  if (keyChar)
    if (findMode)
      handleKeyCharForFindMode(keyChar)
      DomUtils.suppressEvent(event)
    else if (!isInsertMode() && !findMode)
      # Suppress the event if this key continues a partial Vimium command.
      if (currentCompletionKeys.indexOf(keyChar) != -1)
        DomUtils.suppressEvent(event)
      keyPort.postMessage({ keyChar:keyChar, frameId:frameId })
onKeydown = (event) ->
  # Handlers on the stack get the event first; they may consume it.
  return unless handlerStack.bubbleEvent('keydown', event)
  keyChar = ""
  # handle special keys, and normal input keys with modifiers being pressed. don't handle shiftKey alone (to
  # avoid / being interpreted as ?)
  if (((event.metaKey || event.ctrlKey || event.altKey) && event.keyCode > 31) || (
      # TODO(philc): some events don't have a keyidentifier. How is that possible?
      event.keyIdentifier && event.keyIdentifier.slice(0, 2) != "U+"))
    keyChar = KeyboardUtils.getKeyChar(event)
    # Again, ignore just modifiers. Maybe this should replace the keyCode>31 condition.
    if (keyChar != "")
      modifiers = []
      if (event.shiftKey)
        keyChar = keyChar.toUpperCase()
      if (event.metaKey)
        modifiers.push("m")
      if (event.ctrlKey)
        modifiers.push("c")
      if (event.altKey)
        modifiers.push("a")
      # Prepend each modifier prefix, producing e.g. "c-a" for ctrl+a.
      for i of modifiers
        keyChar = modifiers[i] + "-" + keyChar
      # Wrap multi-character keys in angle brackets: <c-a>, <up>, etc.
      if (modifiers.length > 0 || keyChar.length > 1)
        keyChar = "<" + keyChar + ">"
  if (isInsertMode() && KeyboardUtils.isEscape(event))
    # Note that we can't programmatically blur out of Flash embeds from Javascript.
    if (!isEmbed(event.srcElement))
      # Remove focus so the user can't just get himself back into insert mode by typing in the same input
      # box.
      if (isEditable(event.srcElement))
        event.srcElement.blur()
      exitInsertMode()
      DomUtils.suppressEvent(event)
  else if (findMode)
    if (KeyboardUtils.isEscape(event))
      handleEscapeForFindMode()
      DomUtils.suppressEvent(event)
    else if (event.keyCode == keyCodes.backspace || event.keyCode == keyCodes.deleteKey)
      handleDeleteForFindMode()
      DomUtils.suppressEvent(event)
    else if (event.keyCode == keyCodes.enter)
      handleEnterForFindMode()
      DomUtils.suppressEvent(event)
    else if (!modifiers)
      # Plain keystrokes in find mode are consumed by onKeypress; just stop propagation here.
      event.stopPropagation()
  else if (isShowingHelpDialog && KeyboardUtils.isEscape(event))
    hideHelpDialog()
  else if (!isInsertMode() && !findMode)
    if (keyChar)
      if (currentCompletionKeys.indexOf(keyChar) != -1)
        DomUtils.suppressEvent(event)
      keyPort.postMessage({ keyChar:keyChar, frameId:frameId })
    else if (KeyboardUtils.isEscape(event))
      keyPort.postMessage({ keyChar:"<ESC>", frameId:frameId })
  # Added to prevent propagating this event to other listeners if it's one that'll trigger a Vimium command.
  # The goal is to avoid the scenario where Google Instant Search uses every keydown event to dump us
  # back into the search box. As a side effect, this should also prevent overriding by other sites.
  #
  # Subject to internationalization issues since we're using keyIdentifier instead of charCode (in keypress).
  #
  # TODO: Revisit this. Not sure it's the absolute best approach.
  if (keyChar == "" && !isInsertMode() &&
      (currentCompletionKeys.indexOf(KeyboardUtils.getKeyChar(event)) != -1 ||
       isValidFirstKey(KeyboardUtils.getKeyChar(event))))
    event.stopPropagation()
# Keyup events are simply offered to the handler stack.
onKeyup = (event) -> return unless handlerStack.bubbleEvent('keyup', event)
# Ask the background page whether Vimium is enabled for this URL, and install the event
# listeners only if it is.
checkIfEnabledForUrl = ->
  url = window.location.toString()
  chrome.runtime.sendMessage { handler: "isEnabledForUrl", url: url }, (response) ->
    isEnabledForUrl = response.isEnabledForUrl
    if (isEnabledForUrl)
      initializeWhenEnabled()
    else if (HUD.isReady())
      # Quickly hide any HUD we might already be showing, e.g. if we entered insert mode on page load.
      HUD.hide()
# With a response, cache the completion-key state; without one, request it from the background
# page (which replies by invoking this function again).
refreshCompletionKeys = (response) ->
  if (response)
    currentCompletionKeys = response.completionKeys
    if (response.validFirstKeys)
      validFirstKeys = response.validFirstKeys
  else
    chrome.runtime.sendMessage({ handler: "getCompletionKeys" }, refreshCompletionKeys)
# A keyChar can begin a new command if the background page listed it as a valid first key,
# or if it is a non-zero digit (digits introduce a repeat count).
# NOTE: the original line was corrupted ("/[<KEY>(keyChar)"); the digit regex is
# reconstructed from upstream Vimium.
isValidFirstKey = (keyChar) ->
  validFirstKeys[keyChar] || /[1-9]/.test(keyChar)
# Focusing an editable element (via click, tab, or script) puts us into insert mode, unless
# find mode is the one doing the focusing.
onFocusCapturePhase = (event) ->
  if (isFocusable(event.target) && !findMode)
    enterInsertModeWithoutShowingIndicator(event.target)
# Blurring a focusable element exits insert mode (if that element holds the insert-mode lock).
onBlurCapturePhase = (event) ->
  if (isFocusable(event.target))
    exitInsertMode(event.target)
#
# Returns true if the element is focusable. This includes embeds like Flash, which steal the keyboard focus.
#
isFocusable = (element) -> isEditable(element) || isEmbed(element)
#
# Embedded elements like Flash and quicktime players can obtain focus but cannot be programmatically
# unfocused.
#
# Fix: the comparison must be >= 0, not > 0 — "embed" sits at index 0 of the list, so the
# original test wrongly reported <embed> elements as non-embeds.
isEmbed = (element) -> ["embed", "object"].indexOf(element.nodeName.toLowerCase()) >= 0
#
# Input or text elements are considered focusable and able to receive their own keyboard events,
# and will enter insert mode if focused. Also note that the "contentEditable" attribute can be set on
# any element which makes it a rich text editor, like the notes on jjot.com.
#
isEditable = (target) ->
  # Rich-text elements are always editable.
  return true if target.isContentEditable
  tag = target.nodeName.toLowerCase()
  # Use a blacklist of non-text input types rather than a whitelist, because new HTML5 form
  # controls are still being implemented.
  nonTextInputTypes = ["radio", "checkbox"]
  return true if tag == "input" and target.type not in nonTextInputTypes
  tag in ["textarea", "select"]
#
# Enters insert mode and show an "Insert mode" message. Showing the UI is only useful when entering insert
# mode manually by pressing "i". In most cases we do not show any UI (enterInsertModeWithoutShowingIndicator)
#
window.enterInsertMode = (target) ->
  enterInsertModeWithoutShowingIndicator(target)
  HUD.show("Insert mode")
#
# We cannot count on 'focus' and 'blur' events to happen sequentially. For example, if blurring element A
# causes element B to come into focus, we may get "B focus" before "A blur". Thus we only leave insert mode
# when the last editable element that came into focus -- which insertModeLock points to -- has been blurred.
# If insert mode is entered manually (via pressing 'i'), then we set insertModeLock to 'undefined', and only
# leave insert mode when the user presses <ESC>.
#
enterInsertModeWithoutShowingIndicator = (target) -> insertModeLock = target
exitInsertMode = (target) ->
  if (target == undefined || insertModeLock == target)
    insertModeLock = null
    HUD.hide()
# Insert mode is active whenever the lock is non-null; note that an undefined lock (manual
# entry via 'i') also counts as insert mode, since != compiles to a strict comparison.
isInsertMode = -> insertModeLock != null
# should be called whenever rawQuery is modified.
updateFindModeQuery = ->
  # the query can be treated differently (e.g. as a plain string versus regex depending on the presence of
  # escape sequences. '\' is the escape character and needs to be escaped itself to be used as a normal
  # character. here we grep for the relevant escape sequences.
  findModeQuery.isRegex = settings.get 'regexFindMode'
  hasNoIgnoreCaseFlag = false
  # \r / \R toggle regex mode on/off, \I forces case-sensitivity, \\ is a literal backslash.
  findModeQuery.parsedQuery = findModeQuery.rawQuery.replace /\\./g, (match) ->
    switch (match)
      when "\\r"
        findModeQuery.isRegex = true
        return ""
      when "\\R"
        findModeQuery.isRegex = false
        return ""
      when "\\I"
        hasNoIgnoreCaseFlag = true
        return ""
      when "\\\\"
        return "\\"
      else
        return match
  # default to 'smartcase' mode, unless noIgnoreCase is explicitly specified
  findModeQuery.ignoreCase = !hasNoIgnoreCaseFlag && !Utils.hasUpperCase(findModeQuery.parsedQuery)
  # if we are dealing with a regex, grep for all matches in the text, and then call window.find() on them
  # sequentially so the browser handles the scrolling / text selection.
  if findModeQuery.isRegex
    try
      pattern = new RegExp(findModeQuery.parsedQuery, "g" + (if findModeQuery.ignoreCase then "i" else ""))
    catch error
      # if we catch a SyntaxError, assume the user is not done typing yet and return quietly
      return
    # innerText will not return the text of hidden elements, and strip out tags while preserving newlines
    text = document.body.innerText
    findModeQuery.regexMatches = text.match(pattern)
    findModeQuery.activeRegexIndex = 0
    # matchCount may be undefined when there are no matches (match() returned null).
    findModeQuery.matchCount = findModeQuery.regexMatches?.length
  # if we are doing a basic plain string match, we still want to grep for matches of the string, so we can
  # show the number of results. We can grep on document.body.innerText, as it should be indistinguishable
  # from the internal representation used by window.find.
  else
    # escape all special characters, so RegExp just parses the string 'as is'.
    # Taken from http://stackoverflow.com/questions/3446170/escape-string-for-use-in-javascript-regex
    escapeRegExp = /[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g
    parsedNonRegexQuery = findModeQuery.parsedQuery.replace(escapeRegExp, (char) -> "\\" + char)
    pattern = new RegExp(parsedNonRegexQuery, "g" + (if findModeQuery.ignoreCase then "i" else ""))
    text = document.body.innerText
    findModeQuery.matchCount = text.match(pattern)?.length
# Append a typed character to the find query and re-run the search in place.
handleKeyCharForFindMode = (keyChar) ->
  findModeQuery.rawQuery += keyChar
  updateFindModeQuery()
  performFindInPlace()
  showFindModeHUDForQuery()
# <esc> exits find mode, reverts the find-mode selection styling, and focuses the result.
handleEscapeForFindMode = ->
  exitFindMode()
  document.body.classList.remove("vimiumFindMode")
  # removing the class does not re-color existing selections. we recreate the current selection so it reverts
  # back to the default color.
  selection = window.getSelection()
  unless selection.isCollapsed
    range = window.getSelection().getRangeAt(0)
    window.getSelection().removeAllRanges()
    window.getSelection().addRange(range)
  focusFoundLink() || selectFoundInputElement()
# Backspace/delete removes the last query character; deleting from an empty query exits find mode.
handleDeleteForFindMode = ->
  if (findModeQuery.rawQuery.length == 0)
    exitFindMode()
    performFindInPlace()
  else
    findModeQuery.rawQuery = findModeQuery.rawQuery.substring(0, findModeQuery.rawQuery.length - 1)
    updateFindModeQuery()
    performFindInPlace()
    showFindModeHUDForQuery()
# <esc> sends us into insert mode if possible, but <cr> does not.
# <esc> corresponds approximately to 'nevermind, I have found it already' while <cr> means 'I want to save
# this query and do more searches with it'
handleEnterForFindMode = ->
  exitFindMode()
  focusFoundLink()
  document.body.classList.add("vimiumFindMode")
  settings.set("findModeRawQuery", findModeQuery.rawQuery)
# Re-run the current query from the current position, without advancing to the next match.
performFindInPlace = ->
  cachedScrollX = window.scrollX
  cachedScrollY = window.scrollY
  query = if findModeQuery.isRegex then getNextQueryFromRegexMatches(0) else findModeQuery.parsedQuery
  # Search backwards first to "free up" the current word as eligible for the real forward search. This allows
  # us to search in place without jumping around between matches as the query grows.
  executeFind(query, { backwards: true, caseSensitive: !findModeQuery.ignoreCase })
  # We need to restore the scroll position because we might've lost the right position by searching
  # backwards.
  window.scrollTo(cachedScrollX, cachedScrollY)
  findModeQueryHasResults = executeFind(query, { caseSensitive: !findModeQuery.ignoreCase })
# :options is an optional dict. valid parameters are 'caseSensitive' and 'backwards'.
executeFind = (query, options) ->
  options = options || {}
  # rather hacky, but this is our way of signalling to the insertMode listener not to react to the focus
  # changes that find() induces.
  oldFindMode = findMode
  findMode = true
  document.body.classList.add("vimiumFindMode")
  # prevent find from matching its own search query in the HUD
  HUD.hide(true)
  # ignore the selectionchange event generated by find()
  document.removeEventListener("selectionchange",restoreDefaultSelectionHighlight, true)
  result = window.find(query, options.caseSensitive, options.backwards, true, false, true, false)
  # Re-attach the listener asynchronously, after find()'s selectionchange has fired.
  setTimeout(
    -> document.addEventListener("selectionchange", restoreDefaultSelectionHighlight, true)
    0)
  findMode = oldFindMode
  # we need to save the anchor node here because <esc> seems to nullify it, regardless of whether we do
  # preventDefault()
  findModeAnchorNode = document.getSelection().anchorNode
  result
# Any user-driven selection change reverts the find-mode highlight styling.
restoreDefaultSelectionHighlight = -> document.body.classList.remove("vimiumFindMode")
# If the last find landed inside a link, give that link the focus.
focusFoundLink = ->
  if (findModeQueryHasResults)
    link = getLinkFromSelection()
    link.focus() if link
# True when `child` is `parent` itself or lies beneath it in the DOM tree.
isDOMDescendant = (parent, child) ->
  # Walk up the parentNode chain from child until we either meet parent or fall off the top.
  current = child
  until current == null
    return true if current == parent
    current = current.parentNode
  false
selectFoundInputElement = ->
  # if the found text is in an input element, getSelection().anchorNode will be null, so we use activeElement
  # instead. however, since the last focused element might not be the one currently pointed to by find (e.g.
  # the current one might be disabled and therefore unable to receive focus), we use the approximate
  # heuristic of checking that the last anchor node is an ancestor of our element.
  if (findModeQueryHasResults && document.activeElement &&
      DomUtils.isSelectable(document.activeElement) &&
      isDOMDescendant(findModeAnchorNode, document.activeElement))
    DomUtils.simulateSelect(document.activeElement)
    # the element has already received focus via find(), so invoke insert mode manually
    enterInsertModeWithoutShowingIndicator(document.activeElement)
# Step through the precomputed regex matches, wrapping around in either direction.
getNextQueryFromRegexMatches = (stepSize) ->
  # find()ing an empty query always returns false
  return "" unless findModeQuery.regexMatches
  totalMatches = findModeQuery.regexMatches.length
  # Adding totalMatches before the modulo keeps the index non-negative for backwards steps.
  findModeQuery.activeRegexIndex += stepSize + totalMatches
  findModeQuery.activeRegexIndex %= totalMatches
  findModeQuery.regexMatches[findModeQuery.activeRegexIndex]
# Run the saved query in the given direction and focus whatever the match lands on.
findAndFocus = (backwards) ->
  # check if the query has been changed by a script in another frame
  mostRecentQuery = settings.get("findModeRawQuery") || ""
  if (mostRecentQuery != findModeQuery.rawQuery)
    findModeQuery.rawQuery = mostRecentQuery
    updateFindModeQuery()
  query =
    if findModeQuery.isRegex
      getNextQueryFromRegexMatches(if backwards then -1 else 1)
    else
      findModeQuery.parsedQuery
  findModeQueryHasResults =
    executeFind(query, { backwards: backwards, caseSensitive: !findModeQuery.ignoreCase })
  if (!findModeQueryHasResults)
    HUD.showForDuration("No matches for '" + findModeQuery.rawQuery + "'", 1000)
    return
  # if we have found an input element via 'n', pressing <esc> immediately afterwards sends us into insert
  # mode
  elementCanTakeInput = document.activeElement &&
    DomUtils.isSelectable(document.activeElement) &&
    isDOMDescendant(findModeAnchorNode, document.activeElement)
  if (elementCanTakeInput)
    handlerStack.push({
      keydown: (event) ->
        @remove()
        if (KeyboardUtils.isEscape(event))
          DomUtils.simulateSelect(document.activeElement)
          enterInsertModeWithoutShowingIndicator(document.activeElement)
          return false # we have "consumed" this event, so do not propagate
        return true
    })
  focusFoundLink()
window.performFind = -> findAndFocus()
window.performBackwardsFind = -> findAndFocus(true)
# Walk up from the selection's anchor node looking for an enclosing <a>.
getLinkFromSelection = ->
  node = window.getSelection().anchorNode
  while (node && node != document.body)
    return node if (node.nodeName.toLowerCase() == "a")
    node = node.parentNode
  null
# used by the findAndFollow* functions.
followLink = (linkElement) ->
  if (linkElement.nodeName.toLowerCase() == "link")
    window.location.href = linkElement.href
  else
    # if we can click on it, don't simply set location.href: some next/prev links are meant to trigger AJAX
    # calls, like the 'more' button on GitHub's newsfeed.
    linkElement.scrollIntoView()
    linkElement.focus()
    DomUtils.simulateClick(linkElement)
#
# Find and follow a link which matches any one of a list of strings. If there are multiple such links, they
# are prioritized for shortness, by their position in :linkStrings, how far down the page they are located,
# and finally by whether the match is exact. Practically speaking, this means we favor 'next page' over 'the
# next big thing', and 'more' over 'nextcompany', even if 'next' occurs before 'more' in :linkStrings.
#
findAndFollowLink = (linkStrings) ->
  linksXPath = DomUtils.makeXPath(["a", "*[@onclick or @role='link' or contains(@class, 'button')]"])
  links = DomUtils.evaluateXPath(linksXPath, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE)
  candidateLinks = []
  # at the end of this loop, candidateLinks will contain all visible links that match our patterns
  # links lower in the page are more likely to be the ones we want, so we loop through the snapshot backwards
  for i in [(links.snapshotLength - 1)..0] by -1
    link = links.snapshotItem(i)
    # ensure link is visible (we don't mind if it is scrolled offscreen)
    boundingClientRect = link.getBoundingClientRect()
    if (boundingClientRect.width == 0 || boundingClientRect.height == 0)
      continue
    computedStyle = window.getComputedStyle(link, null)
    if (computedStyle.getPropertyValue("visibility") != "visible" ||
        computedStyle.getPropertyValue("display") == "none")
      continue
    # Keep the link only if its text contains at least one of the candidate strings.
    linkMatches = false
    for linkString in linkStrings
      if (link.innerText.toLowerCase().indexOf(linkString) != -1)
        linkMatches = true
        break
    continue unless linkMatches
    candidateLinks.push(link)
  return if (candidateLinks.length == 0)
  for link in candidateLinks
    link.wordCount = link.innerText.trim().split(/\s+/).length
  # We can use this trick to ensure that Array.sort is stable. We need this property to retain the reverse
  # in-page order of the links.
  candidateLinks.forEach((a,i) -> a.originalIndex = i)
  # favor shorter links, and ignore those that are more than one word longer than the shortest link
  candidateLinks =
    candidateLinks
      .sort((a, b) ->
        if (a.wordCount == b.wordCount) then a.originalIndex - b.originalIndex else a.wordCount - b.wordCount
      )
      .filter((a) -> a.wordCount <= candidateLinks[0].wordCount + 1)
  # Try each link string in priority order, preferring whole-word matches where applicable.
  for linkString in linkStrings
    exactWordRegex =
      if /\b/.test(linkString[0]) or /\b/.test(linkString[linkString.length - 1])
        new RegExp "\\b" + linkString + "\\b", "i"
      else
        new RegExp linkString, "i"
    for candidateLink in candidateLinks
      if (exactWordRegex.test(candidateLink.innerText))
        followLink(candidateLink)
        return true
  false
# Follow the first <link>/<a>/<area> element whose rel attribute equals :value (e.g. "next").
findAndFollowRel = (value) ->
  relTags = ["link", "a", "area"]
  for tag in relTags
    elements = document.getElementsByTagName(tag)
    for element in elements
      if (element.hasAttribute("rel") && element.rel == value)
        followLink(element)
        return true
window.goPrevious = ->
  previousPatterns = settings.get("previousPatterns") || ""
  previousStrings = previousPatterns.split(",").filter( (s) -> s.trim().length )
  findAndFollowRel("prev") || findAndFollowLink(previousStrings)
window.goNext = ->
  nextPatterns = settings.get("nextPatterns") || ""
  nextStrings = nextPatterns.split(",").filter( (s) -> s.trim().length )
  findAndFollowRel("next") || findAndFollowLink(nextStrings)
# Display the current find query and its match count; an empty query never shows "No Matches".
showFindModeHUDForQuery = ->
  if findModeQueryHasResults or findModeQuery.parsedQuery.length == 0
    HUD.show("/#{findModeQuery.rawQuery} (#{findModeQuery.matchCount} Matches)")
  else
    HUD.show("/#{findModeQuery.rawQuery} (No Matches)")
# Begin an interactive search: reset the query and show the "/" HUD prompt.
window.enterFindMode = ->
  findModeQuery = { rawQuery: "" }
  findMode = true
  HUD.show("/")
exitFindMode = ->
  findMode = false
  HUD.hide()
# Build and display the help dialog, but only in the frame it was requested for.
window.showHelpDialog = (html, fid) ->
  return if (isShowingHelpDialog || !document.body || fid != frameId)
  isShowingHelpDialog = true
  container = document.createElement("div")
  container.id = "vimiumHelpDialogContainer"
  container.className = "vimiumReset"
  document.body.appendChild(container)
  container.innerHTML = html
  container.getElementsByClassName("closeButton")[0].addEventListener("click", hideHelpDialog, false)
  VimiumHelpDialog =
    # This setting is pulled out of local storage. It's false by default.
    getShowAdvancedCommands: -> settings.get("helpDialog_showAdvancedCommands")
    init: () ->
      this.dialogElement = document.getElementById("vimiumHelpDialog")
      this.dialogElement.getElementsByClassName("toggleAdvancedCommands")[0].addEventListener("click",
        VimiumHelpDialog.toggleAdvancedCommands, false)
      this.dialogElement.style.maxHeight = window.innerHeight - 80
      this.showAdvancedCommands(this.getShowAdvancedCommands())
    #
    # Advanced commands are hidden by default so they don't overwhelm new and casual users.
    #
    toggleAdvancedCommands: (event) ->
      event.preventDefault()
      showAdvanced = VimiumHelpDialog.getShowAdvancedCommands()
      VimiumHelpDialog.showAdvancedCommands(!showAdvanced)
      settings.set("helpDialog_showAdvancedCommands", !showAdvanced)
    showAdvancedCommands: (visible) ->
      VimiumHelpDialog.dialogElement.getElementsByClassName("toggleAdvancedCommands")[0].innerHTML =
        if visible then "Hide advanced commands" else "Show advanced commands"
      advancedEls = VimiumHelpDialog.dialogElement.getElementsByClassName("advanced")
      for el in advancedEls
        el.style.display = if visible then "table-row" else "none"
  VimiumHelpDialog.init()
  container.getElementsByClassName("optionsPage")[0].addEventListener("click", (clickEvent) ->
    clickEvent.preventDefault()
    chrome.runtime.sendMessage({handler: "openOptionsPageInNewTab"})
    false)
# Remove the help dialog from the DOM, if it is present.
hideHelpDialog = (clickEvent) ->
  isShowingHelpDialog = false
  helpDialog = document.getElementById("vimiumHelpDialogContainer")
  if (helpDialog)
    helpDialog.parentNode.removeChild(helpDialog)
  if (clickEvent)
    clickEvent.preventDefault()
toggleHelpDialog = (html, fid) ->
  if (isShowingHelpDialog)
    hideHelpDialog()
  else
    showHelpDialog(html, fid)
#
# A heads-up-display (HUD) for showing Vimium page operations.
# Note: you cannot interact with the HUD until document.body is available.
#
HUD =
  _tweenId: -1
  _displayElement: null
  _upgradeNotificationElement: null
  # This HUD is styled to precisely mimic the chrome HUD on Mac. Use the "has_popup_and_link_hud.html"
  # test harness to tweak these styles to match Chrome's. One limitation of our HUD display is that
  # it doesn't sit on top of horizontal scrollbars like Chrome's HUD does.
  showForDuration: (text, duration) ->
    HUD.show(text)
    HUD._showForDurationTimerId = setTimeout((-> HUD.hide()), duration)
  show: (text) ->
    return unless HUD.enabled()
    # Cancel any pending hide from a previous showForDuration and any in-flight fade.
    clearTimeout(HUD._showForDurationTimerId)
    HUD.displayElement().innerText = text
    clearInterval(HUD._tweenId)
    HUD._tweenId = Tween.fade(HUD.displayElement(), 1.0, 150)
    HUD.displayElement().style.display = ""
  showUpgradeNotification: (version) ->
    HUD.upgradeNotificationElement().innerHTML = "Vimium has been updated to
      <a class='vimiumReset'
      href='https://chrome.google.com/extensions/detail/dbepggeogbaibhgnhhndojpepiihcmeb'>
      #{version}</a>.<a class='vimiumReset close-button' href='#'>x</a>"
    links = HUD.upgradeNotificationElement().getElementsByTagName("a")
    links[0].addEventListener("click", HUD.onUpdateLinkClicked, false)
    links[1].addEventListener "click", (event) ->
      event.preventDefault()
      HUD.onUpdateLinkClicked()
    Tween.fade(HUD.upgradeNotificationElement(), 1.0, 150)
  onUpdateLinkClicked: (event) ->
    HUD.hideUpgradeNotification()
    chrome.runtime.sendMessage({ handler: "upgradeNotificationClosed" })
  hideUpgradeNotification: (clickEvent) ->
    Tween.fade(HUD.upgradeNotificationElement(), 0, 150,
      -> HUD.upgradeNotificationElement().style.display = "none")
  #
  # Retrieves the HUD HTML element.
  #
  displayElement: ->
    if (!HUD._displayElement)
      HUD._displayElement = HUD.createHudElement()
      # Keep this far enough to the right so that it doesn't collide with the "popups blocked" chrome HUD.
      HUD._displayElement.style.right = "150px"
    HUD._displayElement
  upgradeNotificationElement: ->
    if (!HUD._upgradeNotificationElement)
      HUD._upgradeNotificationElement = HUD.createHudElement()
      # Position this just to the left of our normal HUD.
      HUD._upgradeNotificationElement.style.right = "315px"
    HUD._upgradeNotificationElement
  createHudElement: ->
    element = document.createElement("div")
    element.className = "vimiumReset vimiumHUD"
    document.body.appendChild(element)
    element
  hide: (immediate) ->
    clearInterval(HUD._tweenId)
    if (immediate)
      HUD.displayElement().style.display = "none"
    else
      HUD._tweenId = Tween.fade(HUD.displayElement(), 0, 150,
        -> HUD.displayElement().style.display = "none")
  isReady: -> document.body != null
  # A preference which can be toggled in the Options page.
  enabled: -> !settings.get("hideHud")
Tween =
  #
  # Fades an element's alpha. Returns a timer ID which can be used to stop the tween via clearInterval.
  #
  fade: (element, toAlpha, duration, onComplete) ->
    state = {}
    state.duration = duration
    state.startTime = (new Date()).getTime()
    state.from = parseInt(element.style.opacity) || 0
    state.to = toAlpha
    state.onUpdate = (value) ->
      element.style.opacity = value
      if (value == state.to && onComplete)
        onComplete()
    # Step the tween every 50ms until performTweenStep detects completion.
    state.timerId = setInterval((-> Tween.performTweenStep(state)), 50)
    state.timerId
  performTweenStep: (state) ->
    elapsed = (new Date()).getTime() - state.startTime
    if (elapsed >= state.duration)
      clearInterval(state.timerId)
      state.onUpdate(state.to)
    else
      # Linear interpolation between from and to.
      value = (elapsed / state.duration) * (state.to - state.from) + state.from
      state.onUpdate(value)
# Kick off pre-DOM-ready initialization immediately, and the rest on DOMContentLoaded.
initializePreDomReady()
window.addEventListener("DOMContentLoaded", initializeOnDomReady)
# Report the scroll position on unload so the background page can restore it later.
window.onbeforeunload = ->
  chrome.runtime.sendMessage(
    handler: "updateScrollPosition"
    scrollX: window.scrollX
    scrollY: window.scrollY)
# Export globals for other content scripts; `exports` exists under CommonJS (e.g. tests).
root = exports ? window
root.settings = settings
root.HUD = HUD
root.handlerStack = handlerStack
root.frameId = frameId
#
# This content script takes input from its webpage and executes commands locally on behalf of the background
# page. It must be run prior to domReady so that we perform some operations very early. We tell the
# background page that we're in domReady and ready to accept normal commands by connecting to a port named
# "domReady".
#
window.handlerStack = new HandlerStack
# The element holding the insert-mode lock; null when not in insert mode.
insertModeLock = null
findMode = false
findModeQuery = { rawQuery: "", matchCount: 0 }
findModeQueryHasResults = false
findModeAnchorNode = null
isShowingHelpDialog = false
keyPort = null
# Users can disable Vimium on URL patterns via the settings page.
isEnabledForUrl = true
# Keys that continue a partial Vimium command, provided by the background page via refreshCompletionKeys.
currentCompletionKeys = null
validFirstKeys = null
# The types in <input type="..."> that we consider for focusInput command. Right now this is recalculated in
# each content script. Alternatively we could calculate it once in the background page and use a request to
# fetch it each time.
# Should we include the HTML5 date pickers here?
# The corresponding XPath for such elements.
# NOTE(review): this region duplicates definitions that appear earlier in the file — it looks
# like a merge/paste artifact; confirm which copy is authoritative.
textInputXPath = (->
  textInputTypes = ["text", "search", "email", "url", "number", "password"]
  # NOTE(review): there is no space before "or not(@type)"; XPath tokenization tolerates this
  # because the preceding string literal is quote-delimited, but confirm the intent.
  inputElements = ["input[" +
    "(" + textInputTypes.map((type) -> '@type="' + type + '"').join(" or ") + "or not(@type))" +
    " and not(@disabled or @readonly)]",
    "textarea", "*[@contenteditable='' or translate(@contenteditable, 'TRUE', 'true')='true']"]
  DomUtils.makeXPath(inputElements)
)()
#
# settings provides a browser-global localStorage-backed dict. get() and set() are synchronous, but load()
# must be called beforehand to ensure get() will return up-to-date values.
#
settings =
  # Long-lived message port to the background page's "settings" handler; opened lazily by init().
  port: null
  # Local cache of setting values, populated asynchronously by receiveMessage.
  values: {}
  loadedValues: 0
  valuesToLoad: ["scrollStepSize", "linkHintCharacters", "linkHintNumbers", "filterLinkHints", "hideHud",
    "previousPatterns", "nextPatterns", "findModeRawQuery", "regexFindMode", "userDefinedLinkHintCss",
    "helpDialog_showAdvancedCommands"]
  isLoaded: false
  eventListeners: {}
  init: ->
    @port = chrome.runtime.connect({ name: "settings" })
    @port.onMessage.addListener(@receiveMessage)
  # Synchronous read from the local cache; call load() beforehand for up-to-date values.
  get: (key) -> @values[key]
  set: (key, value) ->
    @init() unless @port
    @values[key] = value
    @port.postMessage({ operation: "set", key: key, value: value })
  # Request every value in valuesToLoad; replies arrive via receiveMessage.
  load: ->
    @init() unless @port
    for i of @valuesToLoad
      @port.postMessage({ operation: "get", key: @valuesToLoad[i] })
  receiveMessage: (args) ->
    # not using 'this' due to issues with binding on callback
    settings.values[args.key] = args.value
    # since load() can be called more than once, loadedValues can be greater than valuesToLoad, but we test
    # for equality so initializeOnReady only runs once
    if (++settings.loadedValues == settings.valuesToLoad.length)
      settings.isLoaded = true
      listener = null
      while (listener = settings.eventListeners["load"].pop())
        listener()
  addEventListener: (eventName, callback) ->
    if (!(eventName of @eventListeners))
      @eventListeners[eventName] = []
    @eventListeners[eventName].push(callback)
#
# Give this frame a unique id.
#
frameId = Math.floor(Math.random()*999999999)
# Matches key notation with at least one modifier prefix, e.g. <c-a>, <m-c-x>.
hasModifiersRegex = /^<([amc]-)+.>/
#
# Complete initialization work that should be done prior to DOMReady.
#
initializePreDomReady = ->
  # LinkHints reads settings (hint characters etc.), so defer its init until they load.
  settings.addEventListener("load", LinkHints.init.bind(LinkHints))
  settings.load()
  Scroller.init()
  # Asks the background page whether Vimium is enabled here; attaches listeners if so.
  checkIfEnabledForUrl()
  refreshCompletionKeys()
  # Send the key to the key handler in the background page.
  keyPort = chrome.runtime.connect({ name: "keyDown" })
requestHandlers =
hideUpgradeNotification: -> HUD.hideUpgradeNotification()
showUpgradeNotification: (request) -> HUD.showUpgradeNotification(request.version)
showHUDforDuration: (request) -> HUD.showForDuration request.text, request.duration
toggleHelpDialog: (request) -> toggleHelpDialog(request.dialogHtml, request.frameId)
focusFrame: (request) -> if (frameId == request.frameId) then focusThisFrame(request.highlight)
refreshCompletionKeys: refreshCompletionKeys
getScrollPosition: -> scrollX: window.scrollX, scrollY: window.scrollY
setScrollPosition: (request) -> setScrollPosition request.scrollX, request.scrollY
executePageCommand: executePageCommand
getActiveState: -> { enabled: isEnabledForUrl }
disableVimium: disableVimium
chrome.runtime.onMessage.addListener (request, sender, sendResponse) ->
# in the options page, we will receive requests from both content and background scripts. ignore those
# from the former.
return if sender.tab and not sender.tab.url.startsWith 'chrome-extension://'
return unless isEnabledForUrl or request.name == 'getActiveState'
sendResponse requestHandlers[request.name](request, sender)
# Ensure the sendResponse callback is freed.
false
#
# This is called once the background page has told us that Vimium should be enabled for the current URL.
#
initializeWhenEnabled = ->
  # All listeners use the capture phase so Vimium sees events before the page's own handlers.
  document.addEventListener("keydown", onKeydown, true)
  document.addEventListener("keypress", onKeypress, true)
  document.addEventListener("keyup", onKeyup, true)
  document.addEventListener("focus", onFocusCapturePhase, true)
  document.addEventListener("blur", onBlurCapturePhase, true)
  document.addEventListener("DOMActivate", onDOMActivate, true)
  # If an editable element is already focused (e.g. autofocus), start in insert mode.
  enterInsertModeIfElementIsFocused()
#
# Used to disable Vimium without needing to reload the page.
# This is called if the current page's url is blacklisted using the popup UI.
#
disableVimium = ->
  # Mirror of initializeWhenEnabled: detach exactly the listeners it attached.
  document.removeEventListener("keydown", onKeydown, true)
  document.removeEventListener("keypress", onKeypress, true)
  document.removeEventListener("keyup", onKeyup, true)
  document.removeEventListener("focus", onFocusCapturePhase, true)
  document.removeEventListener("blur", onBlurCapturePhase, true)
  document.removeEventListener("DOMActivate", onDOMActivate, true)
  isEnabledForUrl = false
#
# The backend needs to know which frame has focus.
#
window.addEventListener "focus", ->
# settings may have changed since the frame last had focus
settings.load()
chrome.runtime.sendMessage({ handler: "frameFocused", frameId: frameId })
#
# Initialization tasks that must wait for the document to be ready.
#
initializeOnDomReady = ->
registerFrameIfSizeAvailable(window.top == window.self)
enterInsertModeIfElementIsFocused() if isEnabledForUrl
# Tell the background page we're in the dom ready state.
chrome.runtime.connect({ name: "domReady" })
# Report this frame (id, area, whether it is the top frame) to the background page.
# This is a little hacky but sometimes the size wasn't available on domReady, so we
# retry every 100ms until the window dimensions are populated.
registerFrameIfSizeAvailable = (is_top) ->
  if (innerWidth != undefined && innerWidth != 0 && innerHeight != undefined && innerHeight != 0)
    chrome.runtime.sendMessage(
      handler: "registerFrame"
      frameId: frameId
      area: innerWidth * innerHeight
      is_top: is_top
      total: frames.length + 1)
  else
    setTimeout((-> registerFrameIfSizeAvailable(is_top)), 100)
#
# Enters insert mode if the currently focused element in the DOM is focusable.
#
enterInsertModeIfElementIsFocused = ->
if (document.activeElement && isEditable(document.activeElement) && !findMode)
enterInsertModeWithoutShowingIndicator(document.activeElement)
onDOMActivate = (event) -> handlerStack.bubbleEvent 'DOMActivate', event
# Run a named page command on behalf of the background page, either passing the repeat
# count to the command or invoking it :count times. Ignored unless the request targets
# this frame.
executePageCommand = (request) ->
  return unless frameId == request.frameId
  if (request.passCountToFunction)
    Utils.invokeCommandString(request.command, [request.count])
  else
    Utils.invokeCommandString(request.command) for i in [0...request.count]
  refreshCompletionKeys(request)
# Restore a previously-saved scroll position (sent by the background page); a no-op when
# the saved position is the origin.
setScrollPosition = (scrollX, scrollY) ->
  if (scrollX > 0 || scrollY > 0)
    DomUtils.documentReady(-> window.scrollTo(scrollX, scrollY))
#
# Called from the backend in order to change frame focus.
#
window.focusThisFrame = (shouldHighlight) ->
window.focus()
if (document.body && shouldHighlight)
borderWas = document.body.style.border
document.body.style.border = '5px solid yellow'
setTimeout((-> document.body.style.border = borderWas), 200)
extend window,
scrollToBottom: -> Scroller.scrollTo "y", "max"
scrollToTop: -> Scroller.scrollTo "y", 0
scrollToLeft: -> Scroller.scrollTo "x", 0
scrollToRight: -> Scroller.scrollTo "x", "max"
scrollUp: -> Scroller.scrollBy "y", -1 * settings.get("scrollStepSize")
scrollDown: -> Scroller.scrollBy "y", settings.get("scrollStepSize")
scrollPageUp: -> Scroller.scrollBy "y", "viewSize", -1/2
scrollPageDown: -> Scroller.scrollBy "y", "viewSize", 1/2
scrollFullPageUp: -> Scroller.scrollBy "y", "viewSize", -1
scrollFullPageDown: -> Scroller.scrollBy "y", "viewSize"
scrollLeft: -> Scroller.scrollBy "x", -1 * settings.get("scrollStepSize")
scrollRight: -> Scroller.scrollBy "x", settings.get("scrollStepSize")
extend window,
reload: -> window.location.reload()
goBack: (count) -> history.go(-count)
goForward: (count) -> history.go(count)
goUp: (count) ->
url = window.location.href
if (url[url.length - 1] == "/")
url = url.substring(0, url.length - 1)
urlsplit = url.split("/")
# make sure we haven't hit the base domain yet
if (urlsplit.length > 3)
urlsplit = urlsplit.slice(0, Math.max(3, urlsplit.length - count))
window.location.href = urlsplit.join('/')
goToRoot: () ->
window.location.href = window.location.origin
toggleViewSource: ->
chrome.runtime.sendMessage { handler: "getCurrentTabUrl" }, (url) ->
if (url.substr(0, 12) == "view-source:")
url = url.substr(12, url.length - 12)
else
url = "view-source:" + url
chrome.runtime.sendMessage({ handler: "openUrlInNewTab", url: url, selected: true })
copyCurrentUrl: ->
# TODO(PI:NAME:<NAME>END_PI): When the following bug is fixed, revisit this approach of sending back to the background
# page to copy.
# http://code.google.com/p/chromium/issues/detail?id=55188
chrome.runtime.sendMessage { handler: "getCurrentTabUrl" }, (url) ->
chrome.runtime.sendMessage { handler: "copyToClipboard", data: url }
HUD.showForDuration("Yanked URL", 1000)
focusInput: (count) ->
# Focus the first input element on the page, and create overlays to highlight all the input elements, with
# the currently-focused element highlighted specially. Tabbing will shift focus to the next input element.
# Pressing any other key will remove the overlays and the special tab behavior.
resultSet = DomUtils.evaluateXPath(textInputXPath, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE)
visibleInputs =
for i in [0...resultSet.snapshotLength] by 1
element = resultSet.snapshotItem(i)
rect = DomUtils.getVisibleClientRect(element)
continue if rect == null
{ element: element, rect: rect }
return if visibleInputs.length == 0
selectedInputIndex = Math.min(count - 1, visibleInputs.length - 1)
visibleInputs[selectedInputIndex].element.focus()
return if visibleInputs.length == 1
hints = for tuple in visibleInputs
hint = document.createElement("div")
hint.className = "vimiumReset internalVimiumInputHint vimiumInputHint"
# minus 1 for the border
hint.style.left = (tuple.rect.left - 1) + window.scrollX + "px"
hint.style.top = (tuple.rect.top - 1) + window.scrollY + "px"
hint.style.width = tuple.rect.width + "px"
hint.style.height = tuple.rect.height + "px"
hint
hints[selectedInputIndex].classList.add 'internalVimiumSelectedInputHint'
hintContainingDiv = DomUtils.addElementList(hints,
{ id: "vimiumInputMarkerContainer", className: "vimiumReset" })
handlerStack.push keydown: (event) ->
if event.keyCode == KeyboardUtils.keyCodes.tab
hints[selectedInputIndex].classList.remove 'internalVimiumSelectedInputHint'
if event.shiftKey
if --selectedInputIndex == -1
selectedInputIndex = hints.length - 1
else
if ++selectedInputIndex == hints.length
selectedInputIndex = 0
hints[selectedInputIndex].classList.add 'internalVimiumSelectedInputHint'
visibleInputs[selectedInputIndex].element.focus()
else unless event.keyCode == KeyboardUtils.keyCodes.shiftKey
DomUtils.removeElement hintContainingDiv
@remove()
return true
false
#
# Sends everything except i & ESC to the handler in background_page. i & ESC are special because they control
# insert mode which is local state to the page. The key will be are either a single ascii letter or a
# key-modifier pair, e.g. <c-a> for control a.
#
# Note that some keys will only register keydown events and not keystroke events, e.g. ESC.
#
# Capture-phase keypress handler: printable characters go to find mode when it is active,
# otherwise to the background page's key handler (unless insert mode is on).
onKeypress = (event) ->
  return unless handlerStack.bubbleEvent('keypress', event)
  keyChar = ""
  # Ignore modifier keys by themselves.
  if (event.keyCode > 31)
    keyChar = String.fromCharCode(event.charCode)
    # Enter insert mode when the user enables the native find interface.
    if (keyChar == "f" && KeyboardUtils.isPrimaryModifierKey(event))
      enterInsertModeWithoutShowingIndicator()
      return
    if (keyChar)
      if (findMode)
        handleKeyCharForFindMode(keyChar)
        DomUtils.suppressEvent(event)
      else if (!isInsertMode() && !findMode)
        # Suppress keys bound to Vimium commands so the page never sees them;
        # forward the key to the background page either way.
        if (currentCompletionKeys.indexOf(keyChar) != -1)
          DomUtils.suppressEvent(event)
        keyPort.postMessage({ keyChar:keyChar, frameId:frameId })
# Capture-phase keydown handler. Builds a key spec (e.g. "<c-a>") for modifier chords and
# non-character keys, then dispatches: ESC exits insert mode / find mode / the help dialog
# locally; everything else is forwarded to the background page's key handler. Finally,
# keys that will trigger a Vimium command are stopped from propagating to the page.
onKeydown = (event) ->
  return unless handlerStack.bubbleEvent('keydown', event)
  keyChar = ""
  # handle special keys, and normal input keys with modifiers being pressed. don't handle shiftKey alone (to
  # avoid / being interpreted as ?)
  if (((event.metaKey || event.ctrlKey || event.altKey) && event.keyCode > 31) || (
      # TODO(philc): some events don't have a keyidentifier. How is that possible?
      event.keyIdentifier && event.keyIdentifier.slice(0, 2) != "U+"))
    keyChar = KeyboardUtils.getKeyChar(event)
    # Again, ignore just modifiers. Maybe this should replace the keyCode>31 condition.
    if (keyChar != "")
      modifiers = []
      if (event.shiftKey)
        keyChar = keyChar.toUpperCase()
      if (event.metaKey)
        modifiers.push("m")
      if (event.ctrlKey)
        modifiers.push("c")
      if (event.altKey)
        modifiers.push("a")
      # Prefix each modifier letter, producing e.g. "a-c-x" before wrapping in <>.
      for i of modifiers
        keyChar = modifiers[i] + "-" + keyChar
      if (modifiers.length > 0 || keyChar.length > 1)
        keyChar = "<" + keyChar + ">"
  if (isInsertMode() && KeyboardUtils.isEscape(event))
    # Note that we can't programmatically blur out of Flash embeds from Javascript.
    if (!isEmbed(event.srcElement))
      # Remove focus so the user can't just get himself back into insert mode by typing in the same input
      # box.
      if (isEditable(event.srcElement))
        event.srcElement.blur()
      exitInsertMode()
      DomUtils.suppressEvent(event)
  else if (findMode)
    if (KeyboardUtils.isEscape(event))
      handleEscapeForFindMode()
      DomUtils.suppressEvent(event)
    else if (event.keyCode == keyCodes.backspace || event.keyCode == keyCodes.deleteKey)
      handleDeleteForFindMode()
      DomUtils.suppressEvent(event)
    else if (event.keyCode == keyCodes.enter)
      handleEnterForFindMode()
      DomUtils.suppressEvent(event)
    else if (!modifiers)
      event.stopPropagation()
  else if (isShowingHelpDialog && KeyboardUtils.isEscape(event))
    hideHelpDialog()
  else if (!isInsertMode() && !findMode)
    if (keyChar)
      if (currentCompletionKeys.indexOf(keyChar) != -1)
        DomUtils.suppressEvent(event)
      keyPort.postMessage({ keyChar:keyChar, frameId:frameId })
    else if (KeyboardUtils.isEscape(event))
      keyPort.postMessage({ keyChar:"<ESC>", frameId:frameId })
  # Added to prevent propagating this event to other listeners if it's one that'll trigger a Vimium command.
  # The goal is to avoid the scenario where Google Instant Search uses every keydown event to dump us
  # back into the search box. As a side effect, this should also prevent overriding by other sites.
  #
  # Subject to internationalization issues since we're using keyIdentifier instead of charCode (in keypress).
  #
  # TODO(philc): Revisit this. Not sure it's the absolute best approach.
  if (keyChar == "" && !isInsertMode() &&
      (currentCompletionKeys.indexOf(KeyboardUtils.getKeyChar(event)) != -1 ||
       isValidFirstKey(KeyboardUtils.getKeyChar(event))))
    event.stopPropagation()
onKeyup = (event) -> return unless handlerStack.bubbleEvent('keyup', event)
# Ask the background page whether Vimium is enabled for the current URL (users can exclude
# URL patterns via the settings page) and attach or skip event listeners accordingly.
checkIfEnabledForUrl = ->
  url = window.location.toString()
  chrome.runtime.sendMessage { handler: "isEnabledForUrl", url: url }, (response) ->
    isEnabledForUrl = response.isEnabledForUrl
    if (isEnabledForUrl)
      initializeWhenEnabled()
    else if (HUD.isReady())
      # Quickly hide any HUD we might already be showing, e.g. if we entered insert mode on page load.
      HUD.hide()
# Cache the keys that continue (or may start) a command mapping. Called with a response
# from the background page, or with no argument to request a fresh copy (the request's
# callback is this same function).
refreshCompletionKeys = (response) ->
  if (response)
    currentCompletionKeys = response.completionKeys
    if (response.validFirstKeys)
      validFirstKeys = response.validFirstKeys
  else
    chrome.runtime.sendMessage({ handler: "getCompletionKeys" }, refreshCompletionKeys)
# A keyChar can legitimately start a command sequence if it begins some mapped key
# sequence, or is a digit 1-9 (the start of a numeric count prefix). The regex here was
# corrupted in this copy; restored to the upstream /[1-9]/ test.
isValidFirstKey = (keyChar) ->
  validFirstKeys[keyChar] || /[1-9]/.test(keyChar)
onFocusCapturePhase = (event) ->
if (isFocusable(event.target) && !findMode)
enterInsertModeWithoutShowingIndicator(event.target)
onBlurCapturePhase = (event) ->
if (isFocusable(event.target))
exitInsertMode(event.target)
#
# Returns true if the element is focusable. This includes embeds like Flash, which steal the keybaord focus.
#
isFocusable = (element) -> isEditable(element) || isEmbed(element)
#
# Embedded elements like Flash and quicktime players can obtain focus but cannot be programmatically
# unfocused.
#
isEmbed = (element) -> ["embed", "object"].indexOf(element.nodeName.toLowerCase()) > 0
#
# Input or text elements are considered focusable and able to receieve their own keyboard events,
# and will enter enter mode if focused. Also note that the "contentEditable" attribute can be set on
# any element which makes it a rich text editor, like the notes on jjot.com.
#
# True when :target can receive keyboard input of its own: contentEditable elements
# (rich text editors), text-like <input>s, <textarea>s and <select>s.
isEditable = (target) ->
  return true if target.isContentEditable
  tag = target.nodeName.toLowerCase()
  switch tag
    when "input"
      # Blacklist rather than whitelist input types, because new HTML5 form controls
      # are still being introduced.
      target.type not in ["radio", "checkbox"]
    when "textarea", "select"
      true
    else
      false
#
# Enters insert mode and show an "Insert mode" message. Showing the UI is only useful when entering insert
# mode manually by pressing "i". In most cases we do not show any UI (enterInsertModeWithoutShowingIndicator)
#
window.enterInsertMode = (target) ->
enterInsertModeWithoutShowingIndicator(target)
HUD.show("Insert mode")
#
# We cannot count on 'focus' and 'blur' events to happen sequentially. For example, if blurring element A
# causes element B to come into focus, we may get "B focus" before "A blur". Thus we only leave insert mode
# when the last editable element that came into focus -- which insertModeLock points to -- has been blurred.
# If insert mode is entered manually (via pressing 'i'), then we set insertModeLock to 'undefined', and only
# leave insert mode when the user presses <ESC>.
#
enterInsertModeWithoutShowingIndicator = (target) -> insertModeLock = target
exitInsertMode = (target) ->
if (target == undefined || insertModeLock == target)
insertModeLock = null
HUD.hide()
isInsertMode = -> insertModeLock != null
# should be called whenever rawQuery is modified. Re-derives parsedQuery, isRegex,
# ignoreCase and matchCount from the raw query text.
updateFindModeQuery = ->
  # the query can be treated differently (e.g. as a plain string versus regex) depending on the presence of
  # escape sequences. '\' is the escape character and needs to be escaped itself to be used as a normal
  # character. here we grep for the relevant escape sequences.
  findModeQuery.isRegex = settings.get 'regexFindMode'
  hasNoIgnoreCaseFlag = false
  # Strip the recognized escape sequences (\r, \R, \I, \\) out of the query, recording their effect.
  findModeQuery.parsedQuery = findModeQuery.rawQuery.replace /\\./g, (match) ->
    switch (match)
      when "\\r"
        findModeQuery.isRegex = true
        return ""
      when "\\R"
        findModeQuery.isRegex = false
        return ""
      when "\\I"
        hasNoIgnoreCaseFlag = true
        return ""
      when "\\\\"
        return "\\"
      else
        return match
  # default to 'smartcase' mode, unless noIgnoreCase is explicitly specified
  findModeQuery.ignoreCase = !hasNoIgnoreCaseFlag && !Utils.hasUpperCase(findModeQuery.parsedQuery)
  # if we are dealing with a regex, grep for all matches in the text, and then call window.find() on them
  # sequentially so the browser handles the scrolling / text selection.
  if findModeQuery.isRegex
    try
      pattern = new RegExp(findModeQuery.parsedQuery, "g" + (if findModeQuery.ignoreCase then "i" else ""))
    catch error
      # if we catch a SyntaxError, assume the user is not done typing yet and return quietly
      return
    # innerText will not return the text of hidden elements, and strip out tags while preserving newlines
    text = document.body.innerText
    findModeQuery.regexMatches = text.match(pattern)
    findModeQuery.activeRegexIndex = 0
    findModeQuery.matchCount = findModeQuery.regexMatches?.length
  # if we are doing a basic plain string match, we still want to grep for matches of the string, so we can
  # show the number of results. We can grep on document.body.innerText, as it should be indistinguishable
  # from the internal representation used by window.find.
  else
    # escape all special characters, so RegExp just parses the string 'as is'.
    # Taken from http://stackoverflow.com/questions/3446170/escape-string-for-use-in-javascript-regex
    escapeRegExp = /[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g
    parsedNonRegexQuery = findModeQuery.parsedQuery.replace(escapeRegExp, (char) -> "\\" + char)
    pattern = new RegExp(parsedNonRegexQuery, "g" + (if findModeQuery.ignoreCase then "i" else ""))
    text = document.body.innerText
    findModeQuery.matchCount = text.match(pattern)?.length
handleKeyCharForFindMode = (keyChar) ->
findModeQuery.rawQuery += keyChar
updateFindModeQuery()
performFindInPlace()
showFindModeHUDForQuery()
handleEscapeForFindMode = ->
exitFindMode()
document.body.classList.remove("vimiumFindMode")
# removing the class does not re-color existing selections. we recreate the current selection so it reverts
# back to the default color.
selection = window.getSelection()
unless selection.isCollapsed
range = window.getSelection().getRangeAt(0)
window.getSelection().removeAllRanges()
window.getSelection().addRange(range)
focusFoundLink() || selectFoundInputElement()
handleDeleteForFindMode = ->
if (findModeQuery.rawQuery.length == 0)
exitFindMode()
performFindInPlace()
else
findModeQuery.rawQuery = findModeQuery.rawQuery.substring(0, findModeQuery.rawQuery.length - 1)
updateFindModeQuery()
performFindInPlace()
showFindModeHUDForQuery()
# <esc> sends us into insert mode if possible, but <cr> does not.
# <esc> corresponds approximately to 'nevermind, I have found it already' while <cr> means 'I want to save
# this query and do more searches with it'
handleEnterForFindMode = ->
exitFindMode()
focusFoundLink()
document.body.classList.add("vimiumFindMode")
settings.set("findModeRawQuery", findModeQuery.rawQuery)
performFindInPlace = ->
cachedScrollX = window.scrollX
cachedScrollY = window.scrollY
query = if findModeQuery.isRegex then getNextQueryFromRegexMatches(0) else findModeQuery.parsedQuery
# Search backwards first to "free up" the current word as eligible for the real forward search. This allows
# us to search in place without jumping around between matches as the query grows.
executeFind(query, { backwards: true, caseSensitive: !findModeQuery.ignoreCase })
# We need to restore the scroll position because we might've lost the right position by searching
# backwards.
window.scrollTo(cachedScrollX, cachedScrollY)
findModeQueryHasResults = executeFind(query, { caseSensitive: !findModeQuery.ignoreCase })
# :options is an optional dict. valid parameters are 'caseSensitive' and 'backwards'.
executeFind = (query, options) ->
options = options || {}
# rather hacky, but this is our way of signalling to the insertMode listener not to react to the focus
# changes that find() induces.
oldFindMode = findMode
findMode = true
document.body.classList.add("vimiumFindMode")
# prevent find from matching its own search query in the HUD
HUD.hide(true)
# ignore the selectionchange event generated by find()
document.removeEventListener("selectionchange",restoreDefaultSelectionHighlight, true)
result = window.find(query, options.caseSensitive, options.backwards, true, false, true, false)
setTimeout(
-> document.addEventListener("selectionchange", restoreDefaultSelectionHighlight, true)
0)
findMode = oldFindMode
# we need to save the anchor node here because <esc> seems to nullify it, regardless of whether we do
# preventDefault()
findModeAnchorNode = document.getSelection().anchorNode
result
restoreDefaultSelectionHighlight = -> document.body.classList.remove("vimiumFindMode")
focusFoundLink = ->
if (findModeQueryHasResults)
link = getLinkFromSelection()
link.focus() if link
# True when :child is :parent itself or lies anywhere beneath it in the DOM tree.
# Walks the ancestor chain from :child upwards until it finds :parent or runs out.
isDOMDescendant = (parent, child) ->
  candidate = child
  until candidate == null
    return true if candidate == parent
    candidate = candidate.parentNode
  false
selectFoundInputElement = ->
# if the found text is in an input element, getSelection().anchorNode will be null, so we use activeElement
# instead. however, since the last focused element might not be the one currently pointed to by find (e.g.
# the current one might be disabled and therefore unable to receive focus), we use the approximate
# heuristic of checking that the last anchor node is an ancestor of our element.
if (findModeQueryHasResults && document.activeElement &&
DomUtils.isSelectable(document.activeElement) &&
isDOMDescendant(findModeAnchorNode, document.activeElement))
DomUtils.simulateSelect(document.activeElement)
# the element has already received focus via find(), so invoke insert mode manually
enterInsertModeWithoutShowingIndicator(document.activeElement)
# Step :stepSize entries (positive = forwards, negative = backwards) through the cached
# regex matches, wrapping around in either direction, and return the match text to search
# for next.
getNextQueryFromRegexMatches = (stepSize) ->
  # find()ing an empty query always returns false
  return "" unless findModeQuery.regexMatches
  totalMatches = findModeQuery.regexMatches.length
  # Adding totalMatches before the modulo keeps the index non-negative for backward steps.
  findModeQuery.activeRegexIndex += stepSize + totalMatches
  findModeQuery.activeRegexIndex %= totalMatches
  findModeQuery.regexMatches[findModeQuery.activeRegexIndex]
findAndFocus = (backwards) ->
# check if the query has been changed by a script in another frame
mostRecentQuery = settings.get("findModeRawQuery") || ""
if (mostRecentQuery != findModeQuery.rawQuery)
findModeQuery.rawQuery = mostRecentQuery
updateFindModeQuery()
query =
if findModeQuery.isRegex
getNextQueryFromRegexMatches(if backwards then -1 else 1)
else
findModeQuery.parsedQuery
findModeQueryHasResults =
executeFind(query, { backwards: backwards, caseSensitive: !findModeQuery.ignoreCase })
if (!findModeQueryHasResults)
HUD.showForDuration("No matches for '" + findModeQuery.rawQuery + "'", 1000)
return
# if we have found an input element via 'n', pressing <esc> immediately afterwards sends us into insert
# mode
elementCanTakeInput = document.activeElement &&
DomUtils.isSelectable(document.activeElement) &&
isDOMDescendant(findModeAnchorNode, document.activeElement)
if (elementCanTakeInput)
handlerStack.push({
keydown: (event) ->
@remove()
if (KeyboardUtils.isEscape(event))
DomUtils.simulateSelect(document.activeElement)
enterInsertModeWithoutShowingIndicator(document.activeElement)
return false # we have "consumed" this event, so do not propagate
return true
})
focusFoundLink()
window.performFind = -> findAndFocus()
window.performBackwardsFind = -> findAndFocus(true)
getLinkFromSelection = ->
node = window.getSelection().anchorNode
while (node && node != document.body)
return node if (node.nodeName.toLowerCase() == "a")
node = node.parentNode
null
# used by the findAndFollow* functions.
followLink = (linkElement) ->
if (linkElement.nodeName.toLowerCase() == "link")
window.location.href = linkElement.href
else
# if we can click on it, don't simply set location.href: some next/prev links are meant to trigger AJAX
# calls, like the 'more' button on GitHub's newsfeed.
linkElement.scrollIntoView()
linkElement.focus()
DomUtils.simulateClick(linkElement)
#
# Find and follow a link which matches any one of a list of strings. If there are multiple such links, they
# are prioritized for shortness, by their position in :linkStrings, how far down the page they are located,
# and finally by whether the match is exact. Practically speaking, this means we favor 'next page' over 'the
# next big thing', and 'more' over 'nextcompany', even if 'next' occurs before 'more' in :linkStrings.
#
# Follow the best visible link whose text matches one of :linkStrings (see the comment
# block above for the prioritization rules). Returns true if a link was followed.
findAndFollowLink = (linkStrings) ->
  linksXPath = DomUtils.makeXPath(["a", "*[@onclick or @role='link' or contains(@class, 'button')]"])
  links = DomUtils.evaluateXPath(linksXPath, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE)
  candidateLinks = []
  # at the end of this loop, candidateLinks will contain all visible links that match our patterns
  # links lower in the page are more likely to be the ones we want, so we loop through the snapshot backwards
  for i in [(links.snapshotLength - 1)..0] by -1
    link = links.snapshotItem(i)
    # ensure link is visible (we don't mind if it is scrolled offscreen)
    boundingClientRect = link.getBoundingClientRect()
    if (boundingClientRect.width == 0 || boundingClientRect.height == 0)
      continue
    computedStyle = window.getComputedStyle(link, null)
    if (computedStyle.getPropertyValue("visibility") != "visible" ||
        computedStyle.getPropertyValue("display") == "none")
      continue
    # keep the link only if its text contains at least one of the requested patterns
    linkMatches = false
    for linkString in linkStrings
      if (link.innerText.toLowerCase().indexOf(linkString) != -1)
        linkMatches = true
        break
    continue unless linkMatches
    candidateLinks.push(link)
  return if (candidateLinks.length == 0)
  for link in candidateLinks
    link.wordCount = link.innerText.trim().split(/\s+/).length
  # We can use this trick to ensure that Array.sort is stable. We need this property to retain the reverse
  # in-page order of the links.
  candidateLinks.forEach((a,i) -> a.originalIndex = i)
  # favor shorter links, and ignore those that are more than one word longer than the shortest link
  candidateLinks =
    candidateLinks
      .sort((a, b) ->
        if (a.wordCount == b.wordCount) then a.originalIndex - b.originalIndex else a.wordCount - b.wordCount
      )
      .filter((a) -> a.wordCount <= candidateLinks[0].wordCount + 1)
  # Try each pattern in priority order; the first candidate matching it as an exact word wins.
  for linkString in linkStrings
    exactWordRegex =
      if /\b/.test(linkString[0]) or /\b/.test(linkString[linkString.length - 1])
        new RegExp "\\b" + linkString + "\\b", "i"
      else
        new RegExp linkString, "i"
    for candidateLink in candidateLinks
      if (exactWordRegex.test(candidateLink.innerText))
        followLink(candidateLink)
        return true
  false
findAndFollowRel = (value) ->
relTags = ["link", "a", "area"]
for tag in relTags
elements = document.getElementsByTagName(tag)
for element in elements
if (element.hasAttribute("rel") && element.rel == value)
followLink(element)
return true
window.goPrevious = ->
previousPatterns = settings.get("previousPatterns") || ""
previousStrings = previousPatterns.split(",").filter( (s) -> s.trim().length )
findAndFollowRel("prev") || findAndFollowLink(previousStrings)
window.goNext = ->
nextPatterns = settings.get("nextPatterns") || ""
nextStrings = nextPatterns.split(",").filter( (s) -> s.trim().length )
findAndFollowRel("next") || findAndFollowLink(nextStrings)
showFindModeHUDForQuery = ->
if (findModeQueryHasResults || findModeQuery.parsedQuery.length == 0)
HUD.show("/" + findModeQuery.rawQuery + " (" + findModeQuery.matchCount + " Matches)")
else
HUD.show("/" + findModeQuery.rawQuery + " (No Matches)")
window.enterFindMode = ->
findModeQuery = { rawQuery: "" }
findMode = true
HUD.show("/")
exitFindMode = ->
findMode = false
HUD.hide()
window.showHelpDialog = (html, fid) ->
return if (isShowingHelpDialog || !document.body || fid != frameId)
isShowingHelpDialog = true
container = document.createElement("div")
container.id = "vimiumHelpDialogContainer"
container.className = "vimiumReset"
document.body.appendChild(container)
container.innerHTML = html
container.getElementsByClassName("closeButton")[0].addEventListener("click", hideHelpDialog, false)
VimiumHelpDialog =
# This setting is pulled out of local storage. It's false by default.
getShowAdvancedCommands: -> settings.get("helpDialog_showAdvancedCommands")
init: () ->
this.dialogElement = document.getElementById("vimiumHelpDialog")
this.dialogElement.getElementsByClassName("toggleAdvancedCommands")[0].addEventListener("click",
VimiumHelpDialog.toggleAdvancedCommands, false)
this.dialogElement.style.maxHeight = window.innerHeight - 80
this.showAdvancedCommands(this.getShowAdvancedCommands())
#
# Advanced commands are hidden by default so they don't overwhelm new and casual users.
#
toggleAdvancedCommands: (event) ->
event.preventDefault()
showAdvanced = VimiumHelpDialog.getShowAdvancedCommands()
VimiumHelpDialog.showAdvancedCommands(!showAdvanced)
settings.set("helpDialog_showAdvancedCommands", !showAdvanced)
showAdvancedCommands: (visible) ->
VimiumHelpDialog.dialogElement.getElementsByClassName("toggleAdvancedCommands")[0].innerHTML =
if visible then "Hide advanced commands" else "Show advanced commands"
advancedEls = VimiumHelpDialog.dialogElement.getElementsByClassName("advanced")
for el in advancedEls
el.style.display = if visible then "table-row" else "none"
VimiumHelpDialog.init()
container.getElementsByClassName("optionsPage")[0].addEventListener("click", (clickEvent) ->
clickEvent.preventDefault()
chrome.runtime.sendMessage({handler: "openOptionsPageInNewTab"})
false)
hideHelpDialog = (clickEvent) ->
isShowingHelpDialog = false
helpDialog = document.getElementById("vimiumHelpDialogContainer")
if (helpDialog)
helpDialog.parentNode.removeChild(helpDialog)
if (clickEvent)
clickEvent.preventDefault()
toggleHelpDialog = (html, fid) ->
if (isShowingHelpDialog)
hideHelpDialog()
else
showHelpDialog(html, fid)
#
# A heads-up-display (HUD) for showing Vimium page operations.
# Note: you cannot interact with the HUD until document.body is available.
#
HUD =
_tweenId: -1
_displayElement: null
_upgradeNotificationElement: null
# This HUD is styled to precisely mimick the chrome HUD on Mac. Use the "has_popup_and_link_hud.html"
# test harness to tweak these styles to match Chrome's. One limitation of our HUD display is that
# it doesn't sit on top of horizontal scrollbars like Chrome's HUD does.
showForDuration: (text, duration) ->
HUD.show(text)
HUD._showForDurationTimerId = setTimeout((-> HUD.hide()), duration)
show: (text) ->
return unless HUD.enabled()
clearTimeout(HUD._showForDurationTimerId)
HUD.displayElement().innerText = text
clearInterval(HUD._tweenId)
HUD._tweenId = Tween.fade(HUD.displayElement(), 1.0, 150)
HUD.displayElement().style.display = ""
showUpgradeNotification: (version) ->
HUD.upgradeNotificationElement().innerHTML = "Vimium has been updated to
<a class='vimiumReset'
href='https://chrome.google.com/extensions/detail/dbepggeogbaibhgnhhndojpepiihcmeb'>
#{version}</a>.<a class='vimiumReset close-button' href='#'>x</a>"
links = HUD.upgradeNotificationElement().getElementsByTagName("a")
links[0].addEventListener("click", HUD.onUpdateLinkClicked, false)
links[1].addEventListener "click", (event) ->
event.preventDefault()
HUD.onUpdateLinkClicked()
Tween.fade(HUD.upgradeNotificationElement(), 1.0, 150)
onUpdateLinkClicked: (event) ->
HUD.hideUpgradeNotification()
chrome.runtime.sendMessage({ handler: "upgradeNotificationClosed" })
hideUpgradeNotification: (clickEvent) ->
Tween.fade(HUD.upgradeNotificationElement(), 0, 150,
-> HUD.upgradeNotificationElement().style.display = "none")
#
# Retrieves the HUD HTML element.
#
displayElement: ->
if (!HUD._displayElement)
HUD._displayElement = HUD.createHudElement()
# Keep this far enough to the right so that it doesn't collide with the "popups blocked" chrome HUD.
HUD._displayElement.style.right = "150px"
HUD._displayElement
upgradeNotificationElement: ->
if (!HUD._upgradeNotificationElement)
HUD._upgradeNotificationElement = HUD.createHudElement()
# Position this just to the left of our normal HUD.
HUD._upgradeNotificationElement.style.right = "315px"
HUD._upgradeNotificationElement
createHudElement: ->
element = document.createElement("div")
element.className = "vimiumReset vimiumHUD"
document.body.appendChild(element)
element
hide: (immediate) ->
clearInterval(HUD._tweenId)
if (immediate)
HUD.displayElement().style.display = "none"
else
HUD._tweenId = Tween.fade(HUD.displayElement(), 0, 150,
-> HUD.displayElement().style.display = "none")
isReady: -> document.body != null
# A preference which can be toggled in the Options page. */
enabled: -> !settings.get("hideHud")
Tween =
  #
  # Fades an element's alpha from its current opacity to :toAlpha over :duration milliseconds.
  # Returns a timer ID which can be used to stop the tween via clearInterval. :onComplete,
  # if supplied, runs once the target opacity is reached.
  #
  fade: (element, toAlpha, duration, onComplete) ->
    state = {}
    state.duration = duration
    state.startTime = (new Date()).getTime()
    # parseFloat, not parseInt: opacity is fractional (e.g. "0.5"), and parseInt would
    # truncate it to 0, restarting every fade from fully transparent.
    state.from = parseFloat(element.style.opacity) || 0
    state.to = toAlpha
    state.onUpdate = (value) ->
      element.style.opacity = value
      if (value == state.to && onComplete)
        onComplete()
    state.timerId = setInterval((-> Tween.performTweenStep(state)), 50)
    state.timerId
  # Advance one linear interpolation step; stops the timer once :duration has elapsed
  # and snaps to the exact target value.
  performTweenStep: (state) ->
    elapsed = (new Date()).getTime() - state.startTime
    if (elapsed >= state.duration)
      clearInterval(state.timerId)
      state.onUpdate(state.to)
    else
      value = (elapsed / state.duration) * (state.to - state.from) + state.from
      state.onUpdate(value)
initializePreDomReady()
window.addEventListener("DOMContentLoaded", initializeOnDomReady)
window.onbeforeunload = ->
chrome.runtime.sendMessage(
handler: "updateScrollPosition"
scrollX: window.scrollX
scrollY: window.scrollY)
root = exports ? window
root.settings = settings
root.HUD = HUD
root.handlerStack = handlerStack
root.frameId = frameId
|
[
{
"context": ": \"https://www.google.com/maps/embed/v1/place?key=AIzaSyAG-ySQ04kts2AnablMVVkD32vDu74jJ_4&q=#{@props.event.street + @props.event.city + @pr",
"end": 1266,
"score": 0.9994717240333557,
"start": 1227,
"tag": "KEY",
"value": "AIzaSyAG-ySQ04kts2AnablMVVkD32vDu74jJ_4"
}
] | app/assets/javascripts/components/event.js.coffee | musicionary/erefuge | 5 | @Event = React.createClass
getInitialState: ->
edit: false
handleToggle: (e) ->
e.preventDefault()
@setState edit: !@state.edit
handleEdit: (e) ->
e.preventDefault()
data =
name: this.refs.name.value
date: this.refs.date.value
description: this.refs.description.value
street: this.refs.street.value
city: this.refs.city.value
state: this.refs.state.value
zip_code: this.refs.zip_code.value
$.ajax
method: "PUT"
url: "/events/#{ @props.event.id }"
dataType: 'JSON'
data:
event: data
success: (data) =>
@setState edit: false
@props.handleEditEvent @props.event, data
handleDelete: (e) ->
e.preventDefault()
$.ajax
method: 'DELETE'
url: "/events/#{ @props.event.id }"
dataType: 'JSON'
success: () =>
@props.handleDeleteEvent @props.event
eventRow: ->
React.DOM.div
className: "col s12 l6"
React.DOM.div
className: "card"
React.DOM.div
className: "card-image waves-effect waves-block waves-light"
React.DOM.iframe
className: ""
src: "https://www.google.com/maps/embed/v1/place?key=AIzaSyAG-ySQ04kts2AnablMVVkD32vDu74jJ_4&q=#{@props.event.street + @props.event.city + @props.event.state + @props.event.zip_code}"
React.DOM.div
className: "card-content"
React.DOM.span
className: "card-title activator grey-text text-darken-4"
@props.event.name
React.DOM.i
className: "material-icons right"
"more_vert"
React.DOM.hr null
React.DOM.p
React.DOM.a
href: "#"
#make this "add to your events function"
"This is a link"
React.DOM.div
className: "card-reveal"
React.DOM.span
className: "card-title grey-text text-darken-4"
@props.event.name
React.DOM.i
className: "material-icons right"
"close"
React.DOM.p
@props.event.description
React.DOM.p null, @props.event.date
React.DOM.a
className: 'waves-effect waves-light btn'
onClick: @handleToggle
React.DOM.i
className: 'material-icons'
"mode_edit"
React.DOM.a
className: 'waves-effect waves-light btn'
onClick: @handleDelete
React.DOM.i
className: 'material-icons'
"delete_forever"
eventForm: ->
React.DOM.div
className: "col s12 m6"
React.DOM.div
className: "card"
React.DOM.div
className: "row"
React.DOM.div
className: "input-field col s12"
React.DOM.input
className: ""
type: "text"
defaultValue: @props.event.name
ref: 'name'
React.DOM.div
className: "input-field col s12"
React.DOM.input
className: ""
type: "datetime-local"
defaultValue: @props.event.date
ref: 'date'
React.DOM.br
React.DOM.div
className: "input-field col s12"
React.DOM.textarea
className: "materialize-textarea"
type: "text"
defaultValue: @props.event.description
ref: 'description'
React.DOM.div
className: "input-field col s12"
React.DOM.input
className: ""
type: "text"
defaultValue: @props.event.street
ref: 'street'
React.DOM.div
className: "input-field col s12"
React.DOM.input
className: ""
type: "text"
defaultValue: @props.event.city
ref: 'city'
React.DOM.div
className: "input-field col s12"
React.DOM.input
className: ""
type: "text"
defaultValue: @props.event.state
ref: 'state'
React.DOM.div
className: "input-field col s12"
React.DOM.input
className: ""
type: "text"
defaultValue: @props.event.zip_code
ref: 'zip_code'
React.DOM.div
className: "row container"
React.DOM.a
className: 'col s6 waves-effect waves-light btn'
onClick: @handleEdit
React.DOM.i
className: 'material-icons'
"done"
React.DOM.a
className: 'col s6 waves-effect waves-light btn'
onClick: @handleToggle
React.DOM.i
className: 'material-icons'
"clear"
render: ->
if @state.edit
@eventForm()
else
@eventRow()
| 59575 | @Event = React.createClass
getInitialState: ->
edit: false
handleToggle: (e) ->
e.preventDefault()
@setState edit: !@state.edit
handleEdit: (e) ->
e.preventDefault()
data =
name: this.refs.name.value
date: this.refs.date.value
description: this.refs.description.value
street: this.refs.street.value
city: this.refs.city.value
state: this.refs.state.value
zip_code: this.refs.zip_code.value
$.ajax
method: "PUT"
url: "/events/#{ @props.event.id }"
dataType: 'JSON'
data:
event: data
success: (data) =>
@setState edit: false
@props.handleEditEvent @props.event, data
handleDelete: (e) ->
e.preventDefault()
$.ajax
method: 'DELETE'
url: "/events/#{ @props.event.id }"
dataType: 'JSON'
success: () =>
@props.handleDeleteEvent @props.event
eventRow: ->
React.DOM.div
className: "col s12 l6"
React.DOM.div
className: "card"
React.DOM.div
className: "card-image waves-effect waves-block waves-light"
React.DOM.iframe
className: ""
src: "https://www.google.com/maps/embed/v1/place?key=<KEY>&q=#{@props.event.street + @props.event.city + @props.event.state + @props.event.zip_code}"
React.DOM.div
className: "card-content"
React.DOM.span
className: "card-title activator grey-text text-darken-4"
@props.event.name
React.DOM.i
className: "material-icons right"
"more_vert"
React.DOM.hr null
React.DOM.p
React.DOM.a
href: "#"
#make this "add to your events function"
"This is a link"
React.DOM.div
className: "card-reveal"
React.DOM.span
className: "card-title grey-text text-darken-4"
@props.event.name
React.DOM.i
className: "material-icons right"
"close"
React.DOM.p
@props.event.description
React.DOM.p null, @props.event.date
React.DOM.a
className: 'waves-effect waves-light btn'
onClick: @handleToggle
React.DOM.i
className: 'material-icons'
"mode_edit"
React.DOM.a
className: 'waves-effect waves-light btn'
onClick: @handleDelete
React.DOM.i
className: 'material-icons'
"delete_forever"
eventForm: ->
React.DOM.div
className: "col s12 m6"
React.DOM.div
className: "card"
React.DOM.div
className: "row"
React.DOM.div
className: "input-field col s12"
React.DOM.input
className: ""
type: "text"
defaultValue: @props.event.name
ref: 'name'
React.DOM.div
className: "input-field col s12"
React.DOM.input
className: ""
type: "datetime-local"
defaultValue: @props.event.date
ref: 'date'
React.DOM.br
React.DOM.div
className: "input-field col s12"
React.DOM.textarea
className: "materialize-textarea"
type: "text"
defaultValue: @props.event.description
ref: 'description'
React.DOM.div
className: "input-field col s12"
React.DOM.input
className: ""
type: "text"
defaultValue: @props.event.street
ref: 'street'
React.DOM.div
className: "input-field col s12"
React.DOM.input
className: ""
type: "text"
defaultValue: @props.event.city
ref: 'city'
React.DOM.div
className: "input-field col s12"
React.DOM.input
className: ""
type: "text"
defaultValue: @props.event.state
ref: 'state'
React.DOM.div
className: "input-field col s12"
React.DOM.input
className: ""
type: "text"
defaultValue: @props.event.zip_code
ref: 'zip_code'
React.DOM.div
className: "row container"
React.DOM.a
className: 'col s6 waves-effect waves-light btn'
onClick: @handleEdit
React.DOM.i
className: 'material-icons'
"done"
React.DOM.a
className: 'col s6 waves-effect waves-light btn'
onClick: @handleToggle
React.DOM.i
className: 'material-icons'
"clear"
render: ->
if @state.edit
@eventForm()
else
@eventRow()
| true | @Event = React.createClass
getInitialState: ->
edit: false
handleToggle: (e) ->
e.preventDefault()
@setState edit: !@state.edit
handleEdit: (e) ->
e.preventDefault()
data =
name: this.refs.name.value
date: this.refs.date.value
description: this.refs.description.value
street: this.refs.street.value
city: this.refs.city.value
state: this.refs.state.value
zip_code: this.refs.zip_code.value
$.ajax
method: "PUT"
url: "/events/#{ @props.event.id }"
dataType: 'JSON'
data:
event: data
success: (data) =>
@setState edit: false
@props.handleEditEvent @props.event, data
handleDelete: (e) ->
e.preventDefault()
$.ajax
method: 'DELETE'
url: "/events/#{ @props.event.id }"
dataType: 'JSON'
success: () =>
@props.handleDeleteEvent @props.event
eventRow: ->
React.DOM.div
className: "col s12 l6"
React.DOM.div
className: "card"
React.DOM.div
className: "card-image waves-effect waves-block waves-light"
React.DOM.iframe
className: ""
src: "https://www.google.com/maps/embed/v1/place?key=PI:KEY:<KEY>END_PI&q=#{@props.event.street + @props.event.city + @props.event.state + @props.event.zip_code}"
React.DOM.div
className: "card-content"
React.DOM.span
className: "card-title activator grey-text text-darken-4"
@props.event.name
React.DOM.i
className: "material-icons right"
"more_vert"
React.DOM.hr null
React.DOM.p
React.DOM.a
href: "#"
#make this "add to your events function"
"This is a link"
React.DOM.div
className: "card-reveal"
React.DOM.span
className: "card-title grey-text text-darken-4"
@props.event.name
React.DOM.i
className: "material-icons right"
"close"
React.DOM.p
@props.event.description
React.DOM.p null, @props.event.date
React.DOM.a
className: 'waves-effect waves-light btn'
onClick: @handleToggle
React.DOM.i
className: 'material-icons'
"mode_edit"
React.DOM.a
className: 'waves-effect waves-light btn'
onClick: @handleDelete
React.DOM.i
className: 'material-icons'
"delete_forever"
eventForm: ->
React.DOM.div
className: "col s12 m6"
React.DOM.div
className: "card"
React.DOM.div
className: "row"
React.DOM.div
className: "input-field col s12"
React.DOM.input
className: ""
type: "text"
defaultValue: @props.event.name
ref: 'name'
React.DOM.div
className: "input-field col s12"
React.DOM.input
className: ""
type: "datetime-local"
defaultValue: @props.event.date
ref: 'date'
React.DOM.br
React.DOM.div
className: "input-field col s12"
React.DOM.textarea
className: "materialize-textarea"
type: "text"
defaultValue: @props.event.description
ref: 'description'
React.DOM.div
className: "input-field col s12"
React.DOM.input
className: ""
type: "text"
defaultValue: @props.event.street
ref: 'street'
React.DOM.div
className: "input-field col s12"
React.DOM.input
className: ""
type: "text"
defaultValue: @props.event.city
ref: 'city'
React.DOM.div
className: "input-field col s12"
React.DOM.input
className: ""
type: "text"
defaultValue: @props.event.state
ref: 'state'
React.DOM.div
className: "input-field col s12"
React.DOM.input
className: ""
type: "text"
defaultValue: @props.event.zip_code
ref: 'zip_code'
React.DOM.div
className: "row container"
React.DOM.a
className: 'col s6 waves-effect waves-light btn'
onClick: @handleEdit
React.DOM.i
className: 'material-icons'
"done"
React.DOM.a
className: 'col s6 waves-effect waves-light btn'
onClick: @handleToggle
React.DOM.i
className: 'material-icons'
"clear"
render: ->
if @state.edit
@eventForm()
else
@eventRow()
|
[
{
"context": "/maps/api/geocode/json'\n data:\n key: 'AIzaSyA1Bu8-7_MDfT9oncmZ1KCjoz_2xdrTMFA'\n latlng: coordinates).done (data) ->\n ",
"end": 419,
"score": 0.9997621178627014,
"start": 380,
"tag": "KEY",
"value": "AIzaSyA1Bu8-7_MDfT9oncmZ1KCjoz_2xdrTMFA"
}
] | weather.coffee | Magnum26/weather | 0 | $('document').ready ->
`var skycons`
error = (err) ->
document.getElementById('loading').innerHTML = 'ERROR(' + err.code + '): ' + err.message
return
success = (pos) ->
crd = pos.coords
coordinates = crd.latitude + ',' + crd.longitude
$.ajax(
method: 'GET'
url: 'https://maps.googleapis.com/maps/api/geocode/json'
data:
key: 'AIzaSyA1Bu8-7_MDfT9oncmZ1KCjoz_2xdrTMFA'
latlng: coordinates).done (data) ->
document.getElementById('currently-location').innerHTML = data.results['0'].address_components[3].short_name + ', ' + data.results['0'].address_components[5].short_name
return
# Make API call to darksky to get weather temp and description
$.ajax(
method: 'GET'
dataType: 'jsonp'
url: "https://api.darksky.net/forecast/907160cda3c3fe28b8a7a2525b827ecc/" + coordinates + '?units=uk2'
).done (msg) ->
skycons.add document.getElementById('currently-icon'), msg.currently.icon
skycons.play()
roundTemp = parseFloat(msg.currently.temperature).toFixed(0)
document.getElementById('currently-summary').innerHTML = msg.currently.summary
document.getElementById('minutely-summary').innerHTML = msg.minutely.summary
document.getElementById('hourly-summary').innerHTML = msg.hourly.summary
document.getElementById('daily-summary').innerHTML = msg.daily.summary
document.getElementById('currently-temperature').innerHTML = roundTemp
document.getElementById('loading').style.display = 'none'
# Set some global variables that are used throughout
coordinates = undefined
roundTemp = '0'
skycons = new Skycons('color': 'black')
# Check if geolocation is available in browser
if 'geolocation' of navigator
options =
enableHighAccuracy: true
timeout: 5000
maximumAge: 0
# Gets position from browser with multiple callbacks to handle errors
navigator.geolocation.getCurrentPosition success, error, options
else
### geolocation IS NOT available ###
alert 'Sorry, your browser isn\'t providing a location.'
return
| 50066 | $('document').ready ->
`var skycons`
error = (err) ->
document.getElementById('loading').innerHTML = 'ERROR(' + err.code + '): ' + err.message
return
success = (pos) ->
crd = pos.coords
coordinates = crd.latitude + ',' + crd.longitude
$.ajax(
method: 'GET'
url: 'https://maps.googleapis.com/maps/api/geocode/json'
data:
key: '<KEY>'
latlng: coordinates).done (data) ->
document.getElementById('currently-location').innerHTML = data.results['0'].address_components[3].short_name + ', ' + data.results['0'].address_components[5].short_name
return
# Make API call to darksky to get weather temp and description
$.ajax(
method: 'GET'
dataType: 'jsonp'
url: "https://api.darksky.net/forecast/907160cda3c3fe28b8a7a2525b827ecc/" + coordinates + '?units=uk2'
).done (msg) ->
skycons.add document.getElementById('currently-icon'), msg.currently.icon
skycons.play()
roundTemp = parseFloat(msg.currently.temperature).toFixed(0)
document.getElementById('currently-summary').innerHTML = msg.currently.summary
document.getElementById('minutely-summary').innerHTML = msg.minutely.summary
document.getElementById('hourly-summary').innerHTML = msg.hourly.summary
document.getElementById('daily-summary').innerHTML = msg.daily.summary
document.getElementById('currently-temperature').innerHTML = roundTemp
document.getElementById('loading').style.display = 'none'
# Set some global variables that are used throughout
coordinates = undefined
roundTemp = '0'
skycons = new Skycons('color': 'black')
# Check if geolocation is available in browser
if 'geolocation' of navigator
options =
enableHighAccuracy: true
timeout: 5000
maximumAge: 0
# Gets position from browser with multiple callbacks to handle errors
navigator.geolocation.getCurrentPosition success, error, options
else
### geolocation IS NOT available ###
alert 'Sorry, your browser isn\'t providing a location.'
return
| true | $('document').ready ->
`var skycons`
error = (err) ->
document.getElementById('loading').innerHTML = 'ERROR(' + err.code + '): ' + err.message
return
success = (pos) ->
crd = pos.coords
coordinates = crd.latitude + ',' + crd.longitude
$.ajax(
method: 'GET'
url: 'https://maps.googleapis.com/maps/api/geocode/json'
data:
key: 'PI:KEY:<KEY>END_PI'
latlng: coordinates).done (data) ->
document.getElementById('currently-location').innerHTML = data.results['0'].address_components[3].short_name + ', ' + data.results['0'].address_components[5].short_name
return
# Make API call to darksky to get weather temp and description
$.ajax(
method: 'GET'
dataType: 'jsonp'
url: "https://api.darksky.net/forecast/907160cda3c3fe28b8a7a2525b827ecc/" + coordinates + '?units=uk2'
).done (msg) ->
skycons.add document.getElementById('currently-icon'), msg.currently.icon
skycons.play()
roundTemp = parseFloat(msg.currently.temperature).toFixed(0)
document.getElementById('currently-summary').innerHTML = msg.currently.summary
document.getElementById('minutely-summary').innerHTML = msg.minutely.summary
document.getElementById('hourly-summary').innerHTML = msg.hourly.summary
document.getElementById('daily-summary').innerHTML = msg.daily.summary
document.getElementById('currently-temperature').innerHTML = roundTemp
document.getElementById('loading').style.display = 'none'
# Set some global variables that are used throughout
coordinates = undefined
roundTemp = '0'
skycons = new Skycons('color': 'black')
# Check if geolocation is available in browser
if 'geolocation' of navigator
options =
enableHighAccuracy: true
timeout: 5000
maximumAge: 0
# Gets position from browser with multiple callbacks to handle errors
navigator.geolocation.getCurrentPosition success, error, options
else
### geolocation IS NOT available ###
alert 'Sorry, your browser isn\'t providing a location.'
return
|
[
{
"context": "# Slider\n#\n# Copyright (c) 2011 TechnoGate <support@technogate.fr>\n#\n# Permission is hereby ",
"end": 42,
"score": 0.9979022145271301,
"start": 32,
"tag": "USERNAME",
"value": "TechnoGate"
},
{
"context": "# Slider\n#\n# Copyright (c) 2011 TechnoGate <support@techno... | lib/assets/javascripts/slider.js.coffee | TechnoGate/contao_template | 0 | # Slider
#
# Copyright (c) 2011 TechnoGate <support@technogate.fr>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# XXX: Add docs
window.Slider = class Slider
constructor: (@options) ->
if ($ @options.dom).length <= 0
throw new Error "The dom #{@options.dom} does not exist."
@slider = ($ @options.dom)
@tray = @slider.find('ul').first()
@item_count = @tray.find('li').length
@item_width = @tray.find('li').first().outerWidth(true)
@item_height = @tray.find('li').first().outerHeight(true)
@items = @tray.find('li')
@display = @options.display or 1
@step = @options.step or 1
@tray.addClass 'slider_tray'
@create_arrows() if @options.controls
@tray.wrap "<div class='slider_window' />"
@window = @tray.parent()
@set_css()
@slided = 0
left_arrow: -> @slider.find('.left_arrow').first()
right_arrow: -> @slider.find('.right_arrow').first()
arrow_html: (direction) ->
"<a href='#'class='#{direction}_arrow'></a>"
wrapper_html: ->
create_arrows: ->
@tray.before @arrow_html 'left'
@tray.after @arrow_html 'right'
@left_arrow().bind 'click', (event) =>
event.preventDefault()
@handle_arrow_event 'left' if !@left_arrow().hasClass 'disabled'
@right_arrow().bind 'click', (event) =>
event.preventDefault()
@handle_arrow_event 'right' if !@right_arrow().hasClass 'disabled'
set_css: ->
@window.css
width: "#{@options.width}px"
height: "#{@options.height}px"
position: 'relative'
overflow: 'hidden'
float: 'left'
@tray.css
width: "#{@item_width * @item_count}px"
height: "#{@options.height}px"
display: 'block'
position: 'relative'
overflow: 'hidden'
'list-style': 'none'
margin: 0
padding: 0
@items.css
position: 'relative'
display: 'list-item'
float: 'left'
overflow: 'hidden'
@left_arrow().addClass 'disabled'
@left_arrow().css
'margin-top': "#{@item_height / 2}px"
float: 'left'
display: 'block'
@right_arrow().css
'margin-top': "#{@item_height / 2}px"
float: 'left'
display: 'block'
calculate_left_value: (direction, value) ->
value = parseInt(value) or 0
if direction == 'left'
@slided -= @step
value + (@step * @item_width)
else
@slided += @step
value - (@step * @item_width)
handle_arrow_event: (direction) ->
@tray.animate {'left': (@calculate_left_value direction, (@tray.css 'left'))},
duration: @options.duration or 500
if @slided <= 0
@left_arrow().addClass 'disabled'
@right_arrow().removeClass 'disabled'
else if @slided >= @item_count - @display
@right_arrow().addClass 'disabled'
@left_arrow().removeClass 'disabled'
else
@left_arrow().removeClass 'disabled'
@right_arrow().removeClass 'disabled'
| 125621 | # Slider
#
# Copyright (c) 2011 TechnoGate <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# XXX: Add docs
window.Slider = class Slider
constructor: (@options) ->
if ($ @options.dom).length <= 0
throw new Error "The dom #{@options.dom} does not exist."
@slider = ($ @options.dom)
@tray = @slider.find('ul').first()
@item_count = @tray.find('li').length
@item_width = @tray.find('li').first().outerWidth(true)
@item_height = @tray.find('li').first().outerHeight(true)
@items = @tray.find('li')
@display = @options.display or 1
@step = @options.step or 1
@tray.addClass 'slider_tray'
@create_arrows() if @options.controls
@tray.wrap "<div class='slider_window' />"
@window = @tray.parent()
@set_css()
@slided = 0
left_arrow: -> @slider.find('.left_arrow').first()
right_arrow: -> @slider.find('.right_arrow').first()
arrow_html: (direction) ->
"<a href='#'class='#{direction}_arrow'></a>"
wrapper_html: ->
create_arrows: ->
@tray.before @arrow_html 'left'
@tray.after @arrow_html 'right'
@left_arrow().bind 'click', (event) =>
event.preventDefault()
@handle_arrow_event 'left' if !@left_arrow().hasClass 'disabled'
@right_arrow().bind 'click', (event) =>
event.preventDefault()
@handle_arrow_event 'right' if !@right_arrow().hasClass 'disabled'
set_css: ->
@window.css
width: "#{@options.width}px"
height: "#{@options.height}px"
position: 'relative'
overflow: 'hidden'
float: 'left'
@tray.css
width: "#{@item_width * @item_count}px"
height: "#{@options.height}px"
display: 'block'
position: 'relative'
overflow: 'hidden'
'list-style': 'none'
margin: 0
padding: 0
@items.css
position: 'relative'
display: 'list-item'
float: 'left'
overflow: 'hidden'
@left_arrow().addClass 'disabled'
@left_arrow().css
'margin-top': "#{@item_height / 2}px"
float: 'left'
display: 'block'
@right_arrow().css
'margin-top': "#{@item_height / 2}px"
float: 'left'
display: 'block'
calculate_left_value: (direction, value) ->
value = parseInt(value) or 0
if direction == 'left'
@slided -= @step
value + (@step * @item_width)
else
@slided += @step
value - (@step * @item_width)
handle_arrow_event: (direction) ->
@tray.animate {'left': (@calculate_left_value direction, (@tray.css 'left'))},
duration: @options.duration or 500
if @slided <= 0
@left_arrow().addClass 'disabled'
@right_arrow().removeClass 'disabled'
else if @slided >= @item_count - @display
@right_arrow().addClass 'disabled'
@left_arrow().removeClass 'disabled'
else
@left_arrow().removeClass 'disabled'
@right_arrow().removeClass 'disabled'
| true | # Slider
#
# Copyright (c) 2011 TechnoGate <PI:EMAIL:<EMAIL>END_PI>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# XXX: Add docs
window.Slider = class Slider
constructor: (@options) ->
if ($ @options.dom).length <= 0
throw new Error "The dom #{@options.dom} does not exist."
@slider = ($ @options.dom)
@tray = @slider.find('ul').first()
@item_count = @tray.find('li').length
@item_width = @tray.find('li').first().outerWidth(true)
@item_height = @tray.find('li').first().outerHeight(true)
@items = @tray.find('li')
@display = @options.display or 1
@step = @options.step or 1
@tray.addClass 'slider_tray'
@create_arrows() if @options.controls
@tray.wrap "<div class='slider_window' />"
@window = @tray.parent()
@set_css()
@slided = 0
left_arrow: -> @slider.find('.left_arrow').first()
right_arrow: -> @slider.find('.right_arrow').first()
arrow_html: (direction) ->
"<a href='#'class='#{direction}_arrow'></a>"
wrapper_html: ->
create_arrows: ->
@tray.before @arrow_html 'left'
@tray.after @arrow_html 'right'
@left_arrow().bind 'click', (event) =>
event.preventDefault()
@handle_arrow_event 'left' if !@left_arrow().hasClass 'disabled'
@right_arrow().bind 'click', (event) =>
event.preventDefault()
@handle_arrow_event 'right' if !@right_arrow().hasClass 'disabled'
set_css: ->
@window.css
width: "#{@options.width}px"
height: "#{@options.height}px"
position: 'relative'
overflow: 'hidden'
float: 'left'
@tray.css
width: "#{@item_width * @item_count}px"
height: "#{@options.height}px"
display: 'block'
position: 'relative'
overflow: 'hidden'
'list-style': 'none'
margin: 0
padding: 0
@items.css
position: 'relative'
display: 'list-item'
float: 'left'
overflow: 'hidden'
@left_arrow().addClass 'disabled'
@left_arrow().css
'margin-top': "#{@item_height / 2}px"
float: 'left'
display: 'block'
@right_arrow().css
'margin-top': "#{@item_height / 2}px"
float: 'left'
display: 'block'
calculate_left_value: (direction, value) ->
value = parseInt(value) or 0
if direction == 'left'
@slided -= @step
value + (@step * @item_width)
else
@slided += @step
value - (@step * @item_width)
handle_arrow_event: (direction) ->
@tray.animate {'left': (@calculate_left_value direction, (@tray.css 'left'))},
duration: @options.duration or 500
if @slided <= 0
@left_arrow().addClass 'disabled'
@right_arrow().removeClass 'disabled'
else if @slided >= @item_count - @display
@right_arrow().addClass 'disabled'
@left_arrow().removeClass 'disabled'
else
@left_arrow().removeClass 'disabled'
@right_arrow().removeClass 'disabled'
|
[
{
"context": "rder: 'myOrder'\n filters:\n key1: 'value1'\n key2: 'value2'\n key3:\n ",
"end": 8445,
"score": 0.9654623866081238,
"start": 8439,
"tag": "KEY",
"value": "value1"
},
{
"context": "ilters:\n key1: 'value1'\n key2:... | spec/loaders/abstract-loader-shared-behavior.coffee | mavenlink/brainstem-js | 8 | _ = require 'underscore'
$ = require 'jquery'
Backbone = require 'backbone'
Backbone.$ = $ # TODO remove after upgrading to backbone 1.2+
StorageManager = require '../../src/storage-manager'
Tasks = require '../helpers/models/tasks'
registerSharedBehavior "AbstractLoaderSharedBehavior", (sharedContext) ->
loader = loaderClass = null
beforeEach ->
loaderClass = sharedContext.loaderClass
fakeNestedInclude = ['parent', { project: ['participants'] }, { assignees: ['something_else'] }]
defaultLoadOptions = ->
name: 'tasks'
createLoader = (opts = {}) ->
storageManager = StorageManager.get()
storageManager.addCollection('tasks', Tasks)
defaults =
storageManager: storageManager
loader = new loaderClass(_.extend {}, defaults, opts)
loader._getCollectionName = -> 'tasks'
loader._createObjects = ->
@internalObject = bar: 'foo'
@externalObject = foo: 'bar'
loader._getModelsForAssociation = -> [{ id: 5 }, { id: 2 }, { id: 1 }, { id: 4 }, { id: 1 }, [{ id: 6 }], { id: null }]
loader._getModel = -> Tasks::model
loader._updateStorageManagerFromResponse = jasmine.createSpy()
loader._updateObjects = (obj, data, silent) ->
obj.setLoaded true unless silent
spyOn(loader, '_updateObjects')
loader
describe '#constructor', ->
it 'saves off a reference to the passed in StorageManager', ->
storageManager = StorageManager.get()
loader = createLoader(storageManager: storageManager)
expect(loader.storageManager).toEqual storageManager
it 'creates a deferred object and turns the loader into a promise', ->
spy = jasmine.createSpy('promise spy')
loader = createLoader()
expect(loader._deferred).not.toBeUndefined()
loader.then(spy)
loader._deferred.resolve()
expect(spy).toHaveBeenCalled()
describe 'options.loadOptions', ->
it 'calls #setup with loadOptions if loadOptions were passed in', ->
spy = spyOn(loaderClass.prototype, 'setup')
loader = createLoader(loadOptions: defaultLoadOptions())
expect(spy).toHaveBeenCalledWith defaultLoadOptions()
it 'does not call #setup if loadOptions were not passed in', ->
spy = spyOn(loaderClass.prototype, 'setup')
loader = createLoader()
expect(spy).not.toHaveBeenCalled()
describe '#setup', ->
it 'calls #_parseLoadOptions with the loadOptions', ->
loader = createLoader()
spyOn(loader, '_parseLoadOptions')
opts = foo: 'bar'
loader.setup(opts)
expect(loader._parseLoadOptions).toHaveBeenCalledWith(opts)
it 'calls _createObjects', ->
loader = createLoader()
spyOn(loader, '_createObjects')
loader.setup()
expect(loader._createObjects).toHaveBeenCalled()
it 'returns the externalObject', ->
loader = createLoader()
spyOn(loader, '_parseLoadOptions')
externalObject = loader.setup()
expect(externalObject).toEqual(loader.externalObject)
describe '#getCacheObject', ->
it 'returns the object', ->
loader = createLoader()
opts = defaultLoadOptions()
loader.setup(opts)
cacheKey = loader.loadOptions.cacheKey
expect(loader.getCacheObject()).toBeUndefined()
fakeCache = [key: "tasks", id: 5]
loader.storageManager.getCollectionDetails(loader._getCollectionName()).cache[cacheKey] = fakeCache
expect(loader.getCacheObject()).toEqual fakeCache
describe '#load', ->
describe 'sanity checking loadOptions', ->
funct = null
beforeEach ->
loader = createLoader()
spyOn(loader, '_checkCacheForData')
spyOn(loader, '_loadFromServer')
funct = -> loader.load()
it 'throws if there are no loadOptions', ->
expect(funct).toThrow()
it 'does not throw if there are loadOptions', ->
loader.loadOptions = {}
expect(funct).not.toThrow()
describe 'checking the cache', ->
beforeEach ->
loader = createLoader()
spyOn(loader, '_checkCacheForData')
spyOn(loader, '_loadFromServer')
context 'loadOptions.cache is true', ->
it 'calls #_checkCacheForData', ->
loader.setup()
expect(loader.loadOptions.cache).toEqual(true)
loader.load()
expect(loader._checkCacheForData).toHaveBeenCalled()
context '#_checkCacheForData returns data', ->
it 'returns the data', ->
fakeData = ['some', 'stuff']
loader._checkCacheForData.and.returnValue(fakeData)
loader.setup()
expect(loader.load()).toEqual(fakeData)
context '#_checkCacheForData does not return data', ->
it 'calls #_loadFromServer', ->
loader.setup()
loader.load()
expect(loader._loadFromServer).toHaveBeenCalled()
context 'loadOptions.cache is false', ->
it 'does not call #_checkCacheForData', ->
loader.setup(cache: false)
loader.load()
expect(loader._checkCacheForData).not.toHaveBeenCalled()
it 'calls #_loadFromServer', ->
loader.setup()
loader.load()
expect(loader._loadFromServer).toHaveBeenCalled()
describe '#_getIdsForAssociation', ->
it 'returns the flattened, unique, sorted, and non-null IDs from the models that are returned from #_getModelsForAssociation', ->
loader = createLoader()
expect(loader._getIdsForAssociation('foo')).toEqual [1, 2, 4, 5, 6]
describe '#_updateObjects', ->
fakeObj = null
beforeEach ->
loader = createLoader()
fakeObj = setLoaded: jasmine.createSpy()
loader._updateObjects.and.callThrough()
it 'sets the object to loaded if silent is false', ->
loader._updateObjects(fakeObj, {})
expect(fakeObj.setLoaded).toHaveBeenCalled()
it 'does not set the object to loaded if silent is true', ->
loader._updateObjects(fakeObj, {}, true)
expect(fakeObj.setLoaded).not.toHaveBeenCalled()
describe '#_parseLoadOptions', ->
opts = null
beforeEach ->
loader = createLoader()
opts = defaultLoadOptions()
it 'saves off a reference of the loadOptions as originalOptions', ->
loader._parseLoadOptions(defaultLoadOptions())
expect(loader.originalOptions).toEqual(defaultLoadOptions())
it 'parses the include options', ->
opts.include = ['foo': ['bar'], 'toad', 'stool']
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.include).toEqual [
{ foo: [{ bar: [ ]}] }
{ toad: [] }
{ stool: [] }
]
describe 'only parsing', ->
context 'only is present', ->
it 'sets only as an array of strings from the original only', ->
opts.only = [1, 2, 3, 4]
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.only).toEqual ['1', '2', '3', '4']
context 'only is not present', ->
it 'sets only as null', ->
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.only).toEqual(null)
it 'defaults filters to an empty object', ->
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.filters).toEqual {}
# make sure it leaves them alone if they are present
opts.filters = filters = foo: 'bar'
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.filters).toEqual filters
it 'pulls of the top layer of includes and sets them as thisLayerInclude', ->
opts.include = ['foo': ['bar'], 'toad': ['stool'], 'mushroom']
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.thisLayerInclude).toEqual ['foo', 'toad', 'mushroom']
it 'defaults cache to true', ->
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.cache).toEqual true
# make sure it leaves cache alone if it is present
opts.cache = false
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.cache).toEqual false
it 'sets cache to false if search is present', ->
opts = _.extend opts, cache: true, search: 'term'
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.cache).toEqual false
it 'builds a cache key', ->
# order, filterKeys, page, perPage, limit, offset
myOpts =
order: 'myOrder'
filters:
key1: 'value1'
key2: 'value2'
key3:
value1: 'a'
value2: 'b'
page: 1
perPage: 200
limit: 50
offset: 0
only: [3, 1, 2]
search: 'foobar'
opts = _.extend(opts, myOpts)
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.cacheKey).toEqual 'myOrder|{"key1":"value1","key2":"value2","key3":{"value1":"a","value2":"b"}}|1,2,3|1|200|50|0|foobar'
it 'sets the cachedCollection on the loader from the storageManager', ->
loader._parseLoadOptions(opts)
expect(loader.cachedCollection).toEqual loader.storageManager.storage(loader.loadOptions.name)
describe '#_checkCacheForData', ->
opts = null
taskOne = taskTwo = null
beforeEach ->
loader = createLoader()
opts = defaultLoadOptions()
spyOn(loader, '_onLoadSuccess')
taskOne = buildTask(id: 2)
taskTwo = buildTask(id: 3)
notFound = (loader, opts) ->
loader.setup(opts)
ret = loader._checkCacheForData()
expect(ret).toEqual false
expect(loader._onLoadSuccess).not.toHaveBeenCalled()
context 'only query', ->
beforeEach ->
opts.only = ['2', '3']
context 'the requested IDs have all been loaded', ->
beforeEach ->
loader.storageManager.storage('tasks').add([taskOne, taskTwo])
it 'calls #_onLoadSuccess with the models from the cache', ->
loader.setup(opts)
loader._checkCacheForData()
expect(loader._onLoadSuccess.calls.argsFor(0)[0]).toEqual([taskOne, taskTwo])
context 'the requested IDs have not all been loaded', ->
beforeEach ->
loader.storageManager.storage('tasks').add([taskOne])
it 'returns false and does not call #_onLoadSuccess', ->
loader.setup(opts)
notFound(loader, opts)
context 'when optional fields have been requested but the fields arent on all the tasks', ->
beforeEach ->
opts.optionalFields = ['test_field']
taskOne.set('test_field', 'fake value')
loader.storageManager.storage('tasks').add([taskOne, taskTwo])
loader.setup(opts)
it 'returns false', ->
expect(loader._checkCacheForData()).toEqual(false)
it 'does not call #_onLoadSuccess', ->
loader._checkCacheForData()
expect(loader._onLoadSuccess).not.toHaveBeenCalled()
context 'when optional fields have been requested and the fields are already on the tasks', ->
beforeEach ->
opts.optionalFields = ['test_field']
taskOne.set('test_field', 'fake value for one')
taskTwo.set('test_field', 'fake value for two')
loader.storageManager.storage('tasks').add([taskOne, taskTwo])
loader.setup(opts)
loader._checkCacheForData()
it 'calls #_onLoadSuccess with the models from the cache', ->
expect(loader._onLoadSuccess.calls.argsFor(0)[0]).toEqual([taskOne, taskTwo])
context 'not an only query', ->
context 'there exists a cache with this cacheKey', ->
beforeEach ->
loader.storageManager.storage('tasks').add taskOne
context 'cache is valid', ->
beforeEach ->
fakeCacheObject =
count: 1
results: [key: "tasks", id: taskOne.id]
valid: true
loader.storageManager.getCollectionDetails('tasks').cache['updated_at:desc|||||||'] = fakeCacheObject
context 'all of the cached models have their associations loaded', ->
beforeEach ->
taskOne.set('project_id', buildAndCacheProject().id, { silent: true })
it 'calls #_onLoadSuccess with the models from the cache', ->
opts.include = ['project']
loader.setup(opts)
loader._checkCacheForData()
expect(loader._onLoadSuccess.calls.argsFor(0)[0]).toEqual([taskOne])
context 'all of the cached models do not have their associations loaded', ->
it 'returns false and does not call #_onLoadSuccess', ->
opts.include = ['project']
loader.setup(opts)
notFound(loader, opts)
context 'all of the cached models have their optional fields loaded', ->
beforeEach ->
taskOne.set('test_field', 'test value', { silent: true })
opts.optionalFields = ['test_field']
loader.setup(opts)
loader._checkCacheForData()
it 'calls #_onLoadSuccess with the models from the cache', ->
expect(loader._onLoadSuccess).toHaveBeenCalledWith([taskOne])
context 'all of the cached models do not have their optional fields loaded', ->
beforeEach ->
opts.optionalFields = ['test_field']
loader.setup(opts)
it 'returns false', ->
expect(loader._checkCacheForData()).toEqual(false)
it 'does not call #_onLoadSuccess', ->
loader._checkCacheForData()
expect(loader._onLoadSuccess).not.toHaveBeenCalled()
context 'cache is invalid', ->
beforeEach ->
fakeCacheObject =
count: 1
results: [key: "tasks", id: taskOne.id]
valid: false
loader.storageManager.getCollectionDetails('tasks').cache['updated_at:desc||||||'] = fakeCacheObject
it 'returns false and does not call #_onLoadSuccess', ->
loader.setup(opts)
notFound(loader, opts)
context 'there is no cache with this cacheKey', ->
it 'does not call #_onLoadSuccess and returns false', ->
loader.setup(opts)
notFound(loader, opts)
describe '#_loadFromServer', ->
opts = syncOpts = null
beforeEach ->
loader = createLoader()
opts = defaultLoadOptions()
syncOpts = data: 'foo'
spyOn(Backbone, 'sync').and.returnValue $.ajax()
spyOn(loader, '_buildSyncOptions').and.returnValue(syncOpts)
it 'calls Backbone.sync with the read, the, internalObject, and #_buildSyncOptions', ->
loader.setup(opts)
loader._loadFromServer()
expect(Backbone.sync).toHaveBeenCalledWith 'read', loader.internalObject, syncOpts
it 'puts the jqXhr on the returnValues if present', ->
opts.returnValues = returnValues = {}
loader.setup(opts)
loader._loadFromServer()
expect(returnValues.jqXhr.success).not.toBeUndefined()
it 'returns the externalObject', ->
loader.setup(opts)
ret = loader._loadFromServer()
expect(ret).toEqual loader.externalObject
describe '#_calculateAdditionalIncludes', ->
opts = null
beforeEach ->
loader = createLoader()
opts = defaultLoadOptions()
spyOn(loader, '_getIdsForAssociation').and.returnValue [1, 2]
it 'adds each additional (sub) include to the additionalIncludes array', ->
opts.include = fakeNestedInclude
loader.setup(opts)
loader._calculateAdditionalIncludes()
expect(loader.additionalIncludes.length).toEqual 2
expect(loader.additionalIncludes).toEqual [
{ name: 'project', ids: [1, 2], include: [participants: []] }
{ name: 'assignees', ids: [1, 2], include: [something_else: []] }
]
describe '#_loadAdditionalIncludes', ->
opts = null
beforeEach ->
loader = createLoader()
opts = _.extend defaultLoadOptions(),
cache: false
headers:
'X-Feature-Name': 'a-feature'
opts.include = fakeNestedInclude
loader.setup(opts)
loader._calculateAdditionalIncludes()
spyOn(loader, '_onLoadingCompleted')
it 'respects "cache" option in nested includes', ->
spyOn(loader.storageManager, 'loadObject')
loader._loadAdditionalIncludes()
calls = loader.storageManager.loadObject.calls
expect(calls.count()).toBeGreaterThan(0)
for call in calls.all()
expect(call.args[1].cache).toBe(false)
it 'respects "feature_name" option in nested includes', ->
spyOn(loader.storageManager, 'loadObject')
loader._loadAdditionalIncludes()
calls = loader.storageManager.loadObject.calls
expect(calls.count()).toBeGreaterThan(0)
for call in calls.all()
expect(call.args[1].headers['X-Feature-Name']).toBe('a-feature')
it 'creates a request for each additional include and calls #_onLoadingCompleted when they all are done', ->
promises = []
spyOn(loader.storageManager, 'loadObject').and.callFake ->
promise = $.Deferred()
promises.push(promise)
promise
loader._loadAdditionalIncludes()
expect(loader.storageManager.loadObject.calls.count()).toEqual 2
expect(promises.length).toEqual 2
expect(loader._onLoadingCompleted).not.toHaveBeenCalled()
for promise in promises
promise.resolve()
expect(loader._onLoadingCompleted).toHaveBeenCalled()
describe 'batching', ->
beforeEach ->
spyOn(loader, '_getIdsForAssociation').and.returnValue [1, 2, 3, 4, 5]
spyOn(loader.storageManager, 'loadObject')
context 'there are less than the associated ID limit', ->
beforeEach ->
loader.associationIdLimit = 100
it 'makes a single request for each association', ->
loader._loadAdditionalIncludes()
expect(loader.storageManager.loadObject.calls.count()).toEqual 2
context 'there are more than the associated ID limit', ->
beforeEach ->
loader.associationIdLimit = 2
it 'makes multiple requests for each association', ->
loader._loadAdditionalIncludes()
expect(loader.storageManager.loadObject.calls.count()).toEqual 6
describe '#_buildSyncOptions', ->
syncOptions = opts = null
beforeEach ->
loader = createLoader()
opts = defaultLoadOptions()
getSyncOptions = (loader, opts) ->
loader.setup(opts)
loader._buildSyncOptions()
it 'sets parse to true', ->
expect(getSyncOptions(loader, opts).parse).toEqual(true)
it 'sets error as #_onServerLoadError', ->
expect(getSyncOptions(loader, opts).error).toEqual(loader._onServerLoadError)
it 'sets success as #_onServerLoadSuccess', ->
expect(getSyncOptions(loader, opts).success).toEqual(loader._onServerLoadSuccess)
it 'sets data.include to be the layer of includes that this loader is loading', ->
opts.include = [
task: [ workspace: ['participants'] ]
'time_entries'
]
expect(getSyncOptions(loader, opts).data.include).toEqual('task,time_entries')
it 'sets the headers', ->
opts.headers = {
'X-Custom-Header': 'custom-header-value',
}
expect(getSyncOptions(loader, opts).headers['X-Custom-Header']).toEqual('custom-header-value')
describe 'data.only', ->
context 'this is an only load', ->
context '#_shouldUseOnly returns true', ->
beforeEach ->
spyOn(loader, '_shouldUseOnly').and.returnValue(true)
it 'sets data.only to comma separated ids', ->
opts.only = [1, 2, 3, 4]
expect(getSyncOptions(loader, opts).data.only).toEqual '1,2,3,4'
context '#_shouldUseOnly returns false', ->
beforeEach ->
spyOn(loader, '_shouldUseOnly').and.returnValue(true)
it 'does not set data.only', ->
expect(getSyncOptions(loader, opts).data.only).toBeUndefined()
context 'this is not an only load', ->
it 'does not set data.only', ->
expect(getSyncOptions(loader, opts).data.only).toBeUndefined()
describe 'data.order', ->
it 'sets order to be loadOptions.order if present', ->
opts.order = 'foo'
expect(getSyncOptions(loader, opts).data.order).toEqual 'foo'
describe 'extending data with filters and custom params', ->
blacklist = ['include', 'only', 'order', 'per_page', 'page', 'limit', 'offset', 'search']
excludesBlacklistFromObject = (object) ->
object[key] = 'overwritten' for key in blacklist
data = getSyncOptions(loader, opts).data
expect(data[key]).toBeUndefined() for key in blacklist
context 'filters do not exist', ->
beforeEach ->
opts.filters = undefined
it 'does not throw an error parsing filters', ->
expect()
expect(-> getSyncOptions(loader, opts)).not.toThrow()
context 'filters exist', ->
beforeEach ->
opts.filters = {}
it 'includes filter in data object', ->
opts.filters.foo = 'bar'
data = getSyncOptions(loader, opts).data
expect(data.foo).toEqual 'bar'
it 'excludes blacklisted brainstem specific keys from filters', ->
excludesBlacklistFromObject(opts.filters)
context 'params do not exist', ->
beforeEach ->
opts.params = undefined
it 'does not throw an error parsing params', ->
expect(-> getSyncOptions(loader, opts)).not.toThrow()
context 'custom params exist', ->
beforeEach ->
opts.params = {}
it 'includes custom params in data object', ->
opts.params = { color: 'red' }
data = getSyncOptions(loader, opts).data
expect(data.color).toEqual 'red'
it 'excludes blacklisted brainstem specific keys from custom params', ->
excludesBlacklistFromObject(opts.params)
describe 'pagination', ->
beforeEach ->
opts.offset = 0
opts.limit = 25
opts.perPage = 25
opts.page = 1
context 'not an only request', ->
context 'there is a limit and offset', ->
it 'adds limit and offset', ->
data = getSyncOptions(loader, opts).data
expect(data.limit).toEqual 25
expect(data.offset).toEqual 0
it 'does not add per_page and page', ->
data = getSyncOptions(loader, opts).data
expect(data.per_page).toBeUndefined()
expect(data.page).toBeUndefined()
context 'there is not a limit and offset', ->
beforeEach ->
delete opts.limit
delete opts.offset
it 'adds per_page and page', ->
data = getSyncOptions(loader, opts).data
expect(data.per_page).toEqual 25
expect(data.page).toEqual 1
it 'does not add limit and offset', ->
data = getSyncOptions(loader, opts).data
expect(data.limit).toBeUndefined()
expect(data.offset).toBeUndefined()
context 'only request', ->
beforeEach ->
opts.only = 1
it 'does not add limit, offset, per_page, or page', ->
data = getSyncOptions(loader, opts).data
expect(data.limit).toBeUndefined()
expect(data.offset).toBeUndefined()
expect(data.per_page).toBeUndefined()
expect(data.page).toBeUndefined()
describe 'data.search', ->
it 'sets data.search to be loadOptions.search if present', ->
opts.search = 'term'
expect(getSyncOptions(loader, opts).data.search).toEqual 'term'
describe '#_shouldUseOnly', ->
it 'returns true if internalObject is an instance of a Backbone.Collection', ->
loader = createLoader()
loader.internalObject = new Backbone.Collection()
expect(loader._shouldUseOnly()).toEqual true
it 'returns false if internalObject is not an instance of a Backbone.Collection', ->
loader = createLoader()
loader.internalObject = new Backbone.Model()
expect(loader._shouldUseOnly()).toEqual false
describe '#_modelsOrObj', ->
beforeEach ->
loader = createLoader()
context 'obj is a Backbone.Collection', ->
it 'returns the models from the collection', ->
collection = new Backbone.Collection()
collection.add([new Backbone.Model(), new Backbone.Model])
expect(loader._modelsOrObj(collection)).toEqual(collection.models)
context 'obj is a single object', ->
it 'returns obj wrapped in an array', ->
obj = new Backbone.Model()
expect(loader._modelsOrObj(obj)).toEqual([obj])
context 'obj is an array', ->
it 'returns obj', ->
obj = []
expect(loader._modelsOrObj(obj)).toEqual(obj)
context 'obj is undefined', ->
it 'returns an empty array', ->
obj = null
expect(loader._modelsOrObj(obj)).toEqual([])
describe '#_onServerLoadSuccess', ->
beforeEach ->
loader = createLoader()
spyOn(loader, '_onLoadSuccess')
it 'calls #_updateStorageManagerFromResponse with the response', ->
loader._onServerLoadSuccess('response')
expect(loader._updateStorageManagerFromResponse).toHaveBeenCalledWith 'response'
it 'calls #_onServerLoadSuccess with the result from #_updateStorageManagerFromResponse', ->
loader._updateStorageManagerFromResponse.and.returnValue 'data'
loader._onServerLoadSuccess()
expect(loader._onLoadSuccess).toHaveBeenCalledWith 'data'
describe '#_onLoadSuccess', ->
beforeEach ->
loader = createLoader()
loader.additionalIncludes = []
spyOn(loader, '_onLoadingCompleted')
spyOn(loader, '_loadAdditionalIncludes')
spyOn(loader, '_calculateAdditionalIncludes')
it 'calls #_updateObjects with the internalObject, the data, and silent set to true', ->
loader._onLoadSuccess('test data')
expect(loader._updateObjects).toHaveBeenCalledWith(loader.internalObject, 'test data', true)
it 'calls #_calculateAdditionalIncludes', ->
loader._onLoadSuccess()
expect(loader._calculateAdditionalIncludes).toHaveBeenCalled()
context 'additional includes are needed', ->
it 'calls #_loadAdditionalIncludes', ->
loader._calculateAdditionalIncludes.and.callFake -> @additionalIncludes = ['foo']
loader._onLoadSuccess()
expect(loader._loadAdditionalIncludes).toHaveBeenCalled()
expect(loader._onLoadingCompleted).not.toHaveBeenCalled()
context 'additional includes are not needed', ->
it 'calls #_onLoadingCompleted', ->
loader._onLoadSuccess()
expect(loader._onLoadingCompleted).toHaveBeenCalled()
expect(loader._loadAdditionalIncludes).not.toHaveBeenCalled()
describe '#_onLoadingCompleted', ->
beforeEach ->
loader = createLoader()
it 'calls #_updateObjects with the externalObject and internalObject', ->
loader._onLoadingCompleted()
expect(loader._updateObjects).toHaveBeenCalledWith(loader.externalObject, loader.internalObject)
it 'resolves the deferred object with the externalObject', ->
spy = jasmine.createSpy()
loader.then(spy)
loader._onLoadingCompleted()
expect(spy).toHaveBeenCalledWith(loader.externalObject)
| 163930 | _ = require 'underscore'
$ = require 'jquery'
Backbone = require 'backbone'
Backbone.$ = $ # TODO remove after upgrading to backbone 1.2+
StorageManager = require '../../src/storage-manager'
Tasks = require '../helpers/models/tasks'
registerSharedBehavior "AbstractLoaderSharedBehavior", (sharedContext) ->
loader = loaderClass = null
beforeEach ->
loaderClass = sharedContext.loaderClass
fakeNestedInclude = ['parent', { project: ['participants'] }, { assignees: ['something_else'] }]
defaultLoadOptions = ->
name: 'tasks'
createLoader = (opts = {}) ->
storageManager = StorageManager.get()
storageManager.addCollection('tasks', Tasks)
defaults =
storageManager: storageManager
loader = new loaderClass(_.extend {}, defaults, opts)
loader._getCollectionName = -> 'tasks'
loader._createObjects = ->
@internalObject = bar: 'foo'
@externalObject = foo: 'bar'
loader._getModelsForAssociation = -> [{ id: 5 }, { id: 2 }, { id: 1 }, { id: 4 }, { id: 1 }, [{ id: 6 }], { id: null }]
loader._getModel = -> Tasks::model
loader._updateStorageManagerFromResponse = jasmine.createSpy()
loader._updateObjects = (obj, data, silent) ->
obj.setLoaded true unless silent
spyOn(loader, '_updateObjects')
loader
describe '#constructor', ->
it 'saves off a reference to the passed in StorageManager', ->
storageManager = StorageManager.get()
loader = createLoader(storageManager: storageManager)
expect(loader.storageManager).toEqual storageManager
it 'creates a deferred object and turns the loader into a promise', ->
spy = jasmine.createSpy('promise spy')
loader = createLoader()
expect(loader._deferred).not.toBeUndefined()
loader.then(spy)
loader._deferred.resolve()
expect(spy).toHaveBeenCalled()
describe 'options.loadOptions', ->
it 'calls #setup with loadOptions if loadOptions were passed in', ->
spy = spyOn(loaderClass.prototype, 'setup')
loader = createLoader(loadOptions: defaultLoadOptions())
expect(spy).toHaveBeenCalledWith defaultLoadOptions()
it 'does not call #setup if loadOptions were not passed in', ->
spy = spyOn(loaderClass.prototype, 'setup')
loader = createLoader()
expect(spy).not.toHaveBeenCalled()
describe '#setup', ->
it 'calls #_parseLoadOptions with the loadOptions', ->
loader = createLoader()
spyOn(loader, '_parseLoadOptions')
opts = foo: 'bar'
loader.setup(opts)
expect(loader._parseLoadOptions).toHaveBeenCalledWith(opts)
it 'calls _createObjects', ->
loader = createLoader()
spyOn(loader, '_createObjects')
loader.setup()
expect(loader._createObjects).toHaveBeenCalled()
it 'returns the externalObject', ->
loader = createLoader()
spyOn(loader, '_parseLoadOptions')
externalObject = loader.setup()
expect(externalObject).toEqual(loader.externalObject)
describe '#getCacheObject', ->
it 'returns the object', ->
loader = createLoader()
opts = defaultLoadOptions()
loader.setup(opts)
cacheKey = loader.loadOptions.cacheKey
expect(loader.getCacheObject()).toBeUndefined()
fakeCache = [key: "tasks", id: 5]
loader.storageManager.getCollectionDetails(loader._getCollectionName()).cache[cacheKey] = fakeCache
expect(loader.getCacheObject()).toEqual fakeCache
describe '#load', ->
describe 'sanity checking loadOptions', ->
funct = null
beforeEach ->
loader = createLoader()
spyOn(loader, '_checkCacheForData')
spyOn(loader, '_loadFromServer')
funct = -> loader.load()
it 'throws if there are no loadOptions', ->
expect(funct).toThrow()
it 'does not throw if there are loadOptions', ->
loader.loadOptions = {}
expect(funct).not.toThrow()
describe 'checking the cache', ->
beforeEach ->
loader = createLoader()
spyOn(loader, '_checkCacheForData')
spyOn(loader, '_loadFromServer')
context 'loadOptions.cache is true', ->
it 'calls #_checkCacheForData', ->
loader.setup()
expect(loader.loadOptions.cache).toEqual(true)
loader.load()
expect(loader._checkCacheForData).toHaveBeenCalled()
context '#_checkCacheForData returns data', ->
it 'returns the data', ->
fakeData = ['some', 'stuff']
loader._checkCacheForData.and.returnValue(fakeData)
loader.setup()
expect(loader.load()).toEqual(fakeData)
context '#_checkCacheForData does not return data', ->
it 'calls #_loadFromServer', ->
loader.setup()
loader.load()
expect(loader._loadFromServer).toHaveBeenCalled()
context 'loadOptions.cache is false', ->
it 'does not call #_checkCacheForData', ->
loader.setup(cache: false)
loader.load()
expect(loader._checkCacheForData).not.toHaveBeenCalled()
it 'calls #_loadFromServer', ->
loader.setup()
loader.load()
expect(loader._loadFromServer).toHaveBeenCalled()
describe '#_getIdsForAssociation', ->
it 'returns the flattened, unique, sorted, and non-null IDs from the models that are returned from #_getModelsForAssociation', ->
loader = createLoader()
expect(loader._getIdsForAssociation('foo')).toEqual [1, 2, 4, 5, 6]
describe '#_updateObjects', ->
fakeObj = null
beforeEach ->
loader = createLoader()
fakeObj = setLoaded: jasmine.createSpy()
loader._updateObjects.and.callThrough()
it 'sets the object to loaded if silent is false', ->
loader._updateObjects(fakeObj, {})
expect(fakeObj.setLoaded).toHaveBeenCalled()
it 'does not set the object to loaded if silent is true', ->
loader._updateObjects(fakeObj, {}, true)
expect(fakeObj.setLoaded).not.toHaveBeenCalled()
describe '#_parseLoadOptions', ->
opts = null
beforeEach ->
loader = createLoader()
opts = defaultLoadOptions()
it 'saves off a reference of the loadOptions as originalOptions', ->
loader._parseLoadOptions(defaultLoadOptions())
expect(loader.originalOptions).toEqual(defaultLoadOptions())
it 'parses the include options', ->
opts.include = ['foo': ['bar'], 'toad', 'stool']
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.include).toEqual [
{ foo: [{ bar: [ ]}] }
{ toad: [] }
{ stool: [] }
]
describe 'only parsing', ->
context 'only is present', ->
it 'sets only as an array of strings from the original only', ->
opts.only = [1, 2, 3, 4]
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.only).toEqual ['1', '2', '3', '4']
context 'only is not present', ->
it 'sets only as null', ->
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.only).toEqual(null)
it 'defaults filters to an empty object', ->
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.filters).toEqual {}
# make sure it leaves them alone if they are present
opts.filters = filters = foo: 'bar'
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.filters).toEqual filters
it 'pulls of the top layer of includes and sets them as thisLayerInclude', ->
opts.include = ['foo': ['bar'], 'toad': ['stool'], 'mushroom']
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.thisLayerInclude).toEqual ['foo', 'toad', 'mushroom']
it 'defaults cache to true', ->
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.cache).toEqual true
# make sure it leaves cache alone if it is present
opts.cache = false
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.cache).toEqual false
it 'sets cache to false if search is present', ->
opts = _.extend opts, cache: true, search: 'term'
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.cache).toEqual false
it 'builds a cache key', ->
# order, filterKeys, page, perPage, limit, offset
myOpts =
order: 'myOrder'
filters:
key1: '<KEY>'
key2: '<KEY>'
key3:
value1: '<KEY>'
value2: '<KEY>'
page: 1
perPage: 200
limit: 50
offset: 0
only: [3, 1, 2]
search: 'foobar'
opts = _.extend(opts, myOpts)
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.cacheKey).toEqual 'myOrder|{"key1":"value1","key2":"value2","key3":{"value1":"a","value2":"b"}}|1,2,3|1|200|50|0|foobar'
it 'sets the cachedCollection on the loader from the storageManager', ->
loader._parseLoadOptions(opts)
expect(loader.cachedCollection).toEqual loader.storageManager.storage(loader.loadOptions.name)
describe '#_checkCacheForData', ->
opts = null
taskOne = taskTwo = null
beforeEach ->
loader = createLoader()
opts = defaultLoadOptions()
spyOn(loader, '_onLoadSuccess')
taskOne = buildTask(id: 2)
taskTwo = buildTask(id: 3)
notFound = (loader, opts) ->
loader.setup(opts)
ret = loader._checkCacheForData()
expect(ret).toEqual false
expect(loader._onLoadSuccess).not.toHaveBeenCalled()
context 'only query', ->
beforeEach ->
opts.only = ['2', '3']
context 'the requested IDs have all been loaded', ->
beforeEach ->
loader.storageManager.storage('tasks').add([taskOne, taskTwo])
it 'calls #_onLoadSuccess with the models from the cache', ->
loader.setup(opts)
loader._checkCacheForData()
expect(loader._onLoadSuccess.calls.argsFor(0)[0]).toEqual([taskOne, taskTwo])
context 'the requested IDs have not all been loaded', ->
beforeEach ->
loader.storageManager.storage('tasks').add([taskOne])
it 'returns false and does not call #_onLoadSuccess', ->
loader.setup(opts)
notFound(loader, opts)
context 'when optional fields have been requested but the fields arent on all the tasks', ->
beforeEach ->
opts.optionalFields = ['test_field']
taskOne.set('test_field', 'fake value')
loader.storageManager.storage('tasks').add([taskOne, taskTwo])
loader.setup(opts)
it 'returns false', ->
expect(loader._checkCacheForData()).toEqual(false)
it 'does not call #_onLoadSuccess', ->
loader._checkCacheForData()
expect(loader._onLoadSuccess).not.toHaveBeenCalled()
context 'when optional fields have been requested and the fields are already on the tasks', ->
beforeEach ->
opts.optionalFields = ['test_field']
taskOne.set('test_field', 'fake value for one')
taskTwo.set('test_field', 'fake value for two')
loader.storageManager.storage('tasks').add([taskOne, taskTwo])
loader.setup(opts)
loader._checkCacheForData()
it 'calls #_onLoadSuccess with the models from the cache', ->
expect(loader._onLoadSuccess.calls.argsFor(0)[0]).toEqual([taskOne, taskTwo])
context 'not an only query', ->
context 'there exists a cache with this cacheKey', ->
beforeEach ->
loader.storageManager.storage('tasks').add taskOne
context 'cache is valid', ->
beforeEach ->
fakeCacheObject =
count: 1
results: [key: "tasks", id: taskOne.id]
valid: true
loader.storageManager.getCollectionDetails('tasks').cache['updated_at:desc|||||||'] = fakeCacheObject
context 'all of the cached models have their associations loaded', ->
beforeEach ->
taskOne.set('project_id', buildAndCacheProject().id, { silent: true })
it 'calls #_onLoadSuccess with the models from the cache', ->
opts.include = ['project']
loader.setup(opts)
loader._checkCacheForData()
expect(loader._onLoadSuccess.calls.argsFor(0)[0]).toEqual([taskOne])
context 'all of the cached models do not have their associations loaded', ->
it 'returns false and does not call #_onLoadSuccess', ->
opts.include = ['project']
loader.setup(opts)
notFound(loader, opts)
context 'all of the cached models have their optional fields loaded', ->
beforeEach ->
taskOne.set('test_field', 'test value', { silent: true })
opts.optionalFields = ['test_field']
loader.setup(opts)
loader._checkCacheForData()
it 'calls #_onLoadSuccess with the models from the cache', ->
expect(loader._onLoadSuccess).toHaveBeenCalledWith([taskOne])
context 'all of the cached models do not have their optional fields loaded', ->
beforeEach ->
opts.optionalFields = ['test_field']
loader.setup(opts)
it 'returns false', ->
expect(loader._checkCacheForData()).toEqual(false)
it 'does not call #_onLoadSuccess', ->
loader._checkCacheForData()
expect(loader._onLoadSuccess).not.toHaveBeenCalled()
context 'cache is invalid', ->
beforeEach ->
fakeCacheObject =
count: 1
results: [key: "<KEY>", id: taskOne.id]
valid: false
loader.storageManager.getCollectionDetails('tasks').cache['updated_at:desc||||||'] = fakeCacheObject
it 'returns false and does not call #_onLoadSuccess', ->
loader.setup(opts)
notFound(loader, opts)
context 'there is no cache with this cacheKey', ->
it 'does not call #_onLoadSuccess and returns false', ->
loader.setup(opts)
notFound(loader, opts)
describe '#_loadFromServer', ->
opts = syncOpts = null
beforeEach ->
loader = createLoader()
opts = defaultLoadOptions()
syncOpts = data: 'foo'
spyOn(Backbone, 'sync').and.returnValue $.ajax()
spyOn(loader, '_buildSyncOptions').and.returnValue(syncOpts)
it 'calls Backbone.sync with the read, the, internalObject, and #_buildSyncOptions', ->
loader.setup(opts)
loader._loadFromServer()
expect(Backbone.sync).toHaveBeenCalledWith 'read', loader.internalObject, syncOpts
it 'puts the jqXhr on the returnValues if present', ->
opts.returnValues = returnValues = {}
loader.setup(opts)
loader._loadFromServer()
expect(returnValues.jqXhr.success).not.toBeUndefined()
it 'returns the externalObject', ->
loader.setup(opts)
ret = loader._loadFromServer()
expect(ret).toEqual loader.externalObject
describe '#_calculateAdditionalIncludes', ->
opts = null
beforeEach ->
loader = createLoader()
opts = defaultLoadOptions()
spyOn(loader, '_getIdsForAssociation').and.returnValue [1, 2]
it 'adds each additional (sub) include to the additionalIncludes array', ->
opts.include = fakeNestedInclude
loader.setup(opts)
loader._calculateAdditionalIncludes()
expect(loader.additionalIncludes.length).toEqual 2
expect(loader.additionalIncludes).toEqual [
{ name: 'project', ids: [1, 2], include: [participants: []] }
{ name: 'assignees', ids: [1, 2], include: [something_else: []] }
]
describe '#_loadAdditionalIncludes', ->
opts = null
beforeEach ->
loader = createLoader()
opts = _.extend defaultLoadOptions(),
cache: false
headers:
'X-Feature-Name': 'a-feature'
opts.include = fakeNestedInclude
loader.setup(opts)
loader._calculateAdditionalIncludes()
spyOn(loader, '_onLoadingCompleted')
it 'respects "cache" option in nested includes', ->
spyOn(loader.storageManager, 'loadObject')
loader._loadAdditionalIncludes()
calls = loader.storageManager.loadObject.calls
expect(calls.count()).toBeGreaterThan(0)
for call in calls.all()
expect(call.args[1].cache).toBe(false)
it 'respects "feature_name" option in nested includes', ->
spyOn(loader.storageManager, 'loadObject')
loader._loadAdditionalIncludes()
calls = loader.storageManager.loadObject.calls
expect(calls.count()).toBeGreaterThan(0)
for call in calls.all()
expect(call.args[1].headers['X-Feature-Name']).toBe('a-feature')
it 'creates a request for each additional include and calls #_onLoadingCompleted when they all are done', ->
promises = []
spyOn(loader.storageManager, 'loadObject').and.callFake ->
promise = $.Deferred()
promises.push(promise)
promise
loader._loadAdditionalIncludes()
expect(loader.storageManager.loadObject.calls.count()).toEqual 2
expect(promises.length).toEqual 2
expect(loader._onLoadingCompleted).not.toHaveBeenCalled()
for promise in promises
promise.resolve()
expect(loader._onLoadingCompleted).toHaveBeenCalled()
describe 'batching', ->
beforeEach ->
spyOn(loader, '_getIdsForAssociation').and.returnValue [1, 2, 3, 4, 5]
spyOn(loader.storageManager, 'loadObject')
context 'there are less than the associated ID limit', ->
beforeEach ->
loader.associationIdLimit = 100
it 'makes a single request for each association', ->
loader._loadAdditionalIncludes()
expect(loader.storageManager.loadObject.calls.count()).toEqual 2
context 'there are more than the associated ID limit', ->
beforeEach ->
loader.associationIdLimit = 2
it 'makes multiple requests for each association', ->
loader._loadAdditionalIncludes()
expect(loader.storageManager.loadObject.calls.count()).toEqual 6
describe '#_buildSyncOptions', ->
syncOptions = opts = null
beforeEach ->
loader = createLoader()
opts = defaultLoadOptions()
getSyncOptions = (loader, opts) ->
loader.setup(opts)
loader._buildSyncOptions()
it 'sets parse to true', ->
expect(getSyncOptions(loader, opts).parse).toEqual(true)
it 'sets error as #_onServerLoadError', ->
expect(getSyncOptions(loader, opts).error).toEqual(loader._onServerLoadError)
it 'sets success as #_onServerLoadSuccess', ->
expect(getSyncOptions(loader, opts).success).toEqual(loader._onServerLoadSuccess)
it 'sets data.include to be the layer of includes that this loader is loading', ->
opts.include = [
task: [ workspace: ['participants'] ]
'time_entries'
]
expect(getSyncOptions(loader, opts).data.include).toEqual('task,time_entries')
it 'sets the headers', ->
opts.headers = {
'X-Custom-Header': 'custom-header-value',
}
expect(getSyncOptions(loader, opts).headers['X-Custom-Header']).toEqual('custom-header-value')
describe 'data.only', ->
context 'this is an only load', ->
context '#_shouldUseOnly returns true', ->
beforeEach ->
spyOn(loader, '_shouldUseOnly').and.returnValue(true)
it 'sets data.only to comma separated ids', ->
opts.only = [1, 2, 3, 4]
expect(getSyncOptions(loader, opts).data.only).toEqual '1,2,3,4'
context '#_shouldUseOnly returns false', ->
beforeEach ->
spyOn(loader, '_shouldUseOnly').and.returnValue(true)
it 'does not set data.only', ->
expect(getSyncOptions(loader, opts).data.only).toBeUndefined()
context 'this is not an only load', ->
it 'does not set data.only', ->
expect(getSyncOptions(loader, opts).data.only).toBeUndefined()
describe 'data.order', ->
it 'sets order to be loadOptions.order if present', ->
opts.order = 'foo'
expect(getSyncOptions(loader, opts).data.order).toEqual 'foo'
describe 'extending data with filters and custom params', ->
blacklist = ['include', 'only', 'order', 'per_page', 'page', 'limit', 'offset', 'search']
excludesBlacklistFromObject = (object) ->
object[key] = 'overwritten' for key in blacklist
data = getSyncOptions(loader, opts).data
expect(data[key]).toBeUndefined() for key in blacklist
context 'filters do not exist', ->
beforeEach ->
opts.filters = undefined
it 'does not throw an error parsing filters', ->
expect()
expect(-> getSyncOptions(loader, opts)).not.toThrow()
context 'filters exist', ->
beforeEach ->
opts.filters = {}
it 'includes filter in data object', ->
opts.filters.foo = 'bar'
data = getSyncOptions(loader, opts).data
expect(data.foo).toEqual 'bar'
it 'excludes blacklisted brainstem specific keys from filters', ->
excludesBlacklistFromObject(opts.filters)
context 'params do not exist', ->
beforeEach ->
opts.params = undefined
it 'does not throw an error parsing params', ->
expect(-> getSyncOptions(loader, opts)).not.toThrow()
context 'custom params exist', ->
beforeEach ->
opts.params = {}
it 'includes custom params in data object', ->
opts.params = { color: 'red' }
data = getSyncOptions(loader, opts).data
expect(data.color).toEqual 'red'
it 'excludes blacklisted brainstem specific keys from custom params', ->
excludesBlacklistFromObject(opts.params)
describe 'pagination', ->
beforeEach ->
opts.offset = 0
opts.limit = 25
opts.perPage = 25
opts.page = 1
context 'not an only request', ->
context 'there is a limit and offset', ->
it 'adds limit and offset', ->
data = getSyncOptions(loader, opts).data
expect(data.limit).toEqual 25
expect(data.offset).toEqual 0
it 'does not add per_page and page', ->
data = getSyncOptions(loader, opts).data
expect(data.per_page).toBeUndefined()
expect(data.page).toBeUndefined()
context 'there is not a limit and offset', ->
beforeEach ->
delete opts.limit
delete opts.offset
it 'adds per_page and page', ->
data = getSyncOptions(loader, opts).data
expect(data.per_page).toEqual 25
expect(data.page).toEqual 1
it 'does not add limit and offset', ->
data = getSyncOptions(loader, opts).data
expect(data.limit).toBeUndefined()
expect(data.offset).toBeUndefined()
context 'only request', ->
beforeEach ->
opts.only = 1
it 'does not add limit, offset, per_page, or page', ->
data = getSyncOptions(loader, opts).data
expect(data.limit).toBeUndefined()
expect(data.offset).toBeUndefined()
expect(data.per_page).toBeUndefined()
expect(data.page).toBeUndefined()
describe 'data.search', ->
it 'sets data.search to be loadOptions.search if present', ->
opts.search = 'term'
expect(getSyncOptions(loader, opts).data.search).toEqual 'term'
describe '#_shouldUseOnly', ->
it 'returns true if internalObject is an instance of a Backbone.Collection', ->
loader = createLoader()
loader.internalObject = new Backbone.Collection()
expect(loader._shouldUseOnly()).toEqual true
it 'returns false if internalObject is not an instance of a Backbone.Collection', ->
loader = createLoader()
loader.internalObject = new Backbone.Model()
expect(loader._shouldUseOnly()).toEqual false
describe '#_modelsOrObj', ->
beforeEach ->
loader = createLoader()
context 'obj is a Backbone.Collection', ->
it 'returns the models from the collection', ->
collection = new Backbone.Collection()
collection.add([new Backbone.Model(), new Backbone.Model])
expect(loader._modelsOrObj(collection)).toEqual(collection.models)
context 'obj is a single object', ->
it 'returns obj wrapped in an array', ->
obj = new Backbone.Model()
expect(loader._modelsOrObj(obj)).toEqual([obj])
context 'obj is an array', ->
it 'returns obj', ->
obj = []
expect(loader._modelsOrObj(obj)).toEqual(obj)
context 'obj is undefined', ->
it 'returns an empty array', ->
obj = null
expect(loader._modelsOrObj(obj)).toEqual([])
describe '#_onServerLoadSuccess', ->
beforeEach ->
loader = createLoader()
spyOn(loader, '_onLoadSuccess')
it 'calls #_updateStorageManagerFromResponse with the response', ->
loader._onServerLoadSuccess('response')
expect(loader._updateStorageManagerFromResponse).toHaveBeenCalledWith 'response'
it 'calls #_onServerLoadSuccess with the result from #_updateStorageManagerFromResponse', ->
loader._updateStorageManagerFromResponse.and.returnValue 'data'
loader._onServerLoadSuccess()
expect(loader._onLoadSuccess).toHaveBeenCalledWith 'data'
describe '#_onLoadSuccess', ->
beforeEach ->
loader = createLoader()
loader.additionalIncludes = []
spyOn(loader, '_onLoadingCompleted')
spyOn(loader, '_loadAdditionalIncludes')
spyOn(loader, '_calculateAdditionalIncludes')
it 'calls #_updateObjects with the internalObject, the data, and silent set to true', ->
loader._onLoadSuccess('test data')
expect(loader._updateObjects).toHaveBeenCalledWith(loader.internalObject, 'test data', true)
it 'calls #_calculateAdditionalIncludes', ->
loader._onLoadSuccess()
expect(loader._calculateAdditionalIncludes).toHaveBeenCalled()
context 'additional includes are needed', ->
it 'calls #_loadAdditionalIncludes', ->
loader._calculateAdditionalIncludes.and.callFake -> @additionalIncludes = ['foo']
loader._onLoadSuccess()
expect(loader._loadAdditionalIncludes).toHaveBeenCalled()
expect(loader._onLoadingCompleted).not.toHaveBeenCalled()
context 'additional includes are not needed', ->
it 'calls #_onLoadingCompleted', ->
loader._onLoadSuccess()
expect(loader._onLoadingCompleted).toHaveBeenCalled()
expect(loader._loadAdditionalIncludes).not.toHaveBeenCalled()
describe '#_onLoadingCompleted', ->
beforeEach ->
loader = createLoader()
it 'calls #_updateObjects with the externalObject and internalObject', ->
loader._onLoadingCompleted()
expect(loader._updateObjects).toHaveBeenCalledWith(loader.externalObject, loader.internalObject)
it 'resolves the deferred object with the externalObject', ->
spy = jasmine.createSpy()
loader.then(spy)
loader._onLoadingCompleted()
expect(spy).toHaveBeenCalledWith(loader.externalObject)
_ = require 'underscore'
$ = require 'jquery'
Backbone = require 'backbone'
Backbone.$ = $ # TODO remove after upgrading to backbone 1.2+
StorageManager = require '../../src/storage-manager'
Tasks = require '../helpers/models/tasks'
registerSharedBehavior "AbstractLoaderSharedBehavior", (sharedContext) ->
loader = loaderClass = null
beforeEach ->
loaderClass = sharedContext.loaderClass
fakeNestedInclude = ['parent', { project: ['participants'] }, { assignees: ['something_else'] }]
defaultLoadOptions = ->
name: 'tasks'
createLoader = (opts = {}) ->
storageManager = StorageManager.get()
storageManager.addCollection('tasks', Tasks)
defaults =
storageManager: storageManager
loader = new loaderClass(_.extend {}, defaults, opts)
loader._getCollectionName = -> 'tasks'
loader._createObjects = ->
@internalObject = bar: 'foo'
@externalObject = foo: 'bar'
loader._getModelsForAssociation = -> [{ id: 5 }, { id: 2 }, { id: 1 }, { id: 4 }, { id: 1 }, [{ id: 6 }], { id: null }]
loader._getModel = -> Tasks::model
loader._updateStorageManagerFromResponse = jasmine.createSpy()
loader._updateObjects = (obj, data, silent) ->
obj.setLoaded true unless silent
spyOn(loader, '_updateObjects')
loader
describe '#constructor', ->
it 'saves off a reference to the passed in StorageManager', ->
storageManager = StorageManager.get()
loader = createLoader(storageManager: storageManager)
expect(loader.storageManager).toEqual storageManager
it 'creates a deferred object and turns the loader into a promise', ->
spy = jasmine.createSpy('promise spy')
loader = createLoader()
expect(loader._deferred).not.toBeUndefined()
loader.then(spy)
loader._deferred.resolve()
expect(spy).toHaveBeenCalled()
describe 'options.loadOptions', ->
it 'calls #setup with loadOptions if loadOptions were passed in', ->
spy = spyOn(loaderClass.prototype, 'setup')
loader = createLoader(loadOptions: defaultLoadOptions())
expect(spy).toHaveBeenCalledWith defaultLoadOptions()
it 'does not call #setup if loadOptions were not passed in', ->
spy = spyOn(loaderClass.prototype, 'setup')
loader = createLoader()
expect(spy).not.toHaveBeenCalled()
describe '#setup', ->
it 'calls #_parseLoadOptions with the loadOptions', ->
loader = createLoader()
spyOn(loader, '_parseLoadOptions')
opts = foo: 'bar'
loader.setup(opts)
expect(loader._parseLoadOptions).toHaveBeenCalledWith(opts)
it 'calls _createObjects', ->
loader = createLoader()
spyOn(loader, '_createObjects')
loader.setup()
expect(loader._createObjects).toHaveBeenCalled()
it 'returns the externalObject', ->
loader = createLoader()
spyOn(loader, '_parseLoadOptions')
externalObject = loader.setup()
expect(externalObject).toEqual(loader.externalObject)
describe '#getCacheObject', ->
it 'returns the object', ->
loader = createLoader()
opts = defaultLoadOptions()
loader.setup(opts)
cacheKey = loader.loadOptions.cacheKey
expect(loader.getCacheObject()).toBeUndefined()
fakeCache = [key: "tasks", id: 5]
loader.storageManager.getCollectionDetails(loader._getCollectionName()).cache[cacheKey] = fakeCache
expect(loader.getCacheObject()).toEqual fakeCache
describe '#load', ->
describe 'sanity checking loadOptions', ->
funct = null
beforeEach ->
loader = createLoader()
spyOn(loader, '_checkCacheForData')
spyOn(loader, '_loadFromServer')
funct = -> loader.load()
it 'throws if there are no loadOptions', ->
expect(funct).toThrow()
it 'does not throw if there are loadOptions', ->
loader.loadOptions = {}
expect(funct).not.toThrow()
describe 'checking the cache', ->
beforeEach ->
loader = createLoader()
spyOn(loader, '_checkCacheForData')
spyOn(loader, '_loadFromServer')
context 'loadOptions.cache is true', ->
it 'calls #_checkCacheForData', ->
loader.setup()
expect(loader.loadOptions.cache).toEqual(true)
loader.load()
expect(loader._checkCacheForData).toHaveBeenCalled()
context '#_checkCacheForData returns data', ->
it 'returns the data', ->
fakeData = ['some', 'stuff']
loader._checkCacheForData.and.returnValue(fakeData)
loader.setup()
expect(loader.load()).toEqual(fakeData)
context '#_checkCacheForData does not return data', ->
it 'calls #_loadFromServer', ->
loader.setup()
loader.load()
expect(loader._loadFromServer).toHaveBeenCalled()
context 'loadOptions.cache is false', ->
it 'does not call #_checkCacheForData', ->
loader.setup(cache: false)
loader.load()
expect(loader._checkCacheForData).not.toHaveBeenCalled()
it 'calls #_loadFromServer', ->
loader.setup()
loader.load()
expect(loader._loadFromServer).toHaveBeenCalled()
describe '#_getIdsForAssociation', ->
it 'returns the flattened, unique, sorted, and non-null IDs from the models that are returned from #_getModelsForAssociation', ->
loader = createLoader()
expect(loader._getIdsForAssociation('foo')).toEqual [1, 2, 4, 5, 6]
describe '#_updateObjects', ->
fakeObj = null
beforeEach ->
loader = createLoader()
fakeObj = setLoaded: jasmine.createSpy()
loader._updateObjects.and.callThrough()
it 'sets the object to loaded if silent is false', ->
loader._updateObjects(fakeObj, {})
expect(fakeObj.setLoaded).toHaveBeenCalled()
it 'does not set the object to loaded if silent is true', ->
loader._updateObjects(fakeObj, {}, true)
expect(fakeObj.setLoaded).not.toHaveBeenCalled()
describe '#_parseLoadOptions', ->
opts = null
beforeEach ->
loader = createLoader()
opts = defaultLoadOptions()
it 'saves off a reference of the loadOptions as originalOptions', ->
loader._parseLoadOptions(defaultLoadOptions())
expect(loader.originalOptions).toEqual(defaultLoadOptions())
it 'parses the include options', ->
opts.include = ['foo': ['bar'], 'toad', 'stool']
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.include).toEqual [
{ foo: [{ bar: [ ]}] }
{ toad: [] }
{ stool: [] }
]
describe 'only parsing', ->
context 'only is present', ->
it 'sets only as an array of strings from the original only', ->
opts.only = [1, 2, 3, 4]
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.only).toEqual ['1', '2', '3', '4']
context 'only is not present', ->
it 'sets only as null', ->
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.only).toEqual(null)
it 'defaults filters to an empty object', ->
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.filters).toEqual {}
# make sure it leaves them alone if they are present
opts.filters = filters = foo: 'bar'
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.filters).toEqual filters
it 'pulls of the top layer of includes and sets them as thisLayerInclude', ->
opts.include = ['foo': ['bar'], 'toad': ['stool'], 'mushroom']
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.thisLayerInclude).toEqual ['foo', 'toad', 'mushroom']
it 'defaults cache to true', ->
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.cache).toEqual true
# make sure it leaves cache alone if it is present
opts.cache = false
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.cache).toEqual false
it 'sets cache to false if search is present', ->
opts = _.extend opts, cache: true, search: 'term'
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.cache).toEqual false
it 'builds a cache key', ->
# order, filterKeys, page, perPage, limit, offset
myOpts =
order: 'myOrder'
filters:
key1: 'PI:KEY:<KEY>END_PI'
key2: 'PI:KEY:<KEY>END_PI'
key3:
value1: 'PI:KEY:<KEY>END_PI'
value2: 'PI:KEY:<KEY>END_PI'
page: 1
perPage: 200
limit: 50
offset: 0
only: [3, 1, 2]
search: 'foobar'
opts = _.extend(opts, myOpts)
loadOptions = loader._parseLoadOptions(opts)
expect(loadOptions.cacheKey).toEqual 'myOrder|{"key1":"value1","key2":"value2","key3":{"value1":"a","value2":"b"}}|1,2,3|1|200|50|0|foobar'
it 'sets the cachedCollection on the loader from the storageManager', ->
loader._parseLoadOptions(opts)
expect(loader.cachedCollection).toEqual loader.storageManager.storage(loader.loadOptions.name)
describe '#_checkCacheForData', ->
opts = null
taskOne = taskTwo = null
beforeEach ->
loader = createLoader()
opts = defaultLoadOptions()
spyOn(loader, '_onLoadSuccess')
taskOne = buildTask(id: 2)
taskTwo = buildTask(id: 3)
notFound = (loader, opts) ->
loader.setup(opts)
ret = loader._checkCacheForData()
expect(ret).toEqual false
expect(loader._onLoadSuccess).not.toHaveBeenCalled()
context 'only query', ->
beforeEach ->
opts.only = ['2', '3']
context 'the requested IDs have all been loaded', ->
beforeEach ->
loader.storageManager.storage('tasks').add([taskOne, taskTwo])
it 'calls #_onLoadSuccess with the models from the cache', ->
loader.setup(opts)
loader._checkCacheForData()
expect(loader._onLoadSuccess.calls.argsFor(0)[0]).toEqual([taskOne, taskTwo])
context 'the requested IDs have not all been loaded', ->
beforeEach ->
loader.storageManager.storage('tasks').add([taskOne])
it 'returns false and does not call #_onLoadSuccess', ->
loader.setup(opts)
notFound(loader, opts)
context 'when optional fields have been requested but the fields arent on all the tasks', ->
beforeEach ->
opts.optionalFields = ['test_field']
taskOne.set('test_field', 'fake value')
loader.storageManager.storage('tasks').add([taskOne, taskTwo])
loader.setup(opts)
it 'returns false', ->
expect(loader._checkCacheForData()).toEqual(false)
it 'does not call #_onLoadSuccess', ->
loader._checkCacheForData()
expect(loader._onLoadSuccess).not.toHaveBeenCalled()
context 'when optional fields have been requested and the fields are already on the tasks', ->
beforeEach ->
opts.optionalFields = ['test_field']
taskOne.set('test_field', 'fake value for one')
taskTwo.set('test_field', 'fake value for two')
loader.storageManager.storage('tasks').add([taskOne, taskTwo])
loader.setup(opts)
loader._checkCacheForData()
it 'calls #_onLoadSuccess with the models from the cache', ->
expect(loader._onLoadSuccess.calls.argsFor(0)[0]).toEqual([taskOne, taskTwo])
context 'not an only query', ->
context 'there exists a cache with this cacheKey', ->
beforeEach ->
loader.storageManager.storage('tasks').add taskOne
context 'cache is valid', ->
beforeEach ->
fakeCacheObject =
count: 1
results: [key: "tasks", id: taskOne.id]
valid: true
loader.storageManager.getCollectionDetails('tasks').cache['updated_at:desc|||||||'] = fakeCacheObject
context 'all of the cached models have their associations loaded', ->
beforeEach ->
taskOne.set('project_id', buildAndCacheProject().id, { silent: true })
it 'calls #_onLoadSuccess with the models from the cache', ->
opts.include = ['project']
loader.setup(opts)
loader._checkCacheForData()
expect(loader._onLoadSuccess.calls.argsFor(0)[0]).toEqual([taskOne])
context 'all of the cached models do not have their associations loaded', ->
it 'returns false and does not call #_onLoadSuccess', ->
opts.include = ['project']
loader.setup(opts)
notFound(loader, opts)
context 'all of the cached models have their optional fields loaded', ->
beforeEach ->
taskOne.set('test_field', 'test value', { silent: true })
opts.optionalFields = ['test_field']
loader.setup(opts)
loader._checkCacheForData()
it 'calls #_onLoadSuccess with the models from the cache', ->
expect(loader._onLoadSuccess).toHaveBeenCalledWith([taskOne])
context 'all of the cached models do not have their optional fields loaded', ->
beforeEach ->
opts.optionalFields = ['test_field']
loader.setup(opts)
it 'returns false', ->
expect(loader._checkCacheForData()).toEqual(false)
it 'does not call #_onLoadSuccess', ->
loader._checkCacheForData()
expect(loader._onLoadSuccess).not.toHaveBeenCalled()
context 'cache is invalid', ->
beforeEach ->
fakeCacheObject =
count: 1
results: [key: "PI:KEY:<KEY>END_PI", id: taskOne.id]
valid: false
loader.storageManager.getCollectionDetails('tasks').cache['updated_at:desc||||||'] = fakeCacheObject
it 'returns false and does not call #_onLoadSuccess', ->
loader.setup(opts)
notFound(loader, opts)
context 'there is no cache with this cacheKey', ->
it 'does not call #_onLoadSuccess and returns false', ->
loader.setup(opts)
notFound(loader, opts)
describe '#_loadFromServer', ->
opts = syncOpts = null
beforeEach ->
loader = createLoader()
opts = defaultLoadOptions()
syncOpts = data: 'foo'
spyOn(Backbone, 'sync').and.returnValue $.ajax()
spyOn(loader, '_buildSyncOptions').and.returnValue(syncOpts)
it 'calls Backbone.sync with the read, the, internalObject, and #_buildSyncOptions', ->
loader.setup(opts)
loader._loadFromServer()
expect(Backbone.sync).toHaveBeenCalledWith 'read', loader.internalObject, syncOpts
it 'puts the jqXhr on the returnValues if present', ->
opts.returnValues = returnValues = {}
loader.setup(opts)
loader._loadFromServer()
expect(returnValues.jqXhr.success).not.toBeUndefined()
it 'returns the externalObject', ->
loader.setup(opts)
ret = loader._loadFromServer()
expect(ret).toEqual loader.externalObject
describe '#_calculateAdditionalIncludes', ->
opts = null
beforeEach ->
loader = createLoader()
opts = defaultLoadOptions()
spyOn(loader, '_getIdsForAssociation').and.returnValue [1, 2]
it 'adds each additional (sub) include to the additionalIncludes array', ->
opts.include = fakeNestedInclude
loader.setup(opts)
loader._calculateAdditionalIncludes()
expect(loader.additionalIncludes.length).toEqual 2
expect(loader.additionalIncludes).toEqual [
{ name: 'project', ids: [1, 2], include: [participants: []] }
{ name: 'assignees', ids: [1, 2], include: [something_else: []] }
]
describe '#_loadAdditionalIncludes', ->
opts = null
beforeEach ->
loader = createLoader()
opts = _.extend defaultLoadOptions(),
cache: false
headers:
'X-Feature-Name': 'a-feature'
opts.include = fakeNestedInclude
loader.setup(opts)
loader._calculateAdditionalIncludes()
spyOn(loader, '_onLoadingCompleted')
it 'respects "cache" option in nested includes', ->
spyOn(loader.storageManager, 'loadObject')
loader._loadAdditionalIncludes()
calls = loader.storageManager.loadObject.calls
expect(calls.count()).toBeGreaterThan(0)
for call in calls.all()
expect(call.args[1].cache).toBe(false)
it 'respects "feature_name" option in nested includes', ->
spyOn(loader.storageManager, 'loadObject')
loader._loadAdditionalIncludes()
calls = loader.storageManager.loadObject.calls
expect(calls.count()).toBeGreaterThan(0)
for call in calls.all()
expect(call.args[1].headers['X-Feature-Name']).toBe('a-feature')
it 'creates a request for each additional include and calls #_onLoadingCompleted when they all are done', ->
promises = []
spyOn(loader.storageManager, 'loadObject').and.callFake ->
promise = $.Deferred()
promises.push(promise)
promise
loader._loadAdditionalIncludes()
expect(loader.storageManager.loadObject.calls.count()).toEqual 2
expect(promises.length).toEqual 2
expect(loader._onLoadingCompleted).not.toHaveBeenCalled()
for promise in promises
promise.resolve()
expect(loader._onLoadingCompleted).toHaveBeenCalled()
describe 'batching', ->
beforeEach ->
spyOn(loader, '_getIdsForAssociation').and.returnValue [1, 2, 3, 4, 5]
spyOn(loader.storageManager, 'loadObject')
context 'there are less than the associated ID limit', ->
beforeEach ->
loader.associationIdLimit = 100
it 'makes a single request for each association', ->
loader._loadAdditionalIncludes()
expect(loader.storageManager.loadObject.calls.count()).toEqual 2
context 'there are more than the associated ID limit', ->
beforeEach ->
loader.associationIdLimit = 2
it 'makes multiple requests for each association', ->
loader._loadAdditionalIncludes()
expect(loader.storageManager.loadObject.calls.count()).toEqual 6
describe '#_buildSyncOptions', ->
syncOptions = opts = null
beforeEach ->
loader = createLoader()
opts = defaultLoadOptions()
getSyncOptions = (loader, opts) ->
loader.setup(opts)
loader._buildSyncOptions()
it 'sets parse to true', ->
expect(getSyncOptions(loader, opts).parse).toEqual(true)
it 'sets error as #_onServerLoadError', ->
expect(getSyncOptions(loader, opts).error).toEqual(loader._onServerLoadError)
it 'sets success as #_onServerLoadSuccess', ->
expect(getSyncOptions(loader, opts).success).toEqual(loader._onServerLoadSuccess)
it 'sets data.include to be the layer of includes that this loader is loading', ->
opts.include = [
task: [ workspace: ['participants'] ]
'time_entries'
]
expect(getSyncOptions(loader, opts).data.include).toEqual('task,time_entries')
it 'sets the headers', ->
opts.headers = {
'X-Custom-Header': 'custom-header-value',
}
expect(getSyncOptions(loader, opts).headers['X-Custom-Header']).toEqual('custom-header-value')
describe 'data.only', ->
context 'this is an only load', ->
context '#_shouldUseOnly returns true', ->
beforeEach ->
spyOn(loader, '_shouldUseOnly').and.returnValue(true)
it 'sets data.only to comma separated ids', ->
opts.only = [1, 2, 3, 4]
expect(getSyncOptions(loader, opts).data.only).toEqual '1,2,3,4'
context '#_shouldUseOnly returns false', ->
beforeEach ->
spyOn(loader, '_shouldUseOnly').and.returnValue(true)
it 'does not set data.only', ->
expect(getSyncOptions(loader, opts).data.only).toBeUndefined()
context 'this is not an only load', ->
it 'does not set data.only', ->
expect(getSyncOptions(loader, opts).data.only).toBeUndefined()
describe 'data.order', ->
it 'sets order to be loadOptions.order if present', ->
opts.order = 'foo'
expect(getSyncOptions(loader, opts).data.order).toEqual 'foo'
describe 'extending data with filters and custom params', ->
blacklist = ['include', 'only', 'order', 'per_page', 'page', 'limit', 'offset', 'search']
excludesBlacklistFromObject = (object) ->
object[key] = 'overwritten' for key in blacklist
data = getSyncOptions(loader, opts).data
expect(data[key]).toBeUndefined() for key in blacklist
context 'filters do not exist', ->
beforeEach ->
opts.filters = undefined
it 'does not throw an error parsing filters', ->
expect()
expect(-> getSyncOptions(loader, opts)).not.toThrow()
context 'filters exist', ->
beforeEach ->
opts.filters = {}
it 'includes filter in data object', ->
opts.filters.foo = 'bar'
data = getSyncOptions(loader, opts).data
expect(data.foo).toEqual 'bar'
it 'excludes blacklisted brainstem specific keys from filters', ->
excludesBlacklistFromObject(opts.filters)
context 'params do not exist', ->
beforeEach ->
opts.params = undefined
it 'does not throw an error parsing params', ->
expect(-> getSyncOptions(loader, opts)).not.toThrow()
context 'custom params exist', ->
beforeEach ->
opts.params = {}
it 'includes custom params in data object', ->
opts.params = { color: 'red' }
data = getSyncOptions(loader, opts).data
expect(data.color).toEqual 'red'
it 'excludes blacklisted brainstem specific keys from custom params', ->
excludesBlacklistFromObject(opts.params)
describe 'pagination', ->
beforeEach ->
opts.offset = 0
opts.limit = 25
opts.perPage = 25
opts.page = 1
context 'not an only request', ->
context 'there is a limit and offset', ->
it 'adds limit and offset', ->
data = getSyncOptions(loader, opts).data
expect(data.limit).toEqual 25
expect(data.offset).toEqual 0
it 'does not add per_page and page', ->
data = getSyncOptions(loader, opts).data
expect(data.per_page).toBeUndefined()
expect(data.page).toBeUndefined()
context 'there is not a limit and offset', ->
beforeEach ->
delete opts.limit
delete opts.offset
it 'adds per_page and page', ->
data = getSyncOptions(loader, opts).data
expect(data.per_page).toEqual 25
expect(data.page).toEqual 1
it 'does not add limit and offset', ->
data = getSyncOptions(loader, opts).data
expect(data.limit).toBeUndefined()
expect(data.offset).toBeUndefined()
context 'only request', ->
beforeEach ->
opts.only = 1
it 'does not add limit, offset, per_page, or page', ->
data = getSyncOptions(loader, opts).data
expect(data.limit).toBeUndefined()
expect(data.offset).toBeUndefined()
expect(data.per_page).toBeUndefined()
expect(data.page).toBeUndefined()
describe 'data.search', ->
it 'sets data.search to be loadOptions.search if present', ->
opts.search = 'term'
expect(getSyncOptions(loader, opts).data.search).toEqual 'term'
describe '#_shouldUseOnly', ->
it 'returns true if internalObject is an instance of a Backbone.Collection', ->
loader = createLoader()
loader.internalObject = new Backbone.Collection()
expect(loader._shouldUseOnly()).toEqual true
it 'returns false if internalObject is not an instance of a Backbone.Collection', ->
loader = createLoader()
loader.internalObject = new Backbone.Model()
expect(loader._shouldUseOnly()).toEqual false
describe '#_modelsOrObj', ->
beforeEach ->
loader = createLoader()
context 'obj is a Backbone.Collection', ->
it 'returns the models from the collection', ->
collection = new Backbone.Collection()
collection.add([new Backbone.Model(), new Backbone.Model])
expect(loader._modelsOrObj(collection)).toEqual(collection.models)
context 'obj is a single object', ->
it 'returns obj wrapped in an array', ->
obj = new Backbone.Model()
expect(loader._modelsOrObj(obj)).toEqual([obj])
context 'obj is an array', ->
it 'returns obj', ->
obj = []
expect(loader._modelsOrObj(obj)).toEqual(obj)
context 'obj is undefined', ->
it 'returns an empty array', ->
obj = null
expect(loader._modelsOrObj(obj)).toEqual([])
describe '#_onServerLoadSuccess', ->
beforeEach ->
loader = createLoader()
spyOn(loader, '_onLoadSuccess')
it 'calls #_updateStorageManagerFromResponse with the response', ->
loader._onServerLoadSuccess('response')
expect(loader._updateStorageManagerFromResponse).toHaveBeenCalledWith 'response'
it 'calls #_onServerLoadSuccess with the result from #_updateStorageManagerFromResponse', ->
loader._updateStorageManagerFromResponse.and.returnValue 'data'
loader._onServerLoadSuccess()
expect(loader._onLoadSuccess).toHaveBeenCalledWith 'data'
describe '#_onLoadSuccess', ->
beforeEach ->
loader = createLoader()
loader.additionalIncludes = []
spyOn(loader, '_onLoadingCompleted')
spyOn(loader, '_loadAdditionalIncludes')
spyOn(loader, '_calculateAdditionalIncludes')
it 'calls #_updateObjects with the internalObject, the data, and silent set to true', ->
loader._onLoadSuccess('test data')
expect(loader._updateObjects).toHaveBeenCalledWith(loader.internalObject, 'test data', true)
it 'calls #_calculateAdditionalIncludes', ->
loader._onLoadSuccess()
expect(loader._calculateAdditionalIncludes).toHaveBeenCalled()
context 'additional includes are needed', ->
it 'calls #_loadAdditionalIncludes', ->
loader._calculateAdditionalIncludes.and.callFake -> @additionalIncludes = ['foo']
loader._onLoadSuccess()
expect(loader._loadAdditionalIncludes).toHaveBeenCalled()
expect(loader._onLoadingCompleted).not.toHaveBeenCalled()
context 'additional includes are not needed', ->
it 'calls #_onLoadingCompleted', ->
loader._onLoadSuccess()
expect(loader._onLoadingCompleted).toHaveBeenCalled()
expect(loader._loadAdditionalIncludes).not.toHaveBeenCalled()
describe '#_onLoadingCompleted', ->
beforeEach ->
loader = createLoader()
it 'calls #_updateObjects with the externalObject and internalObject', ->
loader._onLoadingCompleted()
expect(loader._updateObjects).toHaveBeenCalledWith(loader.externalObject, loader.internalObject)
it 'resolves the deferred object with the externalObject', ->
spy = jasmine.createSpy()
loader.then(spy)
loader._onLoadingCompleted()
expect(spy).toHaveBeenCalledWith(loader.externalObject)
|
[
{
"context": "###\n\tfile: bace\n node server\n\n copyright mark hahn 2013\n MIT license\n https://github.com/mark-",
"end": 56,
"score": 0.9998092651367188,
"start": 47,
"tag": "NAME",
"value": "mark hahn"
},
{
"context": " hahn 2013\n MIT license\n https://github.... | src/server/server.coffee | mark-hahn/bace | 1 | ###
file: bace
node server
copyright mark hahn 2013
MIT license
https://github.com/mark-hahn/bace/
###
socket = 8935
require('source-map-support').install()
require 'colors'
fs = require 'fs'
https = require 'https'
_ = require 'underscore'
_.mixin require('underscore.string').exports()
user = require './user-srvr'
page = require './homepage-srvr'
sock = require './socket-srvr'
# check data/users.json
user.chkUsers()
do -> console.log '------ Starting Bace Server'.green
process.on 'uncaughtException', (args...) ->
do -> console.log '\nBace Server: ***** uncaughtException ****\n'.red, args
fileTypes = [
[new RegExp('\\.png$'), 'image/png', '' ]
[new RegExp('\\.css$'), 'text/css', '' ]
[new RegExp('\\.js$' ), 'text/javascript', '' ]
[new RegExp('\\.map$|\\.coffee$'), 'text/plain', '' ]
[new RegExp( 'favicon.ico$'), 'image/x-icon', 'images/']
]
options =
cert: fs.readFileSync 'keys/bace-cert.pem'
key: fs.readFileSync 'keys/bace-key.pem'
srvr = https.createServer options, (req, res) ->
# console.log '-- req:', req.url
for fileType in fileTypes
[regex, mime, pfx] = fileType
if not regex.test req.url then continue
fs.readFile (pfx ? '') + req.url[1..], (err, resp) ->
if err
# if not _.endsWith req.url, '.map'
do -> console.log 'bace: error serving file'.red, req.url, err
res.writeHead 404
res.end err.message
else
res.writeHead 200, 'Content-Type': mime
res.end resp
return
if req.url is '/'
res.writeHead 200
res.end page.render req, res
return
do -> console.log 'bace: invalid https request:', req.url
res.writeHead 404
res.end ''
srvr.listen socket
sock.startup srvr
| 37298 | ###
file: bace
node server
copyright <NAME> 2013
MIT license
https://github.com/mark-hahn/bace/
###
socket = 8935
require('source-map-support').install()
require 'colors'
fs = require 'fs'
https = require 'https'
_ = require 'underscore'
_.mixin require('underscore.string').exports()
user = require './user-srvr'
page = require './homepage-srvr'
sock = require './socket-srvr'
# check data/users.json
user.chkUsers()
do -> console.log '------ Starting Bace Server'.green
process.on 'uncaughtException', (args...) ->
do -> console.log '\nBace Server: ***** uncaughtException ****\n'.red, args
fileTypes = [
[new RegExp('\\.png$'), 'image/png', '' ]
[new RegExp('\\.css$'), 'text/css', '' ]
[new RegExp('\\.js$' ), 'text/javascript', '' ]
[new RegExp('\\.map$|\\.coffee$'), 'text/plain', '' ]
[new RegExp( 'favicon.ico$'), 'image/x-icon', 'images/']
]
options =
cert: fs.readFileSync 'keys/bace-cert.pem'
key: fs.readFileSync 'keys/bace-key.pem'
srvr = https.createServer options, (req, res) ->
# console.log '-- req:', req.url
for fileType in fileTypes
[regex, mime, pfx] = fileType
if not regex.test req.url then continue
fs.readFile (pfx ? '') + req.url[1..], (err, resp) ->
if err
# if not _.endsWith req.url, '.map'
do -> console.log 'bace: error serving file'.red, req.url, err
res.writeHead 404
res.end err.message
else
res.writeHead 200, 'Content-Type': mime
res.end resp
return
if req.url is '/'
res.writeHead 200
res.end page.render req, res
return
do -> console.log 'bace: invalid https request:', req.url
res.writeHead 404
res.end ''
srvr.listen socket
sock.startup srvr
| true | ###
file: bace
node server
copyright PI:NAME:<NAME>END_PI 2013
MIT license
https://github.com/mark-hahn/bace/
###
socket = 8935
require('source-map-support').install()
require 'colors'
fs = require 'fs'
https = require 'https'
_ = require 'underscore'
_.mixin require('underscore.string').exports()
user = require './user-srvr'
page = require './homepage-srvr'
sock = require './socket-srvr'
# check data/users.json
user.chkUsers()
do -> console.log '------ Starting Bace Server'.green
process.on 'uncaughtException', (args...) ->
do -> console.log '\nBace Server: ***** uncaughtException ****\n'.red, args
fileTypes = [
[new RegExp('\\.png$'), 'image/png', '' ]
[new RegExp('\\.css$'), 'text/css', '' ]
[new RegExp('\\.js$' ), 'text/javascript', '' ]
[new RegExp('\\.map$|\\.coffee$'), 'text/plain', '' ]
[new RegExp( 'favicon.ico$'), 'image/x-icon', 'images/']
]
options =
cert: fs.readFileSync 'keys/bace-cert.pem'
key: fs.readFileSync 'keys/bace-key.pem'
srvr = https.createServer options, (req, res) ->
# console.log '-- req:', req.url
for fileType in fileTypes
[regex, mime, pfx] = fileType
if not regex.test req.url then continue
fs.readFile (pfx ? '') + req.url[1..], (err, resp) ->
if err
# if not _.endsWith req.url, '.map'
do -> console.log 'bace: error serving file'.red, req.url, err
res.writeHead 404
res.end err.message
else
res.writeHead 200, 'Content-Type': mime
res.end resp
return
if req.url is '/'
res.writeHead 200
res.end page.render req, res
return
do -> console.log 'bace: invalid https request:', req.url
res.writeHead 404
res.end ''
srvr.listen socket
sock.startup srvr
|
[
{
"context": "\n# Test CSV - Copyright David Worms <open@adaltas.com> (BSD Licensed)\n\nfs = require '",
"end": 35,
"score": 0.999841570854187,
"start": 24,
"tag": "NAME",
"value": "David Worms"
},
{
"context": "\n# Test CSV - Copyright David Worms <open@adaltas.com> (BSD Licensed)\n... | test/transform.coffee | timoxley/node-csv-parser | 1 |
# Test CSV - Copyright David Worms <open@adaltas.com> (BSD Licensed)
fs = require 'fs'
assert = require 'assert'
csv = require '..'
module.exports =
'Test reorder fields': ->
count = 0
csv()
.fromPath("#{__dirname}/transform/reorder.in")
.toPath("#{__dirname}/transform/reorder.tmp")
.transform( (data, index) ->
assert.strictEqual count, index
count++
data.unshift data.pop()
return data
)
.on 'end', ->
assert.strictEqual 2, count
assert.equal(
fs.readFileSync("#{__dirname}/transform/reorder.out").toString(),
fs.readFileSync("#{__dirname}/transform/reorder.tmp").toString()
)
fs.unlink "#{__dirname}/transform/reorder.tmp"
'Test return undefined - skip all lines': ->
count = 0
csv()
.fromPath(__dirname+'/transform/undefined.in')
.toPath("#{__dirname}/transform/undefined.tmp")
.transform( (data, index) ->
assert.strictEqual count, index
count++
return
)
.on 'end', ->
assert.strictEqual 2, count
assert.equal(
fs.readFileSync("#{__dirname}/transform/undefined.out").toString(),
fs.readFileSync("#{__dirname}/transform/undefined.tmp").toString()
)
fs.unlink "#{__dirname}/transform/undefined.tmp"
'Test return null - skip one of two lines': ->
count = 0
csv()
.fromPath(__dirname+'/transform/null.in')
.toPath("#{__dirname}/transform/null.tmp")
.transform( (data, index) ->
assert.strictEqual(count,index)
count++
return if index % 2 then data else null
)
.on 'end', ->
assert.strictEqual(6,count)
assert.equal(
fs.readFileSync("#{__dirname}/transform/null.out").toString(),
fs.readFileSync("#{__dirname}/transform/null.tmp").toString()
)
fs.unlink "#{__dirname}/transform/null.tmp"
'Test return object': ->
# we don't define columns
# recieve and array and return an object
# also see the columns test
csv()
.fromPath(__dirname+'/transform/object.in')
.toPath("#{__dirname}/transform/object.tmp")
.transform( (data, index) ->
return { field_1: data[4], field_2: data[3] }
)
.on 'end', (count) ->
assert.strictEqual(2,count)
assert.equal(
fs.readFileSync("#{__dirname}/transform/object.out").toString(),
fs.readFileSync("#{__dirname}/transform/object.tmp").toString()
)
fs.unlink("#{__dirname}/transform/object.tmp")
'Test return string': ->
csv()
.fromPath(__dirname+'/transform/string.in')
.toPath("#{__dirname}/transform/string.tmp")
.transform( (data, index) ->
return ( if index > 0 then ',' else '') + data[4] + ":" + data[3]
)
.on 'end', (count) ->
assert.strictEqual(2,count)
assert.equal(
fs.readFileSync("#{__dirname}/transform/string.out").toString(),
fs.readFileSync("#{__dirname}/transform/string.tmp").toString()
)
fs.unlink("#{__dirname}/transform/string.tmp")
'Test types': ->
# Test date, int and float
csv()
.fromPath(__dirname+'/transform/types.in')
.toPath("#{__dirname}/transform/types.tmp")
.transform( (data, index) ->
data[3] = data[3].split('-')
return [parseInt(data[0]), parseFloat(data[1]), parseFloat(data[2]) ,Date.UTC(data[3][0], data[3][1], data[3][2]), !!data[4], !!data[5]]
)
.on 'end', (count) ->
assert.strictEqual(2,count)
assert.equal(
fs.readFileSync("#{__dirname}/transform/types.out").toString(),
fs.readFileSync("#{__dirname}/transform/types.tmp").toString()
)
fs.unlink("#{__dirname}/transform/types.tmp")
| 196201 |
# Test CSV - Copyright <NAME> <<EMAIL>> (BSD Licensed)
fs = require 'fs'
assert = require 'assert'
csv = require '..'
module.exports =
'Test reorder fields': ->
count = 0
csv()
.fromPath("#{__dirname}/transform/reorder.in")
.toPath("#{__dirname}/transform/reorder.tmp")
.transform( (data, index) ->
assert.strictEqual count, index
count++
data.unshift data.pop()
return data
)
.on 'end', ->
assert.strictEqual 2, count
assert.equal(
fs.readFileSync("#{__dirname}/transform/reorder.out").toString(),
fs.readFileSync("#{__dirname}/transform/reorder.tmp").toString()
)
fs.unlink "#{__dirname}/transform/reorder.tmp"
'Test return undefined - skip all lines': ->
count = 0
csv()
.fromPath(__dirname+'/transform/undefined.in')
.toPath("#{__dirname}/transform/undefined.tmp")
.transform( (data, index) ->
assert.strictEqual count, index
count++
return
)
.on 'end', ->
assert.strictEqual 2, count
assert.equal(
fs.readFileSync("#{__dirname}/transform/undefined.out").toString(),
fs.readFileSync("#{__dirname}/transform/undefined.tmp").toString()
)
fs.unlink "#{__dirname}/transform/undefined.tmp"
'Test return null - skip one of two lines': ->
count = 0
csv()
.fromPath(__dirname+'/transform/null.in')
.toPath("#{__dirname}/transform/null.tmp")
.transform( (data, index) ->
assert.strictEqual(count,index)
count++
return if index % 2 then data else null
)
.on 'end', ->
assert.strictEqual(6,count)
assert.equal(
fs.readFileSync("#{__dirname}/transform/null.out").toString(),
fs.readFileSync("#{__dirname}/transform/null.tmp").toString()
)
fs.unlink "#{__dirname}/transform/null.tmp"
'Test return object': ->
# we don't define columns
# recieve and array and return an object
# also see the columns test
csv()
.fromPath(__dirname+'/transform/object.in')
.toPath("#{__dirname}/transform/object.tmp")
.transform( (data, index) ->
return { field_1: data[4], field_2: data[3] }
)
.on 'end', (count) ->
assert.strictEqual(2,count)
assert.equal(
fs.readFileSync("#{__dirname}/transform/object.out").toString(),
fs.readFileSync("#{__dirname}/transform/object.tmp").toString()
)
fs.unlink("#{__dirname}/transform/object.tmp")
'Test return string': ->
csv()
.fromPath(__dirname+'/transform/string.in')
.toPath("#{__dirname}/transform/string.tmp")
.transform( (data, index) ->
return ( if index > 0 then ',' else '') + data[4] + ":" + data[3]
)
.on 'end', (count) ->
assert.strictEqual(2,count)
assert.equal(
fs.readFileSync("#{__dirname}/transform/string.out").toString(),
fs.readFileSync("#{__dirname}/transform/string.tmp").toString()
)
fs.unlink("#{__dirname}/transform/string.tmp")
'Test types': ->
# Test date, int and float
csv()
.fromPath(__dirname+'/transform/types.in')
.toPath("#{__dirname}/transform/types.tmp")
.transform( (data, index) ->
data[3] = data[3].split('-')
return [parseInt(data[0]), parseFloat(data[1]), parseFloat(data[2]) ,Date.UTC(data[3][0], data[3][1], data[3][2]), !!data[4], !!data[5]]
)
.on 'end', (count) ->
assert.strictEqual(2,count)
assert.equal(
fs.readFileSync("#{__dirname}/transform/types.out").toString(),
fs.readFileSync("#{__dirname}/transform/types.tmp").toString()
)
fs.unlink("#{__dirname}/transform/types.tmp")
| true |
# Test CSV - Copyright PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI> (BSD Licensed)
fs = require 'fs'
assert = require 'assert'
csv = require '..'
module.exports =
'Test reorder fields': ->
count = 0
csv()
.fromPath("#{__dirname}/transform/reorder.in")
.toPath("#{__dirname}/transform/reorder.tmp")
.transform( (data, index) ->
assert.strictEqual count, index
count++
data.unshift data.pop()
return data
)
.on 'end', ->
assert.strictEqual 2, count
assert.equal(
fs.readFileSync("#{__dirname}/transform/reorder.out").toString(),
fs.readFileSync("#{__dirname}/transform/reorder.tmp").toString()
)
fs.unlink "#{__dirname}/transform/reorder.tmp"
'Test return undefined - skip all lines': ->
count = 0
csv()
.fromPath(__dirname+'/transform/undefined.in')
.toPath("#{__dirname}/transform/undefined.tmp")
.transform( (data, index) ->
assert.strictEqual count, index
count++
return
)
.on 'end', ->
assert.strictEqual 2, count
assert.equal(
fs.readFileSync("#{__dirname}/transform/undefined.out").toString(),
fs.readFileSync("#{__dirname}/transform/undefined.tmp").toString()
)
fs.unlink "#{__dirname}/transform/undefined.tmp"
'Test return null - skip one of two lines': ->
count = 0
csv()
.fromPath(__dirname+'/transform/null.in')
.toPath("#{__dirname}/transform/null.tmp")
.transform( (data, index) ->
assert.strictEqual(count,index)
count++
return if index % 2 then data else null
)
.on 'end', ->
assert.strictEqual(6,count)
assert.equal(
fs.readFileSync("#{__dirname}/transform/null.out").toString(),
fs.readFileSync("#{__dirname}/transform/null.tmp").toString()
)
fs.unlink "#{__dirname}/transform/null.tmp"
'Test return object': ->
# we don't define columns
# recieve and array and return an object
# also see the columns test
csv()
.fromPath(__dirname+'/transform/object.in')
.toPath("#{__dirname}/transform/object.tmp")
.transform( (data, index) ->
return { field_1: data[4], field_2: data[3] }
)
.on 'end', (count) ->
assert.strictEqual(2,count)
assert.equal(
fs.readFileSync("#{__dirname}/transform/object.out").toString(),
fs.readFileSync("#{__dirname}/transform/object.tmp").toString()
)
fs.unlink("#{__dirname}/transform/object.tmp")
'Test return string': ->
csv()
.fromPath(__dirname+'/transform/string.in')
.toPath("#{__dirname}/transform/string.tmp")
.transform( (data, index) ->
return ( if index > 0 then ',' else '') + data[4] + ":" + data[3]
)
.on 'end', (count) ->
assert.strictEqual(2,count)
assert.equal(
fs.readFileSync("#{__dirname}/transform/string.out").toString(),
fs.readFileSync("#{__dirname}/transform/string.tmp").toString()
)
fs.unlink("#{__dirname}/transform/string.tmp")
'Test types': ->
# Test date, int and float
csv()
.fromPath(__dirname+'/transform/types.in')
.toPath("#{__dirname}/transform/types.tmp")
.transform( (data, index) ->
data[3] = data[3].split('-')
return [parseInt(data[0]), parseFloat(data[1]), parseFloat(data[2]) ,Date.UTC(data[3][0], data[3][1], data[3][2]), !!data[4], !!data[5]]
)
.on 'end', (count) ->
assert.strictEqual(2,count)
assert.equal(
fs.readFileSync("#{__dirname}/transform/types.out").toString(),
fs.readFileSync("#{__dirname}/transform/types.tmp").toString()
)
fs.unlink("#{__dirname}/transform/types.tmp")
|
[
{
"context": " user.email = @email\n user.password = @password\n user.profile = @profile\n user.prof",
"end": 486,
"score": 0.9989360570907593,
"start": 477,
"tag": "PASSWORD",
"value": "@password"
}
] | client/landing/signin.coffee | recursivefaults/beesnearme | 0 | SigninController = (location) ->
init = () =>
@signIn = signIn
@facebookSignIn = facebookSignIn
@errors = null
@isRegistering = false
@beginRegistration = () => @isRegistering = !@isRegistering
@register = register
@currentPosition = null
location().then (position) =>
@currentPosition = position.coords
register = () =>
user = {}
user.email = @email
user.password = @password
user.profile = @profile
user.profile.location = [@currentPosition.latitude, @currentPosition.longitude]
user.profile.coords = {lat: @currentPosition.latitude, lng: @currentPosition.longitude}
Accounts.createUser user, (error) ->
console.log error
facebookSignIn = () =>
Meteor.loginWithFacebook()
handleLoginError = (error) =>
console.log error
@errors = "Invalid user name or password"
@email = ""
@password = ""
signIn = () =>
@errors = null
Meteor.loginWithPassword @email, @password, handleLoginError
init()
return
SigninController.$inject = ['LocationService']
angular.module('bees-near-me').controller 'SigninController', SigninController
| 152848 | SigninController = (location) ->
init = () =>
@signIn = signIn
@facebookSignIn = facebookSignIn
@errors = null
@isRegistering = false
@beginRegistration = () => @isRegistering = !@isRegistering
@register = register
@currentPosition = null
location().then (position) =>
@currentPosition = position.coords
register = () =>
user = {}
user.email = @email
user.password = <PASSWORD>
user.profile = @profile
user.profile.location = [@currentPosition.latitude, @currentPosition.longitude]
user.profile.coords = {lat: @currentPosition.latitude, lng: @currentPosition.longitude}
Accounts.createUser user, (error) ->
console.log error
facebookSignIn = () =>
Meteor.loginWithFacebook()
handleLoginError = (error) =>
console.log error
@errors = "Invalid user name or password"
@email = ""
@password = ""
signIn = () =>
@errors = null
Meteor.loginWithPassword @email, @password, handleLoginError
init()
return
SigninController.$inject = ['LocationService']
angular.module('bees-near-me').controller 'SigninController', SigninController
| true | SigninController = (location) ->
init = () =>
@signIn = signIn
@facebookSignIn = facebookSignIn
@errors = null
@isRegistering = false
@beginRegistration = () => @isRegistering = !@isRegistering
@register = register
@currentPosition = null
location().then (position) =>
@currentPosition = position.coords
register = () =>
user = {}
user.email = @email
user.password = PI:PASSWORD:<PASSWORD>END_PI
user.profile = @profile
user.profile.location = [@currentPosition.latitude, @currentPosition.longitude]
user.profile.coords = {lat: @currentPosition.latitude, lng: @currentPosition.longitude}
Accounts.createUser user, (error) ->
console.log error
facebookSignIn = () =>
Meteor.loginWithFacebook()
handleLoginError = (error) =>
console.log error
@errors = "Invalid user name or password"
@email = ""
@password = ""
signIn = () =>
@errors = null
Meteor.loginWithPassword @email, @password, handleLoginError
init()
return
SigninController.$inject = ['LocationService']
angular.module('bees-near-me').controller 'SigninController', SigninController
|
[
{
"context": "# Copyright (C) 2013 John Judnich\n# Released under The MIT License - see \"LICENSE\" ",
"end": 33,
"score": 0.9998717308044434,
"start": 21,
"tag": "NAME",
"value": "John Judnich"
}
] | source/FarMapGenerator.coffee | anandprabhakar0507/Kosmos | 46 | # Copyright (C) 2013 John Judnich
# Released under The MIT License - see "LICENSE" file for details.
root = exports ? this
class root.FarMapGenerator
constructor: (mapResolution) ->
# load shaders
@shader = []
for i in [0 .. kosmosShaderHeightFunctions.length-1]
@shader[i] = xgl.loadProgram("farMapGenerator" + i)
@shader[i].uniforms = xgl.getProgramUniforms(@shader[i], ["randomSeed"])
@shader[i].attribs = xgl.getProgramAttribs(@shader[i], ["aUV", "aPos", "aTangent", "aBinormal"])
# initialize FBO
@fbo = gl.createFramebuffer()
gl.bindFramebuffer(gl.FRAMEBUFFER, @fbo)
@fbo.width = mapResolution * 6 # six cube faces all packed into this single texture
@fbo.height = mapResolution
console.log("Initialized low resolution planet map generator FBO at #{@fbo.width} x #{@fbo.height}")
gl.bindFramebuffer(gl.FRAMEBUFFER, null)
# create fullscreen quad vertices
buff = new Float32Array(6*6*11)
i = 0
tangent = [0, 0, 0]
binormal = [0, 0, 0]
for faceIndex in [0..5]
for uv in [[0,0], [1,0], [0,1], [1,0], [1,1], [0,1]]
pos = mapPlaneToCube(uv[0], uv[1], faceIndex)
buff[i++] = (uv[0] + faceIndex) / 6.0; buff[i++] = uv[1]
buff[i++] = pos[0]; buff[i++] = pos[1]; buff[i++] = pos[2]
posU = mapPlaneToCube(uv[0]+1, uv[1], faceIndex)
posV = mapPlaneToCube(uv[0], uv[1]+1, faceIndex)
binormal = [posU[0]-pos[0], posU[1]-pos[1], posU[2]-pos[2]]
tangent = [posV[0]-pos[0], posV[1]-pos[1], posV[2]-pos[2]]
buff[i++] = binormal[0]; buff[i++] = binormal[1]; buff[i++] = binormal[2]
buff[i++] = tangent[0]; buff[i++] = tangent[1]; buff[i++] = tangent[2]
@quadVerts = gl.createBuffer()
gl.bindBuffer(gl.ARRAY_BUFFER, @quadVerts);
gl.bufferData(gl.ARRAY_BUFFER, buff, gl.STATIC_DRAW)
gl.bindBuffer(gl.ARRAY_BUFFER, null)
@quadVerts.itemSize = 11
@quadVerts.numItems = buff.length / @quadVerts.itemSize
start: ->
gl.disable(gl.DEPTH_TEST)
gl.depthMask(false)
gl.bindFramebuffer(gl.FRAMEBUFFER, @fbo)
gl.viewport(0, 0, @fbo.width, @fbo.height)
gl.bindBuffer(gl.ARRAY_BUFFER, @quadVerts)
gl.enableVertexAttribArray(@shader[0].attribs.aUV)
gl.enableVertexAttribArray(@shader[0].attribs.aPos)
gl.enableVertexAttribArray(@shader[0].attribs.aBinormal)
gl.enableVertexAttribArray(@shader[0].attribs.aTangent)
finish: ->
gl.disableVertexAttribArray(@shader[0].attribs.aUV)
gl.disableVertexAttribArray(@shader[0].attribs.aPos)
gl.disableVertexAttribArray(@shader[0].attribs.aBinormal)
gl.disableVertexAttribArray(@shader[0].attribs.aTangent)
gl.bindBuffer(gl.ARRAY_BUFFER, null)
gl.bindFramebuffer(gl.FRAMEBUFFER, null)
gl.useProgram(null)
gl.depthMask(true)
gl.enable(gl.DEPTH_TEST)
generate: (seed) ->
# setup seed values
rndStr = new RandomStream(seed)
seeds = [rndStr.unit(), rndStr.unit(), rndStr.unit()]
shaderIndex = rndStr.intRange(0, kosmosShaderHeightFunctions.length-1)
console.log("Using planet category " + shaderIndex)
# set shader from seed
gl.useProgram(@shader[shaderIndex])
gl.vertexAttribPointer(@shader[shaderIndex].attribs.aUV, 2, gl.FLOAT, false, @quadVerts.itemSize*4, 0)
gl.vertexAttribPointer(@shader[shaderIndex].attribs.aPos, 3, gl.FLOAT, false, @quadVerts.itemSize*4, 4 *2)
gl.vertexAttribPointer(@shader[shaderIndex].attribs.aBinormal, 3, gl.FLOAT, false, @quadVerts.itemSize*4, 4 *5)
gl.vertexAttribPointer(@shader[shaderIndex].attribs.aTangent, 3, gl.FLOAT, false, @quadVerts.itemSize*4, 4 *8)
gl.uniform3fv(@shader[shaderIndex].uniforms.randomSeed, seeds)
# create and attach texture map as render target
heightMap = gl.createTexture()
gl.bindTexture(gl.TEXTURE_2D, heightMap)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, @fbo.width, @fbo.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null)
gl.bindTexture(gl.TEXTURE_2D, null)
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, heightMap, 0)
gl.drawArrays(gl.TRIANGLES, 0, @quadVerts.numItems);
return heightMap
| 73079 | # Copyright (C) 2013 <NAME>
# Released under The MIT License - see "LICENSE" file for details.
root = exports ? this
class root.FarMapGenerator
constructor: (mapResolution) ->
# load shaders
@shader = []
for i in [0 .. kosmosShaderHeightFunctions.length-1]
@shader[i] = xgl.loadProgram("farMapGenerator" + i)
@shader[i].uniforms = xgl.getProgramUniforms(@shader[i], ["randomSeed"])
@shader[i].attribs = xgl.getProgramAttribs(@shader[i], ["aUV", "aPos", "aTangent", "aBinormal"])
# initialize FBO
@fbo = gl.createFramebuffer()
gl.bindFramebuffer(gl.FRAMEBUFFER, @fbo)
@fbo.width = mapResolution * 6 # six cube faces all packed into this single texture
@fbo.height = mapResolution
console.log("Initialized low resolution planet map generator FBO at #{@fbo.width} x #{@fbo.height}")
gl.bindFramebuffer(gl.FRAMEBUFFER, null)
# create fullscreen quad vertices
buff = new Float32Array(6*6*11)
i = 0
tangent = [0, 0, 0]
binormal = [0, 0, 0]
for faceIndex in [0..5]
for uv in [[0,0], [1,0], [0,1], [1,0], [1,1], [0,1]]
pos = mapPlaneToCube(uv[0], uv[1], faceIndex)
buff[i++] = (uv[0] + faceIndex) / 6.0; buff[i++] = uv[1]
buff[i++] = pos[0]; buff[i++] = pos[1]; buff[i++] = pos[2]
posU = mapPlaneToCube(uv[0]+1, uv[1], faceIndex)
posV = mapPlaneToCube(uv[0], uv[1]+1, faceIndex)
binormal = [posU[0]-pos[0], posU[1]-pos[1], posU[2]-pos[2]]
tangent = [posV[0]-pos[0], posV[1]-pos[1], posV[2]-pos[2]]
buff[i++] = binormal[0]; buff[i++] = binormal[1]; buff[i++] = binormal[2]
buff[i++] = tangent[0]; buff[i++] = tangent[1]; buff[i++] = tangent[2]
@quadVerts = gl.createBuffer()
gl.bindBuffer(gl.ARRAY_BUFFER, @quadVerts);
gl.bufferData(gl.ARRAY_BUFFER, buff, gl.STATIC_DRAW)
gl.bindBuffer(gl.ARRAY_BUFFER, null)
@quadVerts.itemSize = 11
@quadVerts.numItems = buff.length / @quadVerts.itemSize
start: ->
gl.disable(gl.DEPTH_TEST)
gl.depthMask(false)
gl.bindFramebuffer(gl.FRAMEBUFFER, @fbo)
gl.viewport(0, 0, @fbo.width, @fbo.height)
gl.bindBuffer(gl.ARRAY_BUFFER, @quadVerts)
gl.enableVertexAttribArray(@shader[0].attribs.aUV)
gl.enableVertexAttribArray(@shader[0].attribs.aPos)
gl.enableVertexAttribArray(@shader[0].attribs.aBinormal)
gl.enableVertexAttribArray(@shader[0].attribs.aTangent)
finish: ->
gl.disableVertexAttribArray(@shader[0].attribs.aUV)
gl.disableVertexAttribArray(@shader[0].attribs.aPos)
gl.disableVertexAttribArray(@shader[0].attribs.aBinormal)
gl.disableVertexAttribArray(@shader[0].attribs.aTangent)
gl.bindBuffer(gl.ARRAY_BUFFER, null)
gl.bindFramebuffer(gl.FRAMEBUFFER, null)
gl.useProgram(null)
gl.depthMask(true)
gl.enable(gl.DEPTH_TEST)
generate: (seed) ->
# setup seed values
rndStr = new RandomStream(seed)
seeds = [rndStr.unit(), rndStr.unit(), rndStr.unit()]
shaderIndex = rndStr.intRange(0, kosmosShaderHeightFunctions.length-1)
console.log("Using planet category " + shaderIndex)
# set shader from seed
gl.useProgram(@shader[shaderIndex])
gl.vertexAttribPointer(@shader[shaderIndex].attribs.aUV, 2, gl.FLOAT, false, @quadVerts.itemSize*4, 0)
gl.vertexAttribPointer(@shader[shaderIndex].attribs.aPos, 3, gl.FLOAT, false, @quadVerts.itemSize*4, 4 *2)
gl.vertexAttribPointer(@shader[shaderIndex].attribs.aBinormal, 3, gl.FLOAT, false, @quadVerts.itemSize*4, 4 *5)
gl.vertexAttribPointer(@shader[shaderIndex].attribs.aTangent, 3, gl.FLOAT, false, @quadVerts.itemSize*4, 4 *8)
gl.uniform3fv(@shader[shaderIndex].uniforms.randomSeed, seeds)
# create and attach texture map as render target
heightMap = gl.createTexture()
gl.bindTexture(gl.TEXTURE_2D, heightMap)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, @fbo.width, @fbo.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null)
gl.bindTexture(gl.TEXTURE_2D, null)
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, heightMap, 0)
gl.drawArrays(gl.TRIANGLES, 0, @quadVerts.numItems);
return heightMap
| true | # Copyright (C) 2013 PI:NAME:<NAME>END_PI
# Released under The MIT License - see "LICENSE" file for details.
root = exports ? this
class root.FarMapGenerator
constructor: (mapResolution) ->
# load shaders
@shader = []
for i in [0 .. kosmosShaderHeightFunctions.length-1]
@shader[i] = xgl.loadProgram("farMapGenerator" + i)
@shader[i].uniforms = xgl.getProgramUniforms(@shader[i], ["randomSeed"])
@shader[i].attribs = xgl.getProgramAttribs(@shader[i], ["aUV", "aPos", "aTangent", "aBinormal"])
# initialize FBO
@fbo = gl.createFramebuffer()
gl.bindFramebuffer(gl.FRAMEBUFFER, @fbo)
@fbo.width = mapResolution * 6 # six cube faces all packed into this single texture
@fbo.height = mapResolution
console.log("Initialized low resolution planet map generator FBO at #{@fbo.width} x #{@fbo.height}")
gl.bindFramebuffer(gl.FRAMEBUFFER, null)
# create fullscreen quad vertices
buff = new Float32Array(6*6*11)
i = 0
tangent = [0, 0, 0]
binormal = [0, 0, 0]
for faceIndex in [0..5]
for uv in [[0,0], [1,0], [0,1], [1,0], [1,1], [0,1]]
pos = mapPlaneToCube(uv[0], uv[1], faceIndex)
buff[i++] = (uv[0] + faceIndex) / 6.0; buff[i++] = uv[1]
buff[i++] = pos[0]; buff[i++] = pos[1]; buff[i++] = pos[2]
posU = mapPlaneToCube(uv[0]+1, uv[1], faceIndex)
posV = mapPlaneToCube(uv[0], uv[1]+1, faceIndex)
binormal = [posU[0]-pos[0], posU[1]-pos[1], posU[2]-pos[2]]
tangent = [posV[0]-pos[0], posV[1]-pos[1], posV[2]-pos[2]]
buff[i++] = binormal[0]; buff[i++] = binormal[1]; buff[i++] = binormal[2]
buff[i++] = tangent[0]; buff[i++] = tangent[1]; buff[i++] = tangent[2]
@quadVerts = gl.createBuffer()
gl.bindBuffer(gl.ARRAY_BUFFER, @quadVerts);
gl.bufferData(gl.ARRAY_BUFFER, buff, gl.STATIC_DRAW)
gl.bindBuffer(gl.ARRAY_BUFFER, null)
@quadVerts.itemSize = 11
@quadVerts.numItems = buff.length / @quadVerts.itemSize
start: ->
gl.disable(gl.DEPTH_TEST)
gl.depthMask(false)
gl.bindFramebuffer(gl.FRAMEBUFFER, @fbo)
gl.viewport(0, 0, @fbo.width, @fbo.height)
gl.bindBuffer(gl.ARRAY_BUFFER, @quadVerts)
gl.enableVertexAttribArray(@shader[0].attribs.aUV)
gl.enableVertexAttribArray(@shader[0].attribs.aPos)
gl.enableVertexAttribArray(@shader[0].attribs.aBinormal)
gl.enableVertexAttribArray(@shader[0].attribs.aTangent)
finish: ->
gl.disableVertexAttribArray(@shader[0].attribs.aUV)
gl.disableVertexAttribArray(@shader[0].attribs.aPos)
gl.disableVertexAttribArray(@shader[0].attribs.aBinormal)
gl.disableVertexAttribArray(@shader[0].attribs.aTangent)
gl.bindBuffer(gl.ARRAY_BUFFER, null)
gl.bindFramebuffer(gl.FRAMEBUFFER, null)
gl.useProgram(null)
gl.depthMask(true)
gl.enable(gl.DEPTH_TEST)
generate: (seed) ->
# setup seed values
rndStr = new RandomStream(seed)
seeds = [rndStr.unit(), rndStr.unit(), rndStr.unit()]
shaderIndex = rndStr.intRange(0, kosmosShaderHeightFunctions.length-1)
console.log("Using planet category " + shaderIndex)
# set shader from seed
gl.useProgram(@shader[shaderIndex])
gl.vertexAttribPointer(@shader[shaderIndex].attribs.aUV, 2, gl.FLOAT, false, @quadVerts.itemSize*4, 0)
gl.vertexAttribPointer(@shader[shaderIndex].attribs.aPos, 3, gl.FLOAT, false, @quadVerts.itemSize*4, 4 *2)
gl.vertexAttribPointer(@shader[shaderIndex].attribs.aBinormal, 3, gl.FLOAT, false, @quadVerts.itemSize*4, 4 *5)
gl.vertexAttribPointer(@shader[shaderIndex].attribs.aTangent, 3, gl.FLOAT, false, @quadVerts.itemSize*4, 4 *8)
gl.uniform3fv(@shader[shaderIndex].uniforms.randomSeed, seeds)
# create and attach texture map as render target
heightMap = gl.createTexture()
gl.bindTexture(gl.TEXTURE_2D, heightMap)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, @fbo.width, @fbo.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null)
gl.bindTexture(gl.TEXTURE_2D, null)
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, heightMap, 0)
gl.drawArrays(gl.TRIANGLES, 0, @quadVerts.numItems);
return heightMap
|
[
{
"context": " connector: 'peter-parker'\n token: 'some-token'\n gateblu:\n running: true\n ",
"end": 1144,
"score": 0.48237359523773193,
"start": 1134,
"tag": "KEY",
"value": "some-token"
},
{
"context": " connector: 'peter-parker'\n ... | test/device-manager-spec.coffee | octoblu/node-gateblu-service | 0 | fs = require 'fs-extra'
path = require 'path'
DeviceManager = require '../src/device-manager'
ProcessManager = require '../src/process-manager'
describe 'DeviceManager', ->
before ->
@buildPath = path.join __dirname, '..', 'tmp'
fs.removeSync(path.join(@buildPath, 'node_modules'))
fs.removeSync(path.join(@buildPath, 'pids'))
beforeEach ->
@processManager = new ProcessManager {@buildPath}
@sut = new DeviceManager {@buildPath, meshbluConfig: {server:'localhost',port:0xd00d}}
afterEach (done) ->
@sut.shutdown done
describe 'when nothing is running', ->
it 'should not have spiderman running', ->
expect(@processManager.isRunning uuid: 'spiderman').to.be.false
it 'should not have any processes', (done) ->
@processManager.getAllProcesses (error, items) =>
return done error if error?
expect(items).to.be.empty
done()
describe 'when a device is started', ->
beforeEach (done) ->
@timeout 50000
spiderman =
uuid: 'spiderman'
type: 'superhero'
connector: 'peter-parker'
token: 'some-token'
gateblu:
running: true
connector: 'gateblu-test-connector'
@sut.start spiderman, (error) =>
@firstPid = @processManager.get spiderman
done error
it 'should write a pid file', ->
expect(@processManager.exists {uuid: 'spiderman'}).to.be.true
it 'should the process should be running', ->
expect(@processManager.isRunning {uuid: 'spiderman'}).to.be.true
describe 'when the device is started again', ->
beforeEach (done) ->
spiderman =
uuid: 'spiderman'
type: 'superhero'
connector: 'peter-parker'
token: 'some-token'
gateblu:
running: false
connector: 'gateblu-test-connector'
@timeout 10000
@sut.start spiderman, (error) =>
@secondPid = @processManager.get spiderman
done error
it 'should not change the pid', ->
expect(@firstPid).to.equal @secondPid
describe 'when a device is start but is missing the connector', ->
beforeEach (done) ->
spiderman =
uuid: 'spiderman'
type: 'superhero'
connector: 'peter-parker'
token: 'some-token'
gateblu:
running: true
@sut.start spiderman, (@error) => done()
it 'should not have error', ->
expect(@error).to.not.exist
it 'should not write a pid file', ->
expect(@processManager.exists {uuid: 'spiderman'}).to.be.false
it 'should the process should not be running', ->
expect(@processManager.isRunning {uuid: 'spiderman'}).to.be.false
| 152523 | fs = require 'fs-extra'
path = require 'path'
DeviceManager = require '../src/device-manager'
ProcessManager = require '../src/process-manager'
describe 'DeviceManager', ->
before ->
@buildPath = path.join __dirname, '..', 'tmp'
fs.removeSync(path.join(@buildPath, 'node_modules'))
fs.removeSync(path.join(@buildPath, 'pids'))
beforeEach ->
@processManager = new ProcessManager {@buildPath}
@sut = new DeviceManager {@buildPath, meshbluConfig: {server:'localhost',port:0xd00d}}
afterEach (done) ->
@sut.shutdown done
describe 'when nothing is running', ->
it 'should not have spiderman running', ->
expect(@processManager.isRunning uuid: 'spiderman').to.be.false
it 'should not have any processes', (done) ->
@processManager.getAllProcesses (error, items) =>
return done error if error?
expect(items).to.be.empty
done()
describe 'when a device is started', ->
beforeEach (done) ->
@timeout 50000
spiderman =
uuid: 'spiderman'
type: 'superhero'
connector: 'peter-parker'
token: '<KEY>'
gateblu:
running: true
connector: 'gateblu-test-connector'
@sut.start spiderman, (error) =>
@firstPid = @processManager.get spiderman
done error
it 'should write a pid file', ->
expect(@processManager.exists {uuid: 'spiderman'}).to.be.true
it 'should the process should be running', ->
expect(@processManager.isRunning {uuid: 'spiderman'}).to.be.true
describe 'when the device is started again', ->
beforeEach (done) ->
spiderman =
uuid: 'spiderman'
type: 'superhero'
connector: 'peter-parker'
token: '<KEY>'
gateblu:
running: false
connector: 'gateblu-test-connector'
@timeout 10000
@sut.start spiderman, (error) =>
@secondPid = @processManager.get spiderman
done error
it 'should not change the pid', ->
expect(@firstPid).to.equal @secondPid
describe 'when a device is start but is missing the connector', ->
beforeEach (done) ->
spiderman =
uuid: '<NAME>man'
type: 'superhero'
connector: 'peter-parker'
token: '<KEY> <PASSWORD>-<KEY>'
gateblu:
running: true
@sut.start spiderman, (@error) => done()
it 'should not have error', ->
expect(@error).to.not.exist
it 'should not write a pid file', ->
expect(@processManager.exists {uuid: '<NAME>man'}).to.be.false
it 'should the process should not be running', ->
expect(@processManager.isRunning {uuid: 'spiderman'}).to.be.false
| true | fs = require 'fs-extra'
path = require 'path'
DeviceManager = require '../src/device-manager'
ProcessManager = require '../src/process-manager'
describe 'DeviceManager', ->
before ->
@buildPath = path.join __dirname, '..', 'tmp'
fs.removeSync(path.join(@buildPath, 'node_modules'))
fs.removeSync(path.join(@buildPath, 'pids'))
beforeEach ->
@processManager = new ProcessManager {@buildPath}
@sut = new DeviceManager {@buildPath, meshbluConfig: {server:'localhost',port:0xd00d}}
afterEach (done) ->
@sut.shutdown done
describe 'when nothing is running', ->
it 'should not have spiderman running', ->
expect(@processManager.isRunning uuid: 'spiderman').to.be.false
it 'should not have any processes', (done) ->
@processManager.getAllProcesses (error, items) =>
return done error if error?
expect(items).to.be.empty
done()
describe 'when a device is started', ->
beforeEach (done) ->
@timeout 50000
spiderman =
uuid: 'spiderman'
type: 'superhero'
connector: 'peter-parker'
token: 'PI:KEY:<KEY>END_PI'
gateblu:
running: true
connector: 'gateblu-test-connector'
@sut.start spiderman, (error) =>
@firstPid = @processManager.get spiderman
done error
it 'should write a pid file', ->
expect(@processManager.exists {uuid: 'spiderman'}).to.be.true
it 'should the process should be running', ->
expect(@processManager.isRunning {uuid: 'spiderman'}).to.be.true
describe 'when the device is started again', ->
beforeEach (done) ->
spiderman =
uuid: 'spiderman'
type: 'superhero'
connector: 'peter-parker'
token: 'PI:KEY:<KEY>END_PI'
gateblu:
running: false
connector: 'gateblu-test-connector'
@timeout 10000
@sut.start spiderman, (error) =>
@secondPid = @processManager.get spiderman
done error
it 'should not change the pid', ->
expect(@firstPid).to.equal @secondPid
describe 'when a device is start but is missing the connector', ->
beforeEach (done) ->
spiderman =
uuid: 'PI:NAME:<NAME>END_PIman'
type: 'superhero'
connector: 'peter-parker'
token: 'PI:KEY:<KEY>END_PI PI:PASSWORD:<PASSWORD>END_PI-PI:KEY:<KEY>END_PI'
gateblu:
running: true
@sut.start spiderman, (@error) => done()
it 'should not have error', ->
expect(@error).to.not.exist
it 'should not write a pid file', ->
expect(@processManager.exists {uuid: 'PI:NAME:<NAME>END_PIman'}).to.be.false
it 'should the process should not be running', ->
expect(@processManager.isRunning {uuid: 'spiderman'}).to.be.false
|
[
{
"context": "#############\n#\n#\tMoocita collections\n# Created by Markus on 26/10/2015.\n#\n################################",
"end": 99,
"score": 0.9995125532150269,
"start": 93,
"tag": "NAME",
"value": "Markus"
}
] | server/publications/posts.coffee | MooqitaSFH/worklearn | 0 | #######################################################
#
# Moocita collections
# Created by Markus on 26/10/2015.
#
#######################################################
#######################################################
Meteor.publish "posts", (group_name) ->
check group_name, String
user_id = this.userId
filter =
group_name: group_name
visible_to: "all"
crs = Posts.find filter
log_publication crs, user_id, "posts"
return crs
| 154511 | #######################################################
#
# Moocita collections
# Created by <NAME> on 26/10/2015.
#
#######################################################
#######################################################
Meteor.publish "posts", (group_name) ->
check group_name, String
user_id = this.userId
filter =
group_name: group_name
visible_to: "all"
crs = Posts.find filter
log_publication crs, user_id, "posts"
return crs
| true | #######################################################
#
# Moocita collections
# Created by PI:NAME:<NAME>END_PI on 26/10/2015.
#
#######################################################
#######################################################
Meteor.publish "posts", (group_name) ->
check group_name, String
user_id = this.userId
filter =
group_name: group_name
visible_to: "all"
crs = Posts.find filter
log_publication crs, user_id, "posts"
return crs
|
[
{
"context": "\n# batman.jquery.coffee\n# batman.js\n#\n# Created by Nick Small\n# Copyright 2011, Shopify\n#\n\n# Include this file ",
"end": 62,
"score": 0.9996473789215088,
"start": 52,
"tag": "NAME",
"value": "Nick Small"
},
{
"context": " data: data || @get 'data'\n userna... | src/batman.jquery.coffee | nickjs/batman | 1 | #
# batman.jquery.coffee
# batman.js
#
# Created by Nick Small
# Copyright 2011, Shopify
#
# Include this file instead of batman.nodep if your
# project already uses jQuery. It will map a few
# batman.js methods to existing jQuery methods.
Batman.Request::send = (data) ->
options =
url: @get 'url'
type: @get 'method'
dataType: @get 'type'
data: data || @get 'data'
username: @get 'username'
password: @get 'password'
beforeSend: =>
@loading yes
success: (response, textStatus, xhr) =>
@set 'status', xhr.status
@set 'response', response
@success response
error: (xhr, status, error) =>
@set 'status', xhr.status
@set 'response', xhr.responseText
xhr.request = @
@error xhr
complete: =>
@loading no
@loaded yes
if @get('method') in ['PUT', 'POST']
unless @get 'formData'
options.contentType = @get 'contentType'
else
options.contentType = false
options.processData = false
options.data = @constructor.objectToFormData(options.data)
jQuery.ajax options
Batman.mixins.animation =
show: (addToParent) ->
jq = $(@)
show = ->
jq.show 600
if addToParent
addToParent.append?.appendChild @
addToParent.before?.parentNode.insertBefore @, addToParent.before
jq.hide()
setTimeout show, 0
else
show()
@
hide: (removeFromParent) ->
$(@).hide 600, =>
@parentNode?.removeChild @ if removeFromParent
@
| 75998 | #
# batman.jquery.coffee
# batman.js
#
# Created by <NAME>
# Copyright 2011, Shopify
#
# Include this file instead of batman.nodep if your
# project already uses jQuery. It will map a few
# batman.js methods to existing jQuery methods.
Batman.Request::send = (data) ->
options =
url: @get 'url'
type: @get 'method'
dataType: @get 'type'
data: data || @get 'data'
username: @get 'username'
password: @get 'password'
beforeSend: =>
@loading yes
success: (response, textStatus, xhr) =>
@set 'status', xhr.status
@set 'response', response
@success response
error: (xhr, status, error) =>
@set 'status', xhr.status
@set 'response', xhr.responseText
xhr.request = @
@error xhr
complete: =>
@loading no
@loaded yes
if @get('method') in ['PUT', 'POST']
unless @get 'formData'
options.contentType = @get 'contentType'
else
options.contentType = false
options.processData = false
options.data = @constructor.objectToFormData(options.data)
jQuery.ajax options
Batman.mixins.animation =
show: (addToParent) ->
jq = $(@)
show = ->
jq.show 600
if addToParent
addToParent.append?.appendChild @
addToParent.before?.parentNode.insertBefore @, addToParent.before
jq.hide()
setTimeout show, 0
else
show()
@
hide: (removeFromParent) ->
$(@).hide 600, =>
@parentNode?.removeChild @ if removeFromParent
@
| true | #
# batman.jquery.coffee
# batman.js
#
# Created by PI:NAME:<NAME>END_PI
# Copyright 2011, Shopify
#
# Include this file instead of batman.nodep if your
# project already uses jQuery. It will map a few
# batman.js methods to existing jQuery methods.
Batman.Request::send = (data) ->
options =
url: @get 'url'
type: @get 'method'
dataType: @get 'type'
data: data || @get 'data'
username: @get 'username'
password: @get 'password'
beforeSend: =>
@loading yes
success: (response, textStatus, xhr) =>
@set 'status', xhr.status
@set 'response', response
@success response
error: (xhr, status, error) =>
@set 'status', xhr.status
@set 'response', xhr.responseText
xhr.request = @
@error xhr
complete: =>
@loading no
@loaded yes
if @get('method') in ['PUT', 'POST']
unless @get 'formData'
options.contentType = @get 'contentType'
else
options.contentType = false
options.processData = false
options.data = @constructor.objectToFormData(options.data)
jQuery.ajax options
Batman.mixins.animation =
show: (addToParent) ->
jq = $(@)
show = ->
jq.show 600
if addToParent
addToParent.append?.appendChild @
addToParent.before?.parentNode.insertBefore @, addToParent.before
jq.hide()
setTimeout show, 0
else
show()
@
hide: (removeFromParent) ->
$(@).hide 600, =>
@parentNode?.removeChild @ if removeFromParent
@
|
[
{
"context": " testNotificate: (test) ->\n emails = ['ya@Mbilo.com']\n source = 'source'\n Impor",
"end": 970,
"score": 0.9998531341552734,
"start": 958,
"tag": "EMAIL",
"value": "ya@Mbilo.com"
},
{
"context": " dataprovider(test, [\n ... | src/tests/server/import/notification/test_notificator.coffee | LaPingvino/rizzoma | 88 | global.getLogger = () ->
return {
error: ->
log: ->
}
sinon = require('sinon-plus')
testCase = require('nodeunit').testCase
dataprovider = require('dataprovider')
Conf = require('../../../../server/conf').Conf
ImportSourceParser = require('../../../../server/import/source_parser').ImportSourceParser
UserCouchProcessor = require('../../../../server/user/couch_processor').UserCouchProcessor
#CouchImportProcessor = require('../../../../server/import/couch_processor').CouchImportProcessor
ImportNotificator = require('../../../../server/import/notification').ImportNotificator
CouchImportNotificationProcessor = require('../../../../server/import/notification/couch_processor').CouchImportNotificationProcessor
module.exports =
ImportSourceParserTest: testCase
setUp: (callback) ->
callback()
tearDown: (callback) ->
callback()
testNotificate: (test) ->
emails = ['ya@Mbilo.com']
source = 'source'
ImportNotificator._smtp = {}
ImportNotificatorMock = sinon.mock(ImportNotificator)
ImportNotificatorMock
.expects('_getEmailsToSendNotification')
.withArgs(emails)
.once()
.callsArgWith(1, null, emails)
ImportNotificatorMock
.expects('_notificateEmails')
.withArgs(emails, source)
.once()
.callsArgWith(2, null, 'ok')
ImportNotificator.notificate(emails, source, (err, res) ->
test.equal('ok', res)
sinon.verifyAll()
sinon.restoreAll()
test.done()
)
test_getEmailsToSendNotification: (test) ->
code = (done, exp, emails, loadedEmails) ->
CouchImportNotificationProcessorMock = sinon.mock(CouchImportNotificationProcessor)
CouchImportNotificationProcessorMock
.expects('getByIdsAsDict')
.withArgs(emails)
.once()
.callsArgWith(1, null, loadedEmails)
ImportNotificator._getEmailsToSendNotification(emails, (err, toSend) ->
test.deepEqual(exp, toSend)
sinon.verifyAll()
sinon.restoreAll()
done()
)
dataprovider(test, [
[['non@exist.email'], ['non@exist.email'], {'ya@exist.email':'ya@exist.email'}]
[[], ['ya@exist.email'], {'ya@exist.email':'ya@exist.email'}]
], code)
test_getNotificationContext: (test) ->
source =
userId: '0_u_1'
importedWaveUrl: 'sasadfcsaddfghghn345dcv'
sourceData: 'sourceData'
exp =
user: 'user'
waveLink: Conf.get('baseUrl') + "/wave/sasadfcsaddfghghn345dcv/?utm_source=email&utm_medium=body&utm_campaign=exportwave"
waveTitle: "waveTitle"
UserProcessorMock = sinon.mock(UserCouchProcessor)
UserProcessorMock
.expects('getById')
.withArgs(source.userId)
.once()
.callsArgWith(1, null, 'user')
ImportSourceParserMock = sinon.mock(ImportSourceParser)
ImportSourceParserMock
.expects('getWaveTitle')
.withExactArgs('sourceData')
.once()
.returns('waveTitle')
ImportNotificator._getNotificationContext(source, (err, context) ->
test.deepEqual(exp, context)
sinon.verifyAll()
sinon.restoreAll()
test.done()
)
test_notificateEmails: (test) ->
emails = ['ya@exist.email', 'i-ya@exist.email']
source =
id: 'source_id'
context = 'ya context'
ImportNotificatorMock = sinon.mock(ImportNotificator)
ImportNotificatorMock
.expects('_getNotificationContext')
.withArgs(source)
.once()
.callsArgWith(1, null, context)
ImportNotificatorMock
.expects('_sendNotificationsAndSaveResults')
.withArgs(emails, context, source.id)
.once()
.callsArgWith(3, null, 'ok')
ImportNotificator._notificateEmails(emails, source, (err, res) ->
test.equal('ok', res)
sinon.verifyAll()
sinon.restoreAll()
test.done()
)
test_sendNotificationsAndSaveResults: (test) ->
emailsToSend = ['ya@exist.email', 'i-ya@exist.email']
expRes =
'ya@exist.email': null
'i-ya@exist.email': {error:'error'}
context = 'ya context'
sourceId = 'ya sourceId'
ImportNotificatorMock = sinon.mock(ImportNotificator)
ImportNotificatorMock
.expects('_notificateEmailAndSaveResult')
.withArgs('ya@exist.email', context, sourceId)
.once()
.callsArgWith(3, null, null)
ImportNotificatorMock
.expects('_notificateEmailAndSaveResult')
.withArgs('i-ya@exist.email', context, sourceId)
.once()
.callsArgWith(3, {error:'error'}, null)
ImportNotificator._sendNotificationsAndSaveResults(emailsToSend, context, sourceId, (err, res) ->
test.deepEqual(expRes, res)
sinon.verifyAll()
sinon.restoreAll()
test.done()
)
test_notificateEmailAndSaveResult: (test) ->
context = 'ya context'
sourceId = 'ya sourceId'
ImportNotificatorMock = sinon.mock(ImportNotificator)
ImportNotificatorMock
.expects('_notificateEmail')
.withArgs('ya@exist.email', context)
.once()
.callsArgWith(2, null, null)
ImportNotificatorMock
.expects('_saveNotification')
.withArgs('ya@exist.email', sourceId)
.once()
.callsArgWith(2, null, null)
ImportNotificator._notificateEmailAndSaveResult('ya@exist.email', context, sourceId, (err, res) ->
test.deepEqual(null, null)
sinon.verifyAll()
sinon.restoreAll()
test.done()
)
| 201595 | global.getLogger = () ->
return {
error: ->
log: ->
}
sinon = require('sinon-plus')
testCase = require('nodeunit').testCase
dataprovider = require('dataprovider')
Conf = require('../../../../server/conf').Conf
ImportSourceParser = require('../../../../server/import/source_parser').ImportSourceParser
UserCouchProcessor = require('../../../../server/user/couch_processor').UserCouchProcessor
#CouchImportProcessor = require('../../../../server/import/couch_processor').CouchImportProcessor
ImportNotificator = require('../../../../server/import/notification').ImportNotificator
CouchImportNotificationProcessor = require('../../../../server/import/notification/couch_processor').CouchImportNotificationProcessor
module.exports =
ImportSourceParserTest: testCase
setUp: (callback) ->
callback()
tearDown: (callback) ->
callback()
testNotificate: (test) ->
emails = ['<EMAIL>']
source = 'source'
ImportNotificator._smtp = {}
ImportNotificatorMock = sinon.mock(ImportNotificator)
ImportNotificatorMock
.expects('_getEmailsToSendNotification')
.withArgs(emails)
.once()
.callsArgWith(1, null, emails)
ImportNotificatorMock
.expects('_notificateEmails')
.withArgs(emails, source)
.once()
.callsArgWith(2, null, 'ok')
ImportNotificator.notificate(emails, source, (err, res) ->
test.equal('ok', res)
sinon.verifyAll()
sinon.restoreAll()
test.done()
)
test_getEmailsToSendNotification: (test) ->
code = (done, exp, emails, loadedEmails) ->
CouchImportNotificationProcessorMock = sinon.mock(CouchImportNotificationProcessor)
CouchImportNotificationProcessorMock
.expects('getByIdsAsDict')
.withArgs(emails)
.once()
.callsArgWith(1, null, loadedEmails)
ImportNotificator._getEmailsToSendNotification(emails, (err, toSend) ->
test.deepEqual(exp, toSend)
sinon.verifyAll()
sinon.restoreAll()
done()
)
dataprovider(test, [
[['<EMAIL>'], ['<EMAIL>'], {'<EMAIL>':'<EMAIL>'}]
[[], ['<EMAIL>'], {'<EMAIL>':'<EMAIL>'}]
], code)
test_getNotificationContext: (test) ->
source =
userId: '0_u_1'
importedWaveUrl: 'sasadfcsaddfghghn345dcv'
sourceData: 'sourceData'
exp =
user: 'user'
waveLink: Conf.get('baseUrl') + "/wave/sasadfcsaddfghghn345dcv/?utm_source=email&utm_medium=body&utm_campaign=exportwave"
waveTitle: "waveTitle"
UserProcessorMock = sinon.mock(UserCouchProcessor)
UserProcessorMock
.expects('getById')
.withArgs(source.userId)
.once()
.callsArgWith(1, null, 'user')
ImportSourceParserMock = sinon.mock(ImportSourceParser)
ImportSourceParserMock
.expects('getWaveTitle')
.withExactArgs('sourceData')
.once()
.returns('waveTitle')
ImportNotificator._getNotificationContext(source, (err, context) ->
test.deepEqual(exp, context)
sinon.verifyAll()
sinon.restoreAll()
test.done()
)
test_notificateEmails: (test) ->
emails = ['<EMAIL>', '<EMAIL>']
source =
id: 'source_id'
context = 'ya context'
ImportNotificatorMock = sinon.mock(ImportNotificator)
ImportNotificatorMock
.expects('_getNotificationContext')
.withArgs(source)
.once()
.callsArgWith(1, null, context)
ImportNotificatorMock
.expects('_sendNotificationsAndSaveResults')
.withArgs(emails, context, source.id)
.once()
.callsArgWith(3, null, 'ok')
ImportNotificator._notificateEmails(emails, source, (err, res) ->
test.equal('ok', res)
sinon.verifyAll()
sinon.restoreAll()
test.done()
)
test_sendNotificationsAndSaveResults: (test) ->
emailsToSend = ['<EMAIL>', '<EMAIL>']
expRes =
'<EMAIL>': null
'<EMAIL>': {error:'error'}
context = 'ya context'
sourceId = 'ya sourceId'
ImportNotificatorMock = sinon.mock(ImportNotificator)
ImportNotificatorMock
.expects('_notificateEmailAndSaveResult')
.withArgs('<EMAIL>', context, sourceId)
.once()
.callsArgWith(3, null, null)
ImportNotificatorMock
.expects('_notificateEmailAndSaveResult')
.withArgs('<EMAIL>', context, sourceId)
.once()
.callsArgWith(3, {error:'error'}, null)
ImportNotificator._sendNotificationsAndSaveResults(emailsToSend, context, sourceId, (err, res) ->
test.deepEqual(expRes, res)
sinon.verifyAll()
sinon.restoreAll()
test.done()
)
test_notificateEmailAndSaveResult: (test) ->
context = 'ya context'
sourceId = 'ya sourceId'
ImportNotificatorMock = sinon.mock(ImportNotificator)
ImportNotificatorMock
.expects('_notificateEmail')
.withArgs('<EMAIL>', context)
.once()
.callsArgWith(2, null, null)
ImportNotificatorMock
.expects('_saveNotification')
.withArgs('<EMAIL>', sourceId)
.once()
.callsArgWith(2, null, null)
ImportNotificator._notificateEmailAndSaveResult('<EMAIL>', context, sourceId, (err, res) ->
test.deepEqual(null, null)
sinon.verifyAll()
sinon.restoreAll()
test.done()
)
| true | global.getLogger = () ->
return {
error: ->
log: ->
}
sinon = require('sinon-plus')
testCase = require('nodeunit').testCase
dataprovider = require('dataprovider')
Conf = require('../../../../server/conf').Conf
ImportSourceParser = require('../../../../server/import/source_parser').ImportSourceParser
UserCouchProcessor = require('../../../../server/user/couch_processor').UserCouchProcessor
#CouchImportProcessor = require('../../../../server/import/couch_processor').CouchImportProcessor
ImportNotificator = require('../../../../server/import/notification').ImportNotificator
CouchImportNotificationProcessor = require('../../../../server/import/notification/couch_processor').CouchImportNotificationProcessor
module.exports =
ImportSourceParserTest: testCase
setUp: (callback) ->
callback()
tearDown: (callback) ->
callback()
testNotificate: (test) ->
emails = ['PI:EMAIL:<EMAIL>END_PI']
source = 'source'
ImportNotificator._smtp = {}
ImportNotificatorMock = sinon.mock(ImportNotificator)
ImportNotificatorMock
.expects('_getEmailsToSendNotification')
.withArgs(emails)
.once()
.callsArgWith(1, null, emails)
ImportNotificatorMock
.expects('_notificateEmails')
.withArgs(emails, source)
.once()
.callsArgWith(2, null, 'ok')
ImportNotificator.notificate(emails, source, (err, res) ->
test.equal('ok', res)
sinon.verifyAll()
sinon.restoreAll()
test.done()
)
test_getEmailsToSendNotification: (test) ->
code = (done, exp, emails, loadedEmails) ->
CouchImportNotificationProcessorMock = sinon.mock(CouchImportNotificationProcessor)
CouchImportNotificationProcessorMock
.expects('getByIdsAsDict')
.withArgs(emails)
.once()
.callsArgWith(1, null, loadedEmails)
ImportNotificator._getEmailsToSendNotification(emails, (err, toSend) ->
test.deepEqual(exp, toSend)
sinon.verifyAll()
sinon.restoreAll()
done()
)
dataprovider(test, [
[['PI:EMAIL:<EMAIL>END_PI'], ['PI:EMAIL:<EMAIL>END_PI'], {'PI:EMAIL:<EMAIL>END_PI':'PI:EMAIL:<EMAIL>END_PI'}]
[[], ['PI:EMAIL:<EMAIL>END_PI'], {'PI:EMAIL:<EMAIL>END_PI':'PI:EMAIL:<EMAIL>END_PI'}]
], code)
test_getNotificationContext: (test) ->
source =
userId: '0_u_1'
importedWaveUrl: 'sasadfcsaddfghghn345dcv'
sourceData: 'sourceData'
exp =
user: 'user'
waveLink: Conf.get('baseUrl') + "/wave/sasadfcsaddfghghn345dcv/?utm_source=email&utm_medium=body&utm_campaign=exportwave"
waveTitle: "waveTitle"
UserProcessorMock = sinon.mock(UserCouchProcessor)
UserProcessorMock
.expects('getById')
.withArgs(source.userId)
.once()
.callsArgWith(1, null, 'user')
ImportSourceParserMock = sinon.mock(ImportSourceParser)
ImportSourceParserMock
.expects('getWaveTitle')
.withExactArgs('sourceData')
.once()
.returns('waveTitle')
ImportNotificator._getNotificationContext(source, (err, context) ->
test.deepEqual(exp, context)
sinon.verifyAll()
sinon.restoreAll()
test.done()
)
test_notificateEmails: (test) ->
emails = ['PI:EMAIL:<EMAIL>END_PI', 'PI:EMAIL:<EMAIL>END_PI']
source =
id: 'source_id'
context = 'ya context'
ImportNotificatorMock = sinon.mock(ImportNotificator)
ImportNotificatorMock
.expects('_getNotificationContext')
.withArgs(source)
.once()
.callsArgWith(1, null, context)
ImportNotificatorMock
.expects('_sendNotificationsAndSaveResults')
.withArgs(emails, context, source.id)
.once()
.callsArgWith(3, null, 'ok')
ImportNotificator._notificateEmails(emails, source, (err, res) ->
test.equal('ok', res)
sinon.verifyAll()
sinon.restoreAll()
test.done()
)
test_sendNotificationsAndSaveResults: (test) ->
emailsToSend = ['PI:EMAIL:<EMAIL>END_PI', 'PI:EMAIL:<EMAIL>END_PI']
expRes =
'PI:EMAIL:<EMAIL>END_PI': null
'PI:EMAIL:<EMAIL>END_PI': {error:'error'}
context = 'ya context'
sourceId = 'ya sourceId'
ImportNotificatorMock = sinon.mock(ImportNotificator)
ImportNotificatorMock
.expects('_notificateEmailAndSaveResult')
.withArgs('PI:EMAIL:<EMAIL>END_PI', context, sourceId)
.once()
.callsArgWith(3, null, null)
ImportNotificatorMock
.expects('_notificateEmailAndSaveResult')
.withArgs('PI:EMAIL:<EMAIL>END_PI', context, sourceId)
.once()
.callsArgWith(3, {error:'error'}, null)
ImportNotificator._sendNotificationsAndSaveResults(emailsToSend, context, sourceId, (err, res) ->
test.deepEqual(expRes, res)
sinon.verifyAll()
sinon.restoreAll()
test.done()
)
test_notificateEmailAndSaveResult: (test) ->
context = 'ya context'
sourceId = 'ya sourceId'
ImportNotificatorMock = sinon.mock(ImportNotificator)
ImportNotificatorMock
.expects('_notificateEmail')
.withArgs('PI:EMAIL:<EMAIL>END_PI', context)
.once()
.callsArgWith(2, null, null)
ImportNotificatorMock
.expects('_saveNotification')
.withArgs('PI:EMAIL:<EMAIL>END_PI', sourceId)
.once()
.callsArgWith(2, null, null)
ImportNotificator._notificateEmailAndSaveResult('PI:EMAIL:<EMAIL>END_PI', context, sourceId, (err, res) ->
test.deepEqual(null, null)
sinon.verifyAll()
sinon.restoreAll()
test.done()
)
|
[
{
"context": " for three.js with every basics you need\n# @author David Ronai / Makiopolis.com / @Makio64 \n# \nclass Stage3d\n\n\t@",
"end": 74,
"score": 0.9998455047607422,
"start": 63,
"tag": "NAME",
"value": "David Ronai"
},
{
"context": "th every basics you need\n# @author David... | src/coffee/core/3d/Stage3d.coffee | Makio64/Kyari | 0 | #
# Stage3d for three.js with every basics you need
# @author David Ronai / Makiopolis.com / @Makio64
#
class Stage3d
@camera = null
@cameraTarget = null
@scene = null
@renderer = null
@init = (options)->
w = window.innerWidth
h = window.innerHeight
@camera = new THREE.PerspectiveCamera( 40, w / h, 1, 10000 )
@camera.position.z = 15000
@cameraTarget = new THREE.Vector3(0,0,100)
@scene = new THREE.Scene()
transparent = options.transparent||false
antialias = options.antialias||false
@renderer = new THREE.WebGLRenderer({alpha:transparent,antialias:antialias})
@renderer.setSize( w, h )
document.body.appendChild(@renderer.domElement)
return
@add = (obj)->
@scene.add(obj)
return
@render = ()->
@camera.position.x += (@cameraTarget.x-@camera.position.x)*0.05
@camera.position.y += (@cameraTarget.y-@camera.position.y)*0.05
@camera.position.z += (@cameraTarget.z-@camera.position.z)*0.05
@camera.lookAt(@scene.position)
Stage3d.renderer.render(@scene, @camera)
return
@resize = ()->
if @renderer
@camera.aspect = window.innerWidth / window.innerHeight
@camera.updateProjectionMatrix()
@renderer.setSize( window.innerWidth, window.innerHeight )
return | 145257 | #
# Stage3d for three.js with every basics you need
# @author <NAME> / M<EMAIL> / @Makio64
#
class Stage3d
@camera = null
@cameraTarget = null
@scene = null
@renderer = null
@init = (options)->
w = window.innerWidth
h = window.innerHeight
@camera = new THREE.PerspectiveCamera( 40, w / h, 1, 10000 )
@camera.position.z = 15000
@cameraTarget = new THREE.Vector3(0,0,100)
@scene = new THREE.Scene()
transparent = options.transparent||false
antialias = options.antialias||false
@renderer = new THREE.WebGLRenderer({alpha:transparent,antialias:antialias})
@renderer.setSize( w, h )
document.body.appendChild(@renderer.domElement)
return
@add = (obj)->
@scene.add(obj)
return
@render = ()->
@camera.position.x += (@cameraTarget.x-@camera.position.x)*0.05
@camera.position.y += (@cameraTarget.y-@camera.position.y)*0.05
@camera.position.z += (@cameraTarget.z-@camera.position.z)*0.05
@camera.lookAt(@scene.position)
Stage3d.renderer.render(@scene, @camera)
return
@resize = ()->
if @renderer
@camera.aspect = window.innerWidth / window.innerHeight
@camera.updateProjectionMatrix()
@renderer.setSize( window.innerWidth, window.innerHeight )
return | true | #
# Stage3d for three.js with every basics you need
# @author PI:NAME:<NAME>END_PI / MPI:EMAIL:<EMAIL>END_PI / @Makio64
#
class Stage3d
@camera = null
@cameraTarget = null
@scene = null
@renderer = null
@init = (options)->
w = window.innerWidth
h = window.innerHeight
@camera = new THREE.PerspectiveCamera( 40, w / h, 1, 10000 )
@camera.position.z = 15000
@cameraTarget = new THREE.Vector3(0,0,100)
@scene = new THREE.Scene()
transparent = options.transparent||false
antialias = options.antialias||false
@renderer = new THREE.WebGLRenderer({alpha:transparent,antialias:antialias})
@renderer.setSize( w, h )
document.body.appendChild(@renderer.domElement)
return
@add = (obj)->
@scene.add(obj)
return
@render = ()->
@camera.position.x += (@cameraTarget.x-@camera.position.x)*0.05
@camera.position.y += (@cameraTarget.y-@camera.position.y)*0.05
@camera.position.z += (@cameraTarget.z-@camera.position.z)*0.05
@camera.lookAt(@scene.position)
Stage3d.renderer.render(@scene, @camera)
return
@resize = ()->
if @renderer
@camera.aspect = window.innerWidth / window.innerHeight
@camera.updateProjectionMatrix()
@renderer.setSize( window.innerWidth, window.innerHeight )
return |
[
{
"context": " input = {\n flow: 'EXR'\n key: 'A..EUR.SP00.A'\n }\n query = sdmxrest.getDataQuery inpu",
"end": 4299,
"score": 0.9995096921920776,
"start": 4286,
"tag": "KEY",
"value": "A..EUR.SP00.A"
},
{
"context": " input = {\n flow: 'EXR'\... | test/index.test.coffee | sosna/sdmx-rest.js | 16 | sdmxrest = require '../src/index'
{ApiVersion} = require '../src/utils/api-version'
chai = require 'chai'
chaiAsPromised = require 'chai-as-promised'
chai.use chaiAsPromised
should = chai.should()
nock = require 'nock'
describe 'API', ->
it 'offers the expected functions and objects', ->
sdmxrest.should.have.property 'getService'
sdmxrest.should.have.property('services').that.is.an 'array'
sdmxrest.should.have.property 'getDataQuery'
sdmxrest.should.have.property 'getMetadataQuery'
sdmxrest.should.have.property 'getAvailabilityQuery'
sdmxrest.should.have.property 'getSchemaQuery'
sdmxrest.should.have.property 'getUrl'
sdmxrest.should.have.property 'request'
sdmxrest.should.have.property 'checkStatus'
sdmxrest.should.have.property 'checkMediaType'
sdmxrest.should.have.property('data').that.is.an 'object'
sdmxrest.should.have.property('metadata').that.is.an 'object'
sdmxrest.should.have.property('availability').that.is.an 'object'
sdmxrest.should.have.property('schema').that.is.an 'object'
sdmxrest.should.have.property('utils').that.is.an 'object'
sdmxrest.should.have.property('data').that.is.an 'object'
sdmxrest.data.should.have.property('DataFormat').that.is.not.undefined
sdmxrest.data.should.have.property('DataDetail').that.is.not.undefined
sdmxrest.metadata.should.have.property('MetadataDetail')
.that.is.not.undefined
sdmxrest.metadata.should.have.property('MetadataFormat')
.that.is.not.undefined
sdmxrest.metadata.should.have.property('MetadataReferences')
.that.is.not.undefined
sdmxrest.availability.should.have.property('AvailabilityMode')
.that.is.not.undefined
sdmxrest.availability.should.have.property('AvailabilityReferences')
.that.is.not.undefined
sdmxrest.metadata.should.have.property('MetadataType').that.is.not.undefined
sdmxrest.schema.should.have.property('SchemaContext').that.is.not.undefined
sdmxrest.schema.should.have.property('SchemaFormat').that.is.not.undefined
sdmxrest.utils.should.have.property('ApiVersion').that.is.not.undefined
sdmxrest.utils.should.have.property('ApiResources').that.is.not.undefined
sdmxrest.utils.should.have.property('SdmxPatterns').that.is.not.undefined
sdmxrest.utils.SdmxPatterns.should.have.property('IDType')
.that.is.a 'regexp'
describe 'when using getService()', ->
it 'offers to use existing services', ->
service = sdmxrest.getService 'ECB'
service.should.be.an 'object'
service.should.have.property('id').that.equals 'ECB'
service.should.have.property('name').that.equals 'European Central Bank'
service.should.have.property('url').that.contains 'sdw-wsrest'
service.should.have.property('api').that.is.not.undefined
it 'offers to create services from properties', ->
input = {
id: 'TEST'
url: 'http://test.com'
}
service = sdmxrest.getService input
service.should.be.an 'object'
service.should.have.property('id').that.equals input.id
service.should.have.property('name').that.is.undefined
service.should.have.property('url').that.equals input.url
service.should.have.property('api').that.equals ApiVersion.LATEST
it 'fails if the requested service is unknown', ->
test = -> sdmxrest.getService 'UNKNOWN'
should.Throw(test, ReferenceError,
'is not in the list of predefined services')
it 'fails if the input is not of the expected type', ->
test = -> sdmxrest.getService 2
should.Throw(test, TypeError, 'Invalid type of ')
test = -> sdmxrest.getService undefined
should.Throw(test, TypeError, 'Invalid type of ')
test = -> sdmxrest.getService []
should.Throw(test, TypeError, 'Invalid type of ')
describe 'when using services', ->
it 'list some services', ->
sdmxrest.services.should.be.an 'array'
sdmxrest.services.should.have.property('length').that.is.gte 5
it 'should contain known services', ->
sdmxrest.services.should.include.members([sdmxrest.getService 'ECB_S'])
describe 'when using getDataQuery()', ->
it 'offers to create a data query from properties', ->
input = {
flow: 'EXR'
key: 'A..EUR.SP00.A'
}
query = sdmxrest.getDataQuery input
query.should.be.an 'object'
query.should.have.property('flow').that.equals input.flow
query.should.have.property('key').that.equals input.key
query.should.have.property('provider').that.equals 'all'
query.should.have.property('start').that.is.undefined
query.should.have.property('end').that.is.undefined
query.should.have.property('updatedAfter').that.is.undefined
query.should.have.property('firstNObs').that.is.undefined
query.should.have.property('lastNObs').that.is.undefined
query.should.have.property('obsDimension').that.is.undefined
query.should.have.property('detail').that.equals 'full'
query.should.have.property('history').that.is.false
it 'fails if the input is not of the expected type', ->
test = -> sdmxrest.getDataQuery undefined
should.Throw(test, Error, 'Not a valid data query')
test = -> sdmxrest.getDataQuery {test: 'TEST'}
should.Throw(test, Error, 'Not a valid data query')
describe 'when using getMetadataQuery()', ->
it 'offers to create a metadata query from properties', ->
input = {
resource: 'codelist'
id: 'CL_FREQ'
}
query = sdmxrest.getMetadataQuery input
query.should.be.an 'object'
query.should.have.property('resource').that.equals input.resource
query.should.have.property('id').that.equals input.id
query.should.have.property('agency').that.equals 'all'
query.should.have.property('version').that.equals 'latest'
query.should.have.property('item').that.equals 'all'
query.should.have.property('detail').that.equals 'full'
query.should.have.property('references').that.equals 'none'
it 'fails if the input is not of the expected type', ->
test = -> sdmxrest.getMetadataQuery undefined
should.Throw(test, Error, 'Not a valid metadata query')
test = -> sdmxrest.getMetadataQuery {test: 'TEST'}
should.Throw(test, Error, 'Not a valid metadata query')
describe 'when using getAvailabilityQuery()', ->
it 'offers to create an availability query from properties', ->
input = {
flow: 'EXR'
key: 'A..EUR.SP00.A'
}
query = sdmxrest.getAvailabilityQuery input
query.should.be.an 'object'
query.should.have.property('flow').that.equals input.flow
query.should.have.property('key').that.equals input.key
query.should.have.property('provider').that.equals 'all'
query.should.have.property('component').that.equals 'all'
query.should.have.property('start').that.is.undefined
query.should.have.property('end').that.is.undefined
query.should.have.property('updatedAfter').that.is.undefined
query.should.have.property('mode').that.equals 'exact'
query.should.have.property('references').that.equals 'none'
it 'fails if the input is not of the expected type', ->
test = -> sdmxrest.getAvailabilityQuery undefined
should.Throw(test, Error, 'Not a valid availability query')
test = -> sdmxrest.getAvailabilityQuery {test: 'TEST'}
should.Throw(test, Error, 'Not a valid availability query')
describe 'when using getSchemaQuery()', ->
it 'offers to create a schema query from properties', ->
input = {
context: 'datastructure'
agency: 'BIS'
id: 'BIS_CBS'
}
query = sdmxrest.getSchemaQuery input
query.should.be.an 'object'
query.should.have.property('context').that.equals input.context
query.should.have.property('id').that.equals input.id
query.should.have.property('agency').that.equals input.agency
query.should.have.property('version').that.equals 'latest'
query.should.have.property('explicit').that.is.false
query.should.have.property('obsDimension').that.is.undefined
it 'fails if the input is not of the expected type', ->
test = -> sdmxrest.getSchemaQuery undefined
should.Throw(test, Error, 'Not a valid schema query')
test = -> sdmxrest.getSchemaQuery {test: 'TEST'}
should.Throw(test, Error, 'Not a valid schema query')
describe 'when using getUrl()', ->
it 'creates a URL from a data query and a service objects', ->
query = sdmxrest.getDataQuery {flow: 'EXR', key: 'A.CHF.NOK.SP00.A'}
service = sdmxrest.getService 'ECB'
url = sdmxrest.getUrl query, service
url.should.be.a 'string'
url.should.contain service.url
url.should.contain query.flow
url.should.contain query.key
it 'creates a URL from a metadata query and a service objects', ->
url = sdmxrest.getUrl {resource: 'codelist', id: 'CL_FREQ'}, 'ECB'
url.should.be.a 'string'
url.should.contain 'sdw-wsrest.ecb.europa.eu'
url.should.contain 'codelist'
url.should.contain 'CL_FREQ'
it 'creates a URL from a schema query and a service objects', ->
q = {'context': 'dataflow', 'agency': 'BIS', 'id': 'CBS'}
url = sdmxrest.getUrl q, 'ECB'
url.should.be.a 'string'
url.should.contain 'sdw-wsrest.ecb.europa.eu'
url.should.contain 'schema'
url.should.contain 'dataflow/BIS/CBS'
it 'creates a URL from an availability query and a service objects', ->
input = {
flow: 'EXR'
key: 'A..EUR.SP00.A'
}
q = sdmxrest.getAvailabilityQuery input
s = sdmxrest.getService({url: 'http://ws-entry-point'});
url = sdmxrest.getUrl q, s
url.should.be.a 'string'
url.should.contain 'http://ws-entry-point'
url.should.contain 'availableconstraint'
url.should.contain 'EXR/A..EUR.SP00.A'
it 'creates a URL from an availability and service objects (mode)', ->
q = {
flow: 'EXR'
key: 'A..EUR.SP00.A'
mode: 'exact'
}
s = sdmxrest.getService({url: 'http://ws-entry-point'});
url = sdmxrest.getUrl q, s
url.should.be.a 'string'
url.should.contain 'http://ws-entry-point'
url.should.contain 'availableconstraint'
url.should.contain 'EXR/A..EUR.SP00.A'
url.should.contain 'mode=exact'
it 'creates a URL from an availability and a service objects (component)', ->
q = {
flow: 'EXR'
key: 'A..EUR.SP00.A'
component: 'FREQ'
}
s = sdmxrest.getService({url: 'http://ws-entry-point'});
url = sdmxrest.getUrl q, s
url.should.be.a 'string'
url.should.contain 'http://ws-entry-point'
url.should.contain 'availableconstraint'
url.should.contain 'EXR/A..EUR.SP00.A'
url.should.contain 'FREQ'
it 'creates a URL from an availability and a service objects (references)', ->
q = {
flow: 'EXR'
key: 'A..EUR.SP00.A'
references: 'all'
}
s = sdmxrest.getService({url: 'http://ws-entry-point'});
url = sdmxrest.getUrl q, s
url.should.be.a 'string'
url.should.contain 'http://ws-entry-point'
url.should.contain 'availableconstraint'
url.should.contain 'EXR/A..EUR.SP00.A'
url.should.contain 'references=all'
it 'fails if the input is not of the expected type', ->
test = -> sdmxrest.getUrl undefined, sdmxrest.getService 'ECB'
should.Throw(test, Error, 'Not a valid query')
test = -> sdmxrest.getUrl {}, sdmxrest.getService 'ECB'
should.Throw(test, Error, 'Not a valid query')
query = sdmxrest.getDataQuery {flow: 'EXR', key: 'A.CHF.NOK.SP00.A'}
test = -> sdmxrest.getUrl query, sdmxrest.getService 'TEST'
should.Throw(test, Error, 'not in the list of predefined services')
test = -> sdmxrest.getUrl query
should.Throw(test, Error, 'Not a valid service')
  # Execution of HTTP requests built from queries. All HTTP traffic is faked
  # with nock; each `it` callback implicitly returns a promise, which Mocha
  # (via chai-as-promised) awaits.
  describe 'when using request()', ->
    it 'offers to execute a request from a query and service objects', ->
      query = nock('http://sdw-wsrest.ecb.europa.eu')
        .get((uri) -> uri.indexOf('EXR') > -1)
        .reply 200, 'OK'
      response =
        sdmxrest.request {flow: 'EXR', key: 'A.CHF.NOK.SP00.A'}, 'ECB'
      response.should.eventually.equal 'OK'
    it 'offers to execute a request from an SDMX RESTful query string (known service)', ->
      query = nock('http://sdw-wsrest.ecb.europa.eu')
        .get((uri) -> uri.indexOf('EXR') > -1)
        .reply 200, 'OK'
      response = sdmxrest.request 'http://sdw-wsrest.ecb.europa.eu/service/data/EXR'
      response.should.eventually.equal 'OK'
    it 'offers to execute a request from an SDMX RESTful query string (unknown service)', ->
      query = nock('http://test.org')
        .get((uri) -> uri.indexOf('EXR') > -1)
        .reply 200, 'OK'
      response = sdmxrest.request 'http://test.org/data/EXR'
      response.should.eventually.equal 'OK'
    it 'throws an exception in case of issues with a request', ->
      query = nock('http://sdw-wsrest.ecb.europa.eu')
        .get((uri) -> uri.indexOf('TEST') > -1)
        .reply 404
      response = sdmxrest.request {flow: 'TEST'}, 'ECB'
      response.should.be.rejectedWith RangeError
    # A 404 combined with updatedAfter is treated as "no new data", not an error.
    it 'does not throw an exception for a 404 with updatedAfter', ->
      query = nock('http://sdw-wsrest.ecb.europa.eu')
        .get((uri) -> uri.indexOf('ICP') > -1)
        .reply 404
      response = sdmxrest.request \
        {flow: 'ICP', updatedAfter: '2016-01-01T14:54:27Z'}, 'ECB'
      response.should.be.fulfilled
      response.should.not.be.rejected
    it 'throws an exception when the Service URL is invalid', ->
      response = sdmxrest.request {flow: 'ICP'}, {url: 'ws.test'}
      response.should.not.be.fulfilled
      response.should.be.rejected
    it 'adds an accept header to data queries if the service has a default format', ->
      query = nock('http://sdw-wsrest.ecb.europa.eu')
        .matchHeader('accept', (h) ->
          h[0].indexOf('application/vnd.sdmx.data+json') > -1)
        .get((uri) -> uri.indexOf('EXR') > -1)
        .reply 200, 'OK'
      response =
        sdmxrest.request {flow: 'EXR', key: 'A.CHF.NOK.SP00.A'}, 'ECB'
      response.should.eventually.equal 'OK'
    it 'adds an accept header to structure queries if the service has a default format', ->
      query = nock('http://sdw-wsrest.ecb.europa.eu')
        .matchHeader('accept', (h) ->
          h[0].indexOf('application/vnd.sdmx.structure+xml') > -1)
        .get((uri) -> uri.indexOf('codelist') > -1)
        .reply 200, 'OK'
      response =
        sdmxrest.request {resource: 'codelist', id: 'CL_FREQ'}, 'ECB'
      response.should.eventually.equal 'OK'
    it 'adds an accept header to schema queries if the service has a default format', ->
      query = nock('http://sdw-wsrest.ecb.europa.eu')
        .matchHeader('accept', (h) ->
          h[0].indexOf('application/xml') > -1)
        .get((uri) -> uri.indexOf('schema') > -1)
        .reply 200, 'OK'
      response =
        sdmxrest.request {context: 'dataflow', agency: 'ECB', id: 'EXR'}, 'ECB'
      response.should.eventually.equal 'OK'
    it 'adds an accept header to data URLs if the service has a default format', ->
      query = nock('http://sdw-wsrest.ecb.europa.eu')
        .matchHeader('accept', (h) ->
          h[0].indexOf('application/vnd.sdmx.data+json') > -1)
        .get((uri) -> uri.indexOf('EXR') > -1)
        .reply 200, 'OK'
      url = 'http://sdw-wsrest.ecb.europa.eu/service/data/EXR/A..EUR.SP00.A'
      response = sdmxrest.request url
      response.should.eventually.equal 'OK'
    # Client-supplied accept headers must take precedence over service defaults.
    it 'does not overwrite the accept header passed by the client (data)', ->
      query = nock('http://sdw-wsrest.ecb.europa.eu')
        .matchHeader('accept', (h) ->
          h[0].indexOf('application/xml') > -1)
        .get((uri) -> uri.indexOf('EXR') > -1)
        .reply 200, 'OK'
      opts =
        headers:
          accept: 'application/xml'
      response =
        sdmxrest.request {flow: 'EXR', key: 'A.CHF.NOK.SP00.A'}, 'ECB', opts
      response.should.eventually.equal 'OK'
    it 'does not overwrite the accept header passed by the client (structure)', ->
      query = nock('http://sdw-wsrest.ecb.europa.eu')
        .matchHeader('accept', (h) ->
          h[0].indexOf('application/vnd.sdmx.structure+json;version=1.0.0') > -1)
        .get((uri) -> uri.indexOf('codelist') > -1)
        .reply 200, 'OK'
      opts =
        headers:
          accept: 'application/vnd.sdmx.structure+json;version=1.0.0'
      response =
        sdmxrest.request {resource: 'codelist', id: 'CL_FREQ'}, 'ECB', opts
      response.should.eventually.equal 'OK'
    it 'does not overwrite the accept header passed by the client (schema)', ->
      query = nock('http://sdw-wsrest.ecb.europa.eu')
        .matchHeader('accept', (h) ->
          h[0].indexOf('application/vnd.sdmx.structure+xml;version=2.1') > -1)
        .get((uri) -> uri.indexOf('schema') > -1)
        .reply 200, 'OK'
      opts =
        headers:
          accept: 'application/vnd.sdmx.structure+xml;version=2.1'
      response =
        sdmxrest.request {context: 'dataflow', agency: 'ECB', id: 'EXR'}, 'ECB', opts
      response.should.eventually.equal 'OK'
    # Services without a default format (e.g. OECD) fall back to '*/*'.
    it 'does not add an accept header to data queries if the service does not have a default format', ->
      query = nock('http://stats.oecd.org')
        .matchHeader('accept', (h) -> h[0] is '*/*')
        .get((uri) -> uri.indexOf('EO') > -1)
        .reply 200, 'OK'
      response =
        sdmxrest.request {flow: 'EO'}, 'OECD'
      response.should.eventually.equal 'OK'
    it 'does not add an accept header to structure queries if the service does not have a default format', ->
      query = nock('http://stats.oecd.org')
        .matchHeader('accept', (h) -> h[0] is '*/*')
        .get((uri) -> uri.indexOf('codelist') > -1)
        .reply 200, 'OK'
      response =
        sdmxrest.request {resource: 'codelist', id: 'CL_FREQ'}, 'OECD'
      response.should.eventually.equal 'OK'
    it 'does not add an accept header to schema queries if the service does not have a default format', ->
      query = nock('http://stats.oecd.org')
        .matchHeader('accept', (h) -> h[0] is '*/*' )
        .get((uri) -> uri.indexOf('schema') > -1)
        .reply 200, 'OK'
      response =
        sdmxrest.request {context: 'dataflow', agency: 'ECB', id: 'EXR'}, 'OECD'
      response.should.eventually.equal 'OK'
    it 'adds a default user agent to queries', ->
      query = nock('http://sdw-wsrest.ecb.europa.eu')
        .matchHeader('user-agent', (h) ->
          h[0] is 'sdmx-rest4js (https://github.com/sosna/sdmx-rest4js)')
        .get((uri) -> uri.indexOf('EXR') > -1)
        .reply 200, 'OK'
      response =
        sdmxrest.request {flow: 'EXR', key: 'A.CHF.NOK.SP00.A'}, 'ECB'
      response.should.eventually.equal 'OK'
    it 'does not overwrite the user agent passed by the client', ->
      query = nock('http://sdw-wsrest.ecb.europa.eu')
        .matchHeader('user-agent', (h) -> h[0] is 'test')
        .get((uri) -> uri.indexOf('EXR') > -1)
        .reply 200, 'OK'
      opts =
        headers:
          'user-agent': 'test'
      response =
        sdmxrest.request {flow: 'EXR', key: 'A.CHF.NOK.SP00.A'}, 'ECB', opts
      response.should.eventually.equal 'OK'
    it 'adds a default accept-encoding header to queries', ->
      query = nock('http://sdw-wsrest.ecb.europa.eu')
        .matchHeader('accept-encoding', (h) -> h[0] is 'gzip,deflate')
        .get((uri) -> uri.indexOf('EXR') > -1)
        .reply 200, 'OK'
      response =
        sdmxrest.request {flow: 'EXR', key: 'A.CHF.NOK.SP00.A'}, 'ECB'
      response.should.eventually.equal 'OK'
    # With compress: false no accept-encoding header should be sent at all.
    it 'allows disabling content compression', ->
      query = nock('http://sdw-wsrest.ecb.europa.eu')
        .matchHeader('accept-encoding', (h) -> h is undefined)
        .get((uri) -> uri.indexOf('EXR') > -1)
        .reply 200, 'OK'
      opts =
        compress: false
      response =
        sdmxrest.request {flow: 'EXR', key: 'A.CHF.NOK.SP00.A'}, 'ECB', opts
      response.should.eventually.equal 'OK'
  # request2() resolves to the raw response object (status, headers, body
  # accessor) instead of just the body text.
  describe 'when using request2()', ->
    it 'offers a way to retrieve response headers', ->
      query = nock('http://sdw-wsrest.ecb.europa.eu')
        .get((uri) -> uri.indexOf('EXR') > -1)
        .reply 200, 'OK', {'X-My-Headers': 'My Header value'}
      request = sdmxrest.getDataQuery({flow: 'EXR', key: 'A.CHF.EUR.SP00.A'})
      sdmxrest.request2(request, 'ECB').then((response) ->
        response.should.have.property('status').that.equals 200
        response.should.have.property('headers')
        response.should.respondTo 'text'
      )
describe 'when using checkStatus()', ->
it 'throws an errir in case there is no response', ->
it 'throws an error in case there is an issue with the response', ->
request = sdmxrest.getDataQuery({flow: 'TEST'})
test = -> sdmxrest.checkStatus(request, undefined)
should.Throw(test, ReferenceError, 'Not a valid response')
it 'accept codes in the 300 range', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('TEST') > -1)
.reply 306, 'Redirected'
request = sdmxrest.getDataQuery({flow: 'TEST'})
sdmxrest.request2(request, 'ECB').then((response) ->
test = -> sdmxrest.checkStatus(request, response)
should.not.throw(test, RangeError, 'Request failed with error code 306')
)
it 'accept code 100', ->
nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('TEST') > -1)
.reply 100, 'Continue'
request = sdmxrest.getDataQuery({flow: 'TEST'})
sdmxrest.request2(request, 'ECB').then((response) ->
test = -> sdmxrest.checkStatus(request, response)
should.not.throw(test, RangeError, 'Request failed with error code 100')
)
  # Validation of the response Content-Type against the SDMX media types and
  # against the formats requested via the accept header.
  describe 'when using checkMediaType()', ->
    it 'accepts SDMX data formats', ->
      fmt = 'application/vnd.sdmx.data+json;version=1.0.0'
      nock('http://sdw-wsrest.ecb.europa.eu')
        .get((uri) -> uri.indexOf('EXR') > -1)
        .reply 200, 'OK', {'Content-Type': fmt}
      sdmxrest.request2({flow: 'EXR', key: 'A.CHF.EUR.SP00.A'}, 'ECB').then((response) ->
        test = -> sdmxrest.checkMediaType(fmt, response)
        should.not.throw(test, RangeError, 'Not an SDMX format')
      )
    it 'accepts SDMX metadata formats', ->
      fmt = 'application/vnd.sdmx.structure+xml;version=2.1'
      nock('http://sdw-wsrest.ecb.europa.eu')
        .get((uri) -> uri.indexOf('codelist') > -1)
        .reply 200, 'OK', {'Content-Type': fmt}
      sdmxrest.request2({resource: 'codelist'}, 'ECB').then((response) ->
        test = -> sdmxrest.checkMediaType(fmt, response)
        should.not.throw(test, RangeError, 'Not an SDMX format')
      )
    # Generic (non vnd.sdmx) types such as application/xml are also allowed.
    it 'accepts generic formats', ->
      fmt = 'application/xml'
      nock('http://sdw-wsrest.ecb.europa.eu')
        .get((uri) -> uri.indexOf('codelist') > -1)
        .reply 200, 'OK', {'Content-Type': fmt}
      sdmxrest.request2({resource: 'codelist'}, 'ECB').then((response) ->
        test = -> sdmxrest.checkMediaType(fmt, response)
        should.not.throw(test, RangeError, 'Not an SDMX format')
      )
    it 'throws an error in case the format is not an SDMX one', ->
      fmt = 'application/vnd.test.data+json'
      nock('http://sdw-wsrest.ecb.europa.eu')
        .get((uri) -> uri.indexOf('EXR') > -1)
        .reply 200, 'OK', {'Content-Type': fmt}
      sdmxrest.request2({flow: 'EXR'}, 'ECB').then((response) ->
        test = -> sdmxrest.checkMediaType(fmt, response)
        should.Throw(test, RangeError, 'Not an SDMX format: ' + fmt))
    # No Content-Type header in the mocked reply at all.
    it 'throws an error in case no format is specified', ->
      fmt = 'application/xml'
      nock('http://sdw-wsrest.ecb.europa.eu')
        .get((uri) -> uri.indexOf('EXR') > -1)
        .reply 200, 'OK'
      sdmxrest.request2({flow: 'EXR'}, 'ECB').then((response) ->
        test = -> sdmxrest.checkMediaType(fmt, response)
        should.Throw(test, RangeError, 'Not an SDMX format: null'))
    it 'throws an error in case the format is not the requested one', ->
      fmt = 'application/vnd.sdmx.data+json;version=1.0.0'
      nock('http://sdw-wsrest.ecb.europa.eu')
        .get((uri) -> uri.indexOf('EXR') > -1)
        .reply 200, 'OK', {'Content-Type': 'application/xml'}
      opts =
        headers:
          accept: fmt
      sdmxrest.request2({flow: 'EXR'}, 'ECB', opts).then((response) ->
        test = -> sdmxrest.checkMediaType(fmt, response)
        should.Throw(test, RangeError, 'Wrong format: requested ' + fmt + ' but got application/xml'))
    it 'Does not throw an error in case the received format is the requested one', ->
      fmt = 'application/vnd.sdmx.data+json;version=1.0.0'
      nock('http://sdw-wsrest.ecb.europa.eu')
        .get((uri) -> uri.indexOf('EXR') > -1)
        .reply 200, 'OK', {'Content-Type': fmt}
      opts =
        headers:
          accept: fmt
      sdmxrest.request2({flow: 'EXR'}, 'ECB', opts).then((response) ->
        test = -> sdmxrest.checkMediaType(fmt, response)
        should.not.Throw(test))
    # Whitespace around media type parameters must not matter.
    it 'Does not throw an error in case the only difference is the space character', ->
      fmt1 = 'application/vnd.sdmx.genericdata+xml;version=2.1'
      fmt2 = 'application/vnd.sdmx.genericdata+xml; version=2.1'
      nock('http://sdw-wsrest.ecb.europa.eu')
        .get((uri) -> uri.indexOf('EXR') > -1)
        .reply 200, 'OK', {'Content-Type': fmt2}
      opts =
        headers:
          accept: fmt1
      sdmxrest.request2({flow: 'EXR'}, 'ECB', opts).then((response) ->
        test = -> sdmxrest.checkMediaType(fmt1, response)
        should.not.Throw(test))
    # The accept header may list several types with quality factors.
    it 'Does not throw an error in case the received format is one of the requested ones', ->
      fmt = 'application/vnd.sdmx.data+json;version=1.0.0, application/json;q=0.9, text/csv;q=0.5, */*;q=0.4'
      nock('http://sdw-wsrest.ecb.europa.eu')
        .get((uri) -> uri.indexOf('EXR') > -1)
        .reply 200, 'OK', {'Content-Type': 'text/csv'}
      opts =
        headers:
          accept: fmt
      sdmxrest.request2({flow: 'EXR'}, 'ECB', opts).then((response) ->
        test = -> sdmxrest.checkMediaType(fmt, response)
        should.not.Throw(test))
    it 'Throws an error in case the received format is not one of the requested ones', ->
      fmt = 'application/vnd.sdmx.data+json;version=1.0.0, application/json;q=0.9, text/csv;q=0.5, */*;q=0.4'
      nock('http://sdw-wsrest.ecb.europa.eu')
        .get((uri) -> uri.indexOf('EXR') > -1)
        .reply 200, 'OK', {'Content-Type': 'application/xml'}
      opts =
        headers:
          accept: fmt
      sdmxrest.request2({flow: 'EXR'}, 'ECB', opts).then((response) ->
        test = -> sdmxrest.checkMediaType(fmt, response)
        should.Throw(test, RangeError))
| 17966 | sdmxrest = require '../src/index'
{ApiVersion} = require '../src/utils/api-version'
chai = require 'chai'
chaiAsPromised = require 'chai-as-promised'
chai.use chaiAsPromised
should = chai.should()
nock = require 'nock'
describe 'API', ->
it 'offers the expected functions and objects', ->
sdmxrest.should.have.property 'getService'
sdmxrest.should.have.property('services').that.is.an 'array'
sdmxrest.should.have.property 'getDataQuery'
sdmxrest.should.have.property 'getMetadataQuery'
sdmxrest.should.have.property 'getAvailabilityQuery'
sdmxrest.should.have.property 'getSchemaQuery'
sdmxrest.should.have.property 'getUrl'
sdmxrest.should.have.property 'request'
sdmxrest.should.have.property 'checkStatus'
sdmxrest.should.have.property 'checkMediaType'
sdmxrest.should.have.property('data').that.is.an 'object'
sdmxrest.should.have.property('metadata').that.is.an 'object'
sdmxrest.should.have.property('availability').that.is.an 'object'
sdmxrest.should.have.property('schema').that.is.an 'object'
sdmxrest.should.have.property('utils').that.is.an 'object'
sdmxrest.should.have.property('data').that.is.an 'object'
sdmxrest.data.should.have.property('DataFormat').that.is.not.undefined
sdmxrest.data.should.have.property('DataDetail').that.is.not.undefined
sdmxrest.metadata.should.have.property('MetadataDetail')
.that.is.not.undefined
sdmxrest.metadata.should.have.property('MetadataFormat')
.that.is.not.undefined
sdmxrest.metadata.should.have.property('MetadataReferences')
.that.is.not.undefined
sdmxrest.availability.should.have.property('AvailabilityMode')
.that.is.not.undefined
sdmxrest.availability.should.have.property('AvailabilityReferences')
.that.is.not.undefined
sdmxrest.metadata.should.have.property('MetadataType').that.is.not.undefined
sdmxrest.schema.should.have.property('SchemaContext').that.is.not.undefined
sdmxrest.schema.should.have.property('SchemaFormat').that.is.not.undefined
sdmxrest.utils.should.have.property('ApiVersion').that.is.not.undefined
sdmxrest.utils.should.have.property('ApiResources').that.is.not.undefined
sdmxrest.utils.should.have.property('SdmxPatterns').that.is.not.undefined
sdmxrest.utils.SdmxPatterns.should.have.property('IDType')
.that.is.a 'regexp'
  # Resolution of service identifiers and property objects into service
  # descriptors (id, name, url, api version).
  describe 'when using getService()', ->
    it 'offers to use existing services', ->
      service = sdmxrest.getService 'ECB'
      service.should.be.an 'object'
      service.should.have.property('id').that.equals 'ECB'
      service.should.have.property('name').that.equals 'European Central Bank'
      service.should.have.property('url').that.contains 'sdw-wsrest'
      service.should.have.property('api').that.is.not.undefined
    it 'offers to create services from properties', ->
      input = {
        id: 'TEST'
        url: 'http://test.com'
      }
      service = sdmxrest.getService input
      service.should.be.an 'object'
      service.should.have.property('id').that.equals input.id
      service.should.have.property('name').that.is.undefined
      service.should.have.property('url').that.equals input.url
      # An unspecified API version defaults to the most recent one.
      service.should.have.property('api').that.equals ApiVersion.LATEST
    it 'fails if the requested service is unknown', ->
      test = -> sdmxrest.getService 'UNKNOWN'
      should.Throw(test, ReferenceError,
        'is not in the list of predefined services')
    it 'fails if the input is not of the expected type', ->
      test = -> sdmxrest.getService 2
      should.Throw(test, TypeError, 'Invalid type of ')
      test = -> sdmxrest.getService undefined
      should.Throw(test, TypeError, 'Invalid type of ')
      test = -> sdmxrest.getService []
      should.Throw(test, TypeError, 'Invalid type of ')
describe 'when using services', ->
it 'list some services', ->
sdmxrest.services.should.be.an 'array'
sdmxrest.services.should.have.property('length').that.is.gte 5
it 'should contain known services', ->
sdmxrest.services.should.include.members([sdmxrest.getService 'ECB_S'])
describe 'when using getDataQuery()', ->
it 'offers to create a data query from properties', ->
input = {
flow: 'EXR'
key: '<KEY>'
}
query = sdmxrest.getDataQuery input
query.should.be.an 'object'
query.should.have.property('flow').that.equals input.flow
query.should.have.property('key').that.equals input.key
query.should.have.property('provider').that.equals 'all'
query.should.have.property('start').that.is.undefined
query.should.have.property('end').that.is.undefined
query.should.have.property('updatedAfter').that.is.undefined
query.should.have.property('firstNObs').that.is.undefined
query.should.have.property('lastNObs').that.is.undefined
query.should.have.property('obsDimension').that.is.undefined
query.should.have.property('detail').that.equals 'full'
query.should.have.property('history').that.is.false
it 'fails if the input is not of the expected type', ->
test = -> sdmxrest.getDataQuery undefined
should.Throw(test, Error, 'Not a valid data query')
test = -> sdmxrest.getDataQuery {test: 'TEST'}
should.Throw(test, Error, 'Not a valid data query')
  # Building of structural metadata queries from plain property objects.
  describe 'when using getMetadataQuery()', ->
    it 'offers to create a metadata query from properties', ->
      input = {
        resource: 'codelist'
        id: 'CL_FREQ'
      }
      query = sdmxrest.getMetadataQuery input
      query.should.be.an 'object'
      query.should.have.property('resource').that.equals input.resource
      query.should.have.property('id').that.equals input.id
      # Unset properties fall back to the SDMX RESTful API defaults.
      query.should.have.property('agency').that.equals 'all'
      query.should.have.property('version').that.equals 'latest'
      query.should.have.property('item').that.equals 'all'
      query.should.have.property('detail').that.equals 'full'
      query.should.have.property('references').that.equals 'none'
    it 'fails if the input is not of the expected type', ->
      test = -> sdmxrest.getMetadataQuery undefined
      should.Throw(test, Error, 'Not a valid metadata query')
      test = -> sdmxrest.getMetadataQuery {test: 'TEST'}
      should.Throw(test, Error, 'Not a valid metadata query')
describe 'when using getAvailabilityQuery()', ->
it 'offers to create an availability query from properties', ->
input = {
flow: 'EXR'
key: '<KEY>'
}
query = sdmxrest.getAvailabilityQuery input
query.should.be.an 'object'
query.should.have.property('flow').that.equals input.flow
query.should.have.property('key').that.equals input.key
query.should.have.property('provider').that.equals 'all'
query.should.have.property('component').that.equals 'all'
query.should.have.property('start').that.is.undefined
query.should.have.property('end').that.is.undefined
query.should.have.property('updatedAfter').that.is.undefined
query.should.have.property('mode').that.equals 'exact'
query.should.have.property('references').that.equals 'none'
it 'fails if the input is not of the expected type', ->
test = -> sdmxrest.getAvailabilityQuery undefined
should.Throw(test, Error, 'Not a valid availability query')
test = -> sdmxrest.getAvailabilityQuery {test: 'TEST'}
should.Throw(test, Error, 'Not a valid availability query')
  # Building of schema queries from plain property objects.
  describe 'when using getSchemaQuery()', ->
    it 'offers to create a schema query from properties', ->
      input = {
        context: 'datastructure'
        agency: 'BIS'
        id: 'BIS_CBS'
      }
      query = sdmxrest.getSchemaQuery input
      query.should.be.an 'object'
      query.should.have.property('context').that.equals input.context
      query.should.have.property('id').that.equals input.id
      query.should.have.property('agency').that.equals input.agency
      # Unset properties fall back to the SDMX RESTful API defaults.
      query.should.have.property('version').that.equals 'latest'
      query.should.have.property('explicit').that.is.false
      query.should.have.property('obsDimension').that.is.undefined
    it 'fails if the input is not of the expected type', ->
      test = -> sdmxrest.getSchemaQuery undefined
      should.Throw(test, Error, 'Not a valid schema query')
      test = -> sdmxrest.getSchemaQuery {test: 'TEST'}
      should.Throw(test, Error, 'Not a valid schema query')
describe 'when using getUrl()', ->
it 'creates a URL from a data query and a service objects', ->
query = sdmxrest.getDataQuery {flow: 'EXR', key: '<KEY>'}
service = sdmxrest.getService 'ECB'
url = sdmxrest.getUrl query, service
url.should.be.a 'string'
url.should.contain service.url
url.should.contain query.flow
url.should.contain query.key
it 'creates a URL from a metadata query and a service objects', ->
url = sdmxrest.getUrl {resource: 'codelist', id: 'CL_FREQ'}, 'ECB'
url.should.be.a 'string'
url.should.contain 'sdw-wsrest.ecb.europa.eu'
url.should.contain 'codelist'
url.should.contain 'CL_FREQ'
it 'creates a URL from a schema query and a service objects', ->
q = {'context': 'dataflow', 'agency': 'BIS', 'id': 'CBS'}
url = sdmxrest.getUrl q, 'ECB'
url.should.be.a 'string'
url.should.contain 'sdw-wsrest.ecb.europa.eu'
url.should.contain 'schema'
url.should.contain 'dataflow/BIS/CBS'
it 'creates a URL from an availability query and a service objects', ->
input = {
flow: 'EXR'
key: '<KEY>'
}
q = sdmxrest.getAvailabilityQuery input
s = sdmxrest.getService({url: 'http://ws-entry-point'});
url = sdmxrest.getUrl q, s
url.should.be.a 'string'
url.should.contain 'http://ws-entry-point'
url.should.contain 'availableconstraint'
url.should.contain 'EXR/A..EUR.SP00.A'
it 'creates a URL from an availability and service objects (mode)', ->
q = {
flow: 'EXR'
key: '<KEY>'
mode: 'exact'
}
s = sdmxrest.getService({url: 'http://ws-entry-point'});
url = sdmxrest.getUrl q, s
url.should.be.a 'string'
url.should.contain 'http://ws-entry-point'
url.should.contain 'availableconstraint'
url.should.contain 'EXR/A..EUR.SP00.A'
url.should.contain 'mode=exact'
it 'creates a URL from an availability and a service objects (component)', ->
q = {
flow: 'EXR'
key: '<KEY>'
component: 'FREQ'
}
s = sdmxrest.getService({url: 'http://ws-entry-point'});
url = sdmxrest.getUrl q, s
url.should.be.a 'string'
url.should.contain 'http://ws-entry-point'
url.should.contain 'availableconstraint'
url.should.contain 'EXR/A..EUR.SP00.A'
url.should.contain 'FREQ'
it 'creates a URL from an availability and a service objects (references)', ->
q = {
flow: 'EXR'
key: '<KEY>'
references: 'all'
}
s = sdmxrest.getService({url: 'http://ws-entry-point'});
url = sdmxrest.getUrl q, s
url.should.be.a 'string'
url.should.contain 'http://ws-entry-point'
url.should.contain 'availableconstraint'
url.should.contain 'EXR/A..EUR.SP00.A'
url.should.contain 'references=all'
it 'fails if the input is not of the expected type', ->
test = -> sdmxrest.getUrl undefined, sdmxrest.getService 'ECB'
should.Throw(test, Error, 'Not a valid query')
test = -> sdmxrest.getUrl {}, sdmxrest.getService 'ECB'
should.Throw(test, Error, 'Not a valid query')
query = sdmxrest.getDataQuery {flow: 'EXR', key: '<KEY>'}
test = -> sdmxrest.getUrl query, sdmxrest.getService 'TEST'
should.Throw(test, Error, 'not in the list of predefined services')
test = -> sdmxrest.getUrl query
should.Throw(test, Error, 'Not a valid service')
describe 'when using request()', ->
it 'offers to execute a request from a query and service objects', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
response =
sdmxrest.request {flow: 'EXR', key: '<KEY>'}, 'ECB'
response.should.eventually.equal 'OK'
it 'offers to execute a request from an SDMX RESTful query string (known service)', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
response = sdmxrest.request 'http://sdw-wsrest.ecb.europa.eu/service/data/EXR'
response.should.eventually.equal 'OK'
it 'offers to execute a request from an SDMX RESTful query string (unknown service)', ->
query = nock('http://test.org')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
response = sdmxrest.request 'http://test.org/data/EXR'
response.should.eventually.equal 'OK'
it 'throws an exception in case of issues with a request', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('TEST') > -1)
.reply 404
response = sdmxrest.request {flow: 'TEST'}, 'ECB'
response.should.be.rejectedWith RangeError
it 'does not throw an exception for a 404 with updatedAfter', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('ICP') > -1)
.reply 404
response = sdmxrest.request \
{flow: 'ICP', updatedAfter: '2016-01-01T14:54:27Z'}, 'ECB'
response.should.be.fulfilled
response.should.not.be.rejected
it 'throws an exception when the Service URL is invalid', ->
response = sdmxrest.request {flow: 'ICP'}, {url: 'ws.test'}
response.should.not.be.fulfilled
response.should.be.rejected
it 'adds an accept header to data queries if the service has a default format', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('accept', (h) ->
h[0].indexOf('application/vnd.sdmx.data+json') > -1)
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
response =
sdmxrest.request {flow: 'EXR', key: '<KEY>'}, 'ECB'
response.should.eventually.equal 'OK'
it 'adds an accept header to structure queries if the service has a default format', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('accept', (h) ->
h[0].indexOf('application/vnd.sdmx.structure+xml') > -1)
.get((uri) -> uri.indexOf('codelist') > -1)
.reply 200, 'OK'
response =
sdmxrest.request {resource: 'codelist', id: 'CL_FREQ'}, 'ECB'
response.should.eventually.equal 'OK'
it 'adds an accept header to schema queries if the service has a default format', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('accept', (h) ->
h[0].indexOf('application/xml') > -1)
.get((uri) -> uri.indexOf('schema') > -1)
.reply 200, 'OK'
response =
sdmxrest.request {context: 'dataflow', agency: 'ECB', id: 'EXR'}, 'ECB'
response.should.eventually.equal 'OK'
it 'adds an accept header to data URLs if the service has a default format', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('accept', (h) ->
h[0].indexOf('application/vnd.sdmx.data+json') > -1)
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
url = 'http://sdw-wsrest.ecb.europa.eu/service/data/EXR/A..EUR.SP00.A'
response = sdmxrest.request url
response.should.eventually.equal 'OK'
it 'does not overwrite the accept header passed by the client (data)', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('accept', (h) ->
h[0].indexOf('application/xml') > -1)
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
opts =
headers:
accept: 'application/xml'
response =
sdmxrest.request {flow: 'EXR', key: '<KEY>'}, 'ECB', opts
response.should.eventually.equal 'OK'
it 'does not overwrite the accept header passed by the client (structure)', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('accept', (h) ->
h[0].indexOf('application/vnd.sdmx.structure+json;version=1.0.0') > -1)
.get((uri) -> uri.indexOf('codelist') > -1)
.reply 200, 'OK'
opts =
headers:
accept: 'application/vnd.sdmx.structure+json;version=1.0.0'
response =
sdmxrest.request {resource: 'codelist', id: 'CL_FREQ'}, 'ECB', opts
response.should.eventually.equal 'OK'
it 'does not overwrite the accept header passed by the client (schema)', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('accept', (h) ->
h[0].indexOf('application/vnd.sdmx.structure+xml;version=2.1') > -1)
.get((uri) -> uri.indexOf('schema') > -1)
.reply 200, 'OK'
opts =
headers:
accept: 'application/vnd.sdmx.structure+xml;version=2.1'
response =
sdmxrest.request {context: 'dataflow', agency: 'ECB', id: 'EXR'}, 'ECB', opts
response.should.eventually.equal 'OK'
it 'does not add an accept header to data queries if the service does not have a default format', ->
query = nock('http://stats.oecd.org')
.matchHeader('accept', (h) -> h[0] is '*/*')
.get((uri) -> uri.indexOf('EO') > -1)
.reply 200, 'OK'
response =
sdmxrest.request {flow: 'EO'}, 'OECD'
response.should.eventually.equal 'OK'
it 'does not add an accept header to structure queries if the service does not have a default format', ->
query = nock('http://stats.oecd.org')
.matchHeader('accept', (h) -> h[0] is '*/*')
.get((uri) -> uri.indexOf('codelist') > -1)
.reply 200, 'OK'
response =
sdmxrest.request {resource: 'codelist', id: 'CL_FREQ'}, 'OECD'
response.should.eventually.equal 'OK'
it 'does not add an accept header to schema queries if the service does not have a default format', ->
query = nock('http://stats.oecd.org')
.matchHeader('accept', (h) -> h[0] is '*/*' )
.get((uri) -> uri.indexOf('schema') > -1)
.reply 200, 'OK'
response =
sdmxrest.request {context: 'dataflow', agency: 'ECB', id: 'EXR'}, 'OECD'
response.should.eventually.equal 'OK'
it 'adds a default user agent to queries', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('user-agent', (h) ->
h[0] is 'sdmx-rest4js (https://github.com/sosna/sdmx-rest4js)')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
response =
sdmxrest.request {flow: 'EXR', key: '<KEY>'}, 'ECB'
response.should.eventually.equal 'OK'
it 'does not overwrite the user agent passed by the client', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('user-agent', (h) -> h[0] is 'test')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
opts =
headers:
'user-agent': 'test'
response =
sdmxrest.request {flow: 'EXR', key: '<KEY>'}, 'ECB', opts
response.should.eventually.equal 'OK'
it 'adds a default accept-encoding header to queries', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('accept-encoding', (h) -> h[0] is 'gzip,deflate')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
response =
sdmxrest.request {flow: 'EXR', key: '<KEY>'}, 'ECB'
response.should.eventually.equal 'OK'
it 'allows disabling content compression', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('accept-encoding', (h) -> h is undefined)
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
opts =
compress: false
response =
sdmxrest.request {flow: 'EXR', key: '<KEY>'}, 'ECB', opts
response.should.eventually.equal 'OK'
describe 'when using request2()', ->
it 'offers a way to retrieve response headers', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK', {'X-My-Headers': 'My Header value'}
request = sdmxrest.getDataQuery({flow: 'EXR', key: '<KEY>'})
sdmxrest.request2(request, 'ECB').then((response) ->
response.should.have.property('status').that.equals 200
response.should.have.property('headers')
response.should.respondTo 'text'
)
describe 'when using checkStatus()', ->
it 'throws an errir in case there is no response', ->
it 'throws an error in case there is an issue with the response', ->
request = sdmxrest.getDataQuery({flow: 'TEST'})
test = -> sdmxrest.checkStatus(request, undefined)
should.Throw(test, ReferenceError, 'Not a valid response')
it 'accept codes in the 300 range', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('TEST') > -1)
.reply 306, 'Redirected'
request = sdmxrest.getDataQuery({flow: 'TEST'})
sdmxrest.request2(request, 'ECB').then((response) ->
test = -> sdmxrest.checkStatus(request, response)
should.not.throw(test, RangeError, 'Request failed with error code 306')
)
it 'accept code 100', ->
nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('TEST') > -1)
.reply 100, 'Continue'
request = sdmxrest.getDataQuery({flow: 'TEST'})
sdmxrest.request2(request, 'ECB').then((response) ->
test = -> sdmxrest.checkStatus(request, response)
should.not.throw(test, RangeError, 'Request failed with error code 100')
)
describe 'when using checkMediaType()', ->
it 'accepts SDMX data formats', ->
fmt = 'application/vnd.sdmx.data+json;version=1.0.0'
nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK', {'Content-Type': fmt}
sdmxrest.request2({flow: 'EXR', key: '<KEY>'}, 'ECB').then((response) ->
test = -> sdmxrest.checkMediaType(fmt, response)
should.not.throw(test, RangeError, 'Not an SDMX format')
)
    # Structural metadata media types must pass the media type check.
    it 'accepts SDMX metadata formats', ->
      fmt = 'application/vnd.sdmx.structure+xml;version=2.1'
      nock('http://sdw-wsrest.ecb.europa.eu')
        .get((uri) -> uri.indexOf('codelist') > -1)
        .reply 200, 'OK', {'Content-Type': fmt}
      sdmxrest.request2({resource: 'codelist'}, 'ECB').then((response) ->
        test = -> sdmxrest.checkMediaType(fmt, response)
        should.not.throw(test, RangeError, 'Not an SDMX format')
      )
    # Generic (non vnd.sdmx) types such as application/xml are also allowed.
    it 'accepts generic formats', ->
      fmt = 'application/xml'
      nock('http://sdw-wsrest.ecb.europa.eu')
        .get((uri) -> uri.indexOf('codelist') > -1)
        .reply 200, 'OK', {'Content-Type': fmt}
      sdmxrest.request2({resource: 'codelist'}, 'ECB').then((response) ->
        test = -> sdmxrest.checkMediaType(fmt, response)
        should.not.throw(test, RangeError, 'Not an SDMX format')
      )
it 'throws an error in case the format is not an SDMX one', ->
fmt = 'application/vnd.test.data+json'
nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK', {'Content-Type': fmt}
sdmxrest.request2({flow: 'EXR'}, 'ECB').then((response) ->
test = -> sdmxrest.checkMediaType(fmt, response)
should.Throw(test, RangeError, 'Not an SDMX format: ' + fmt))
it 'throws an error in case no format is specified', ->
fmt = 'application/xml'
nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
sdmxrest.request2({flow: 'EXR'}, 'ECB').then((response) ->
test = -> sdmxrest.checkMediaType(fmt, response)
should.Throw(test, RangeError, 'Not an SDMX format: null'))
it 'throws an error in case the format is not the requested one', ->
fmt = 'application/vnd.sdmx.data+json;version=1.0.0'
nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK', {'Content-Type': 'application/xml'}
opts =
headers:
accept: fmt
sdmxrest.request2({flow: 'EXR'}, 'ECB', opts).then((response) ->
test = -> sdmxrest.checkMediaType(fmt, response)
should.Throw(test, RangeError, 'Wrong format: requested ' + fmt + ' but got application/xml'))
it 'Does not throw an error in case the received format is the requested one', ->
fmt = 'application/vnd.sdmx.data+json;version=1.0.0'
nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK', {'Content-Type': fmt}
opts =
headers:
accept: fmt
sdmxrest.request2({flow: 'EXR'}, 'ECB', opts).then((response) ->
test = -> sdmxrest.checkMediaType(fmt, response)
should.not.Throw(test))
it 'Does not throw an error in case the only difference is the space character', ->
fmt1 = 'application/vnd.sdmx.genericdata+xml;version=2.1'
fmt2 = 'application/vnd.sdmx.genericdata+xml; version=2.1'
nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK', {'Content-Type': fmt2}
opts =
headers:
accept: fmt1
sdmxrest.request2({flow: 'EXR'}, 'ECB', opts).then((response) ->
test = -> sdmxrest.checkMediaType(fmt1, response)
should.not.Throw(test))
it 'Does not throw an error in case the received format is one of the requested ones', ->
fmt = 'application/vnd.sdmx.data+json;version=1.0.0, application/json;q=0.9, text/csv;q=0.5, */*;q=0.4'
nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK', {'Content-Type': 'text/csv'}
opts =
headers:
accept: fmt
sdmxrest.request2({flow: 'EXR'}, 'ECB', opts).then((response) ->
test = -> sdmxrest.checkMediaType(fmt, response)
should.not.Throw(test))
it 'Throws an error in case the received format is not one of the requested ones', ->
fmt = 'application/vnd.sdmx.data+json;version=1.0.0, application/json;q=0.9, text/csv;q=0.5, */*;q=0.4'
nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK', {'Content-Type': 'application/xml'}
opts =
headers:
accept: fmt
sdmxrest.request2({flow: 'EXR'}, 'ECB', opts).then((response) ->
test = -> sdmxrest.checkMediaType(fmt, response)
should.Throw(test, RangeError))
| true | sdmxrest = require '../src/index'
{ApiVersion} = require '../src/utils/api-version'
chai = require 'chai'
chaiAsPromised = require 'chai-as-promised'
chai.use chaiAsPromised
should = chai.should()
nock = require 'nock'
describe 'API', ->
it 'offers the expected functions and objects', ->
sdmxrest.should.have.property 'getService'
sdmxrest.should.have.property('services').that.is.an 'array'
sdmxrest.should.have.property 'getDataQuery'
sdmxrest.should.have.property 'getMetadataQuery'
sdmxrest.should.have.property 'getAvailabilityQuery'
sdmxrest.should.have.property 'getSchemaQuery'
sdmxrest.should.have.property 'getUrl'
sdmxrest.should.have.property 'request'
sdmxrest.should.have.property 'checkStatus'
sdmxrest.should.have.property 'checkMediaType'
sdmxrest.should.have.property('data').that.is.an 'object'
sdmxrest.should.have.property('metadata').that.is.an 'object'
sdmxrest.should.have.property('availability').that.is.an 'object'
sdmxrest.should.have.property('schema').that.is.an 'object'
sdmxrest.should.have.property('utils').that.is.an 'object'
sdmxrest.should.have.property('data').that.is.an 'object'
sdmxrest.data.should.have.property('DataFormat').that.is.not.undefined
sdmxrest.data.should.have.property('DataDetail').that.is.not.undefined
sdmxrest.metadata.should.have.property('MetadataDetail')
.that.is.not.undefined
sdmxrest.metadata.should.have.property('MetadataFormat')
.that.is.not.undefined
sdmxrest.metadata.should.have.property('MetadataReferences')
.that.is.not.undefined
sdmxrest.availability.should.have.property('AvailabilityMode')
.that.is.not.undefined
sdmxrest.availability.should.have.property('AvailabilityReferences')
.that.is.not.undefined
sdmxrest.metadata.should.have.property('MetadataType').that.is.not.undefined
sdmxrest.schema.should.have.property('SchemaContext').that.is.not.undefined
sdmxrest.schema.should.have.property('SchemaFormat').that.is.not.undefined
sdmxrest.utils.should.have.property('ApiVersion').that.is.not.undefined
sdmxrest.utils.should.have.property('ApiResources').that.is.not.undefined
sdmxrest.utils.should.have.property('SdmxPatterns').that.is.not.undefined
sdmxrest.utils.SdmxPatterns.should.have.property('IDType')
.that.is.a 'regexp'
describe 'when using getService()', ->
it 'offers to use existing services', ->
service = sdmxrest.getService 'ECB'
service.should.be.an 'object'
service.should.have.property('id').that.equals 'ECB'
service.should.have.property('name').that.equals 'European Central Bank'
service.should.have.property('url').that.contains 'sdw-wsrest'
service.should.have.property('api').that.is.not.undefined
it 'offers to create services from properties', ->
input = {
id: 'TEST'
url: 'http://test.com'
}
service = sdmxrest.getService input
service.should.be.an 'object'
service.should.have.property('id').that.equals input.id
service.should.have.property('name').that.is.undefined
service.should.have.property('url').that.equals input.url
service.should.have.property('api').that.equals ApiVersion.LATEST
it 'fails if the requested service is unknown', ->
test = -> sdmxrest.getService 'UNKNOWN'
should.Throw(test, ReferenceError,
'is not in the list of predefined services')
it 'fails if the input is not of the expected type', ->
test = -> sdmxrest.getService 2
should.Throw(test, TypeError, 'Invalid type of ')
test = -> sdmxrest.getService undefined
should.Throw(test, TypeError, 'Invalid type of ')
test = -> sdmxrest.getService []
should.Throw(test, TypeError, 'Invalid type of ')
describe 'when using services', ->
it 'list some services', ->
sdmxrest.services.should.be.an 'array'
sdmxrest.services.should.have.property('length').that.is.gte 5
it 'should contain known services', ->
sdmxrest.services.should.include.members([sdmxrest.getService 'ECB_S'])
describe 'when using getDataQuery()', ->
it 'offers to create a data query from properties', ->
input = {
flow: 'EXR'
key: 'PI:KEY:<KEY>END_PI'
}
query = sdmxrest.getDataQuery input
query.should.be.an 'object'
query.should.have.property('flow').that.equals input.flow
query.should.have.property('key').that.equals input.key
query.should.have.property('provider').that.equals 'all'
query.should.have.property('start').that.is.undefined
query.should.have.property('end').that.is.undefined
query.should.have.property('updatedAfter').that.is.undefined
query.should.have.property('firstNObs').that.is.undefined
query.should.have.property('lastNObs').that.is.undefined
query.should.have.property('obsDimension').that.is.undefined
query.should.have.property('detail').that.equals 'full'
query.should.have.property('history').that.is.false
it 'fails if the input is not of the expected type', ->
test = -> sdmxrest.getDataQuery undefined
should.Throw(test, Error, 'Not a valid data query')
test = -> sdmxrest.getDataQuery {test: 'TEST'}
should.Throw(test, Error, 'Not a valid data query')
describe 'when using getMetadataQuery()', ->
it 'offers to create a metadata query from properties', ->
input = {
resource: 'codelist'
id: 'CL_FREQ'
}
query = sdmxrest.getMetadataQuery input
query.should.be.an 'object'
query.should.have.property('resource').that.equals input.resource
query.should.have.property('id').that.equals input.id
query.should.have.property('agency').that.equals 'all'
query.should.have.property('version').that.equals 'latest'
query.should.have.property('item').that.equals 'all'
query.should.have.property('detail').that.equals 'full'
query.should.have.property('references').that.equals 'none'
it 'fails if the input is not of the expected type', ->
test = -> sdmxrest.getMetadataQuery undefined
should.Throw(test, Error, 'Not a valid metadata query')
test = -> sdmxrest.getMetadataQuery {test: 'TEST'}
should.Throw(test, Error, 'Not a valid metadata query')
describe 'when using getAvailabilityQuery()', ->
it 'offers to create an availability query from properties', ->
input = {
flow: 'EXR'
key: 'PI:KEY:<KEY>END_PI'
}
query = sdmxrest.getAvailabilityQuery input
query.should.be.an 'object'
query.should.have.property('flow').that.equals input.flow
query.should.have.property('key').that.equals input.key
query.should.have.property('provider').that.equals 'all'
query.should.have.property('component').that.equals 'all'
query.should.have.property('start').that.is.undefined
query.should.have.property('end').that.is.undefined
query.should.have.property('updatedAfter').that.is.undefined
query.should.have.property('mode').that.equals 'exact'
query.should.have.property('references').that.equals 'none'
it 'fails if the input is not of the expected type', ->
test = -> sdmxrest.getAvailabilityQuery undefined
should.Throw(test, Error, 'Not a valid availability query')
test = -> sdmxrest.getAvailabilityQuery {test: 'TEST'}
should.Throw(test, Error, 'Not a valid availability query')
describe 'when using getSchemaQuery()', ->
it 'offers to create a schema query from properties', ->
input = {
context: 'datastructure'
agency: 'BIS'
id: 'BIS_CBS'
}
query = sdmxrest.getSchemaQuery input
query.should.be.an 'object'
query.should.have.property('context').that.equals input.context
query.should.have.property('id').that.equals input.id
query.should.have.property('agency').that.equals input.agency
query.should.have.property('version').that.equals 'latest'
query.should.have.property('explicit').that.is.false
query.should.have.property('obsDimension').that.is.undefined
it 'fails if the input is not of the expected type', ->
test = -> sdmxrest.getSchemaQuery undefined
should.Throw(test, Error, 'Not a valid schema query')
test = -> sdmxrest.getSchemaQuery {test: 'TEST'}
should.Throw(test, Error, 'Not a valid schema query')
describe 'when using getUrl()', ->
it 'creates a URL from a data query and a service objects', ->
query = sdmxrest.getDataQuery {flow: 'EXR', key: 'PI:KEY:<KEY>END_PI'}
service = sdmxrest.getService 'ECB'
url = sdmxrest.getUrl query, service
url.should.be.a 'string'
url.should.contain service.url
url.should.contain query.flow
url.should.contain query.key
it 'creates a URL from a metadata query and a service objects', ->
url = sdmxrest.getUrl {resource: 'codelist', id: 'CL_FREQ'}, 'ECB'
url.should.be.a 'string'
url.should.contain 'sdw-wsrest.ecb.europa.eu'
url.should.contain 'codelist'
url.should.contain 'CL_FREQ'
it 'creates a URL from a schema query and a service objects', ->
q = {'context': 'dataflow', 'agency': 'BIS', 'id': 'CBS'}
url = sdmxrest.getUrl q, 'ECB'
url.should.be.a 'string'
url.should.contain 'sdw-wsrest.ecb.europa.eu'
url.should.contain 'schema'
url.should.contain 'dataflow/BIS/CBS'
it 'creates a URL from an availability query and a service objects', ->
input = {
flow: 'EXR'
key: 'PI:KEY:<KEY>END_PI'
}
q = sdmxrest.getAvailabilityQuery input
s = sdmxrest.getService({url: 'http://ws-entry-point'});
url = sdmxrest.getUrl q, s
url.should.be.a 'string'
url.should.contain 'http://ws-entry-point'
url.should.contain 'availableconstraint'
url.should.contain 'EXR/A..EUR.SP00.A'
it 'creates a URL from an availability and service objects (mode)', ->
q = {
flow: 'EXR'
key: 'PI:KEY:<KEY>END_PI'
mode: 'exact'
}
s = sdmxrest.getService({url: 'http://ws-entry-point'});
url = sdmxrest.getUrl q, s
url.should.be.a 'string'
url.should.contain 'http://ws-entry-point'
url.should.contain 'availableconstraint'
url.should.contain 'EXR/A..EUR.SP00.A'
url.should.contain 'mode=exact'
it 'creates a URL from an availability and a service objects (component)', ->
q = {
flow: 'EXR'
key: 'PI:KEY:<KEY>END_PI'
component: 'FREQ'
}
s = sdmxrest.getService({url: 'http://ws-entry-point'});
url = sdmxrest.getUrl q, s
url.should.be.a 'string'
url.should.contain 'http://ws-entry-point'
url.should.contain 'availableconstraint'
url.should.contain 'EXR/A..EUR.SP00.A'
url.should.contain 'FREQ'
it 'creates a URL from an availability and a service objects (references)', ->
q = {
flow: 'EXR'
key: 'PI:KEY:<KEY>END_PI'
references: 'all'
}
s = sdmxrest.getService({url: 'http://ws-entry-point'});
url = sdmxrest.getUrl q, s
url.should.be.a 'string'
url.should.contain 'http://ws-entry-point'
url.should.contain 'availableconstraint'
url.should.contain 'EXR/A..EUR.SP00.A'
url.should.contain 'references=all'
it 'fails if the input is not of the expected type', ->
test = -> sdmxrest.getUrl undefined, sdmxrest.getService 'ECB'
should.Throw(test, Error, 'Not a valid query')
test = -> sdmxrest.getUrl {}, sdmxrest.getService 'ECB'
should.Throw(test, Error, 'Not a valid query')
query = sdmxrest.getDataQuery {flow: 'EXR', key: 'PI:KEY:<KEY>END_PI'}
test = -> sdmxrest.getUrl query, sdmxrest.getService 'TEST'
should.Throw(test, Error, 'not in the list of predefined services')
test = -> sdmxrest.getUrl query
should.Throw(test, Error, 'Not a valid service')
describe 'when using request()', ->
it 'offers to execute a request from a query and service objects', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
response =
sdmxrest.request {flow: 'EXR', key: 'PI:KEY:<KEY>END_PI'}, 'ECB'
response.should.eventually.equal 'OK'
it 'offers to execute a request from an SDMX RESTful query string (known service)', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
response = sdmxrest.request 'http://sdw-wsrest.ecb.europa.eu/service/data/EXR'
response.should.eventually.equal 'OK'
it 'offers to execute a request from an SDMX RESTful query string (unknown service)', ->
query = nock('http://test.org')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
response = sdmxrest.request 'http://test.org/data/EXR'
response.should.eventually.equal 'OK'
it 'throws an exception in case of issues with a request', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('TEST') > -1)
.reply 404
response = sdmxrest.request {flow: 'TEST'}, 'ECB'
response.should.be.rejectedWith RangeError
it 'does not throw an exception for a 404 with updatedAfter', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('ICP') > -1)
.reply 404
response = sdmxrest.request \
{flow: 'ICP', updatedAfter: '2016-01-01T14:54:27Z'}, 'ECB'
response.should.be.fulfilled
response.should.not.be.rejected
it 'throws an exception when the Service URL is invalid', ->
response = sdmxrest.request {flow: 'ICP'}, {url: 'ws.test'}
response.should.not.be.fulfilled
response.should.be.rejected
it 'adds an accept header to data queries if the service has a default format', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('accept', (h) ->
h[0].indexOf('application/vnd.sdmx.data+json') > -1)
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
response =
sdmxrest.request {flow: 'EXR', key: 'PI:KEY:<KEY>END_PI'}, 'ECB'
response.should.eventually.equal 'OK'
it 'adds an accept header to structure queries if the service has a default format', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('accept', (h) ->
h[0].indexOf('application/vnd.sdmx.structure+xml') > -1)
.get((uri) -> uri.indexOf('codelist') > -1)
.reply 200, 'OK'
response =
sdmxrest.request {resource: 'codelist', id: 'CL_FREQ'}, 'ECB'
response.should.eventually.equal 'OK'
it 'adds an accept header to schema queries if the service has a default format', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('accept', (h) ->
h[0].indexOf('application/xml') > -1)
.get((uri) -> uri.indexOf('schema') > -1)
.reply 200, 'OK'
response =
sdmxrest.request {context: 'dataflow', agency: 'ECB', id: 'EXR'}, 'ECB'
response.should.eventually.equal 'OK'
it 'adds an accept header to data URLs if the service has a default format', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('accept', (h) ->
h[0].indexOf('application/vnd.sdmx.data+json') > -1)
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
url = 'http://sdw-wsrest.ecb.europa.eu/service/data/EXR/A..EUR.SP00.A'
response = sdmxrest.request url
response.should.eventually.equal 'OK'
it 'does not overwrite the accept header passed by the client (data)', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('accept', (h) ->
h[0].indexOf('application/xml') > -1)
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
opts =
headers:
accept: 'application/xml'
response =
sdmxrest.request {flow: 'EXR', key: 'PI:KEY:<KEY>END_PI'}, 'ECB', opts
response.should.eventually.equal 'OK'
it 'does not overwrite the accept header passed by the client (structure)', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('accept', (h) ->
h[0].indexOf('application/vnd.sdmx.structure+json;version=1.0.0') > -1)
.get((uri) -> uri.indexOf('codelist') > -1)
.reply 200, 'OK'
opts =
headers:
accept: 'application/vnd.sdmx.structure+json;version=1.0.0'
response =
sdmxrest.request {resource: 'codelist', id: 'CL_FREQ'}, 'ECB', opts
response.should.eventually.equal 'OK'
it 'does not overwrite the accept header passed by the client (schema)', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('accept', (h) ->
h[0].indexOf('application/vnd.sdmx.structure+xml;version=2.1') > -1)
.get((uri) -> uri.indexOf('schema') > -1)
.reply 200, 'OK'
opts =
headers:
accept: 'application/vnd.sdmx.structure+xml;version=2.1'
response =
sdmxrest.request {context: 'dataflow', agency: 'ECB', id: 'EXR'}, 'ECB', opts
response.should.eventually.equal 'OK'
it 'does not add an accept header to data queries if the service does not have a default format', ->
query = nock('http://stats.oecd.org')
.matchHeader('accept', (h) -> h[0] is '*/*')
.get((uri) -> uri.indexOf('EO') > -1)
.reply 200, 'OK'
response =
sdmxrest.request {flow: 'EO'}, 'OECD'
response.should.eventually.equal 'OK'
it 'does not add an accept header to structure queries if the service does not have a default format', ->
query = nock('http://stats.oecd.org')
.matchHeader('accept', (h) -> h[0] is '*/*')
.get((uri) -> uri.indexOf('codelist') > -1)
.reply 200, 'OK'
response =
sdmxrest.request {resource: 'codelist', id: 'CL_FREQ'}, 'OECD'
response.should.eventually.equal 'OK'
it 'does not add an accept header to schema queries if the service does not have a default format', ->
query = nock('http://stats.oecd.org')
.matchHeader('accept', (h) -> h[0] is '*/*' )
.get((uri) -> uri.indexOf('schema') > -1)
.reply 200, 'OK'
response =
sdmxrest.request {context: 'dataflow', agency: 'ECB', id: 'EXR'}, 'OECD'
response.should.eventually.equal 'OK'
it 'adds a default user agent to queries', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('user-agent', (h) ->
h[0] is 'sdmx-rest4js (https://github.com/sosna/sdmx-rest4js)')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
response =
sdmxrest.request {flow: 'EXR', key: 'PI:KEY:<KEY>END_PI'}, 'ECB'
response.should.eventually.equal 'OK'
it 'does not overwrite the user agent passed by the client', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('user-agent', (h) -> h[0] is 'test')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
opts =
headers:
'user-agent': 'test'
response =
sdmxrest.request {flow: 'EXR', key: 'PI:KEY:<KEY>END_PI'}, 'ECB', opts
response.should.eventually.equal 'OK'
it 'adds a default accept-encoding header to queries', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('accept-encoding', (h) -> h[0] is 'gzip,deflate')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
response =
sdmxrest.request {flow: 'EXR', key: 'PI:KEY:<KEY>END_PI'}, 'ECB'
response.should.eventually.equal 'OK'
it 'allows disabling content compression', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.matchHeader('accept-encoding', (h) -> h is undefined)
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
opts =
compress: false
response =
sdmxrest.request {flow: 'EXR', key: 'PI:KEY:<KEY>END_PI'}, 'ECB', opts
response.should.eventually.equal 'OK'
describe 'when using request2()', ->
it 'offers a way to retrieve response headers', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK', {'X-My-Headers': 'My Header value'}
request = sdmxrest.getDataQuery({flow: 'EXR', key: 'PI:KEY:<KEY>END_PI'})
sdmxrest.request2(request, 'ECB').then((response) ->
response.should.have.property('status').that.equals 200
response.should.have.property('headers')
response.should.respondTo 'text'
)
describe 'when using checkStatus()', ->
it 'throws an errir in case there is no response', ->
it 'throws an error in case there is an issue with the response', ->
request = sdmxrest.getDataQuery({flow: 'TEST'})
test = -> sdmxrest.checkStatus(request, undefined)
should.Throw(test, ReferenceError, 'Not a valid response')
it 'accept codes in the 300 range', ->
query = nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('TEST') > -1)
.reply 306, 'Redirected'
request = sdmxrest.getDataQuery({flow: 'TEST'})
sdmxrest.request2(request, 'ECB').then((response) ->
test = -> sdmxrest.checkStatus(request, response)
should.not.throw(test, RangeError, 'Request failed with error code 306')
)
it 'accept code 100', ->
nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('TEST') > -1)
.reply 100, 'Continue'
request = sdmxrest.getDataQuery({flow: 'TEST'})
sdmxrest.request2(request, 'ECB').then((response) ->
test = -> sdmxrest.checkStatus(request, response)
should.not.throw(test, RangeError, 'Request failed with error code 100')
)
describe 'when using checkMediaType()', ->
it 'accepts SDMX data formats', ->
fmt = 'application/vnd.sdmx.data+json;version=1.0.0'
nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK', {'Content-Type': fmt}
sdmxrest.request2({flow: 'EXR', key: 'PI:KEY:<KEY>END_PI'}, 'ECB').then((response) ->
test = -> sdmxrest.checkMediaType(fmt, response)
should.not.throw(test, RangeError, 'Not an SDMX format')
)
it 'accepts SDMX metadata formats', ->
fmt = 'application/vnd.sdmx.structure+xml;version=2.1'
nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('codelist') > -1)
.reply 200, 'OK', {'Content-Type': fmt}
sdmxrest.request2({resource: 'codelist'}, 'ECB').then((response) ->
test = -> sdmxrest.checkMediaType(fmt, response)
should.not.throw(test, RangeError, 'Not an SDMX format')
)
it 'accepts generic formats', ->
fmt = 'application/xml'
nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('codelist') > -1)
.reply 200, 'OK', {'Content-Type': fmt}
sdmxrest.request2({resource: 'codelist'}, 'ECB').then((response) ->
test = -> sdmxrest.checkMediaType(fmt, response)
should.not.throw(test, RangeError, 'Not an SDMX format')
)
it 'throws an error in case the format is not an SDMX one', ->
fmt = 'application/vnd.test.data+json'
nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK', {'Content-Type': fmt}
sdmxrest.request2({flow: 'EXR'}, 'ECB').then((response) ->
test = -> sdmxrest.checkMediaType(fmt, response)
should.Throw(test, RangeError, 'Not an SDMX format: ' + fmt))
it 'throws an error in case no format is specified', ->
fmt = 'application/xml'
nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK'
sdmxrest.request2({flow: 'EXR'}, 'ECB').then((response) ->
test = -> sdmxrest.checkMediaType(fmt, response)
should.Throw(test, RangeError, 'Not an SDMX format: null'))
it 'throws an error in case the format is not the requested one', ->
fmt = 'application/vnd.sdmx.data+json;version=1.0.0'
nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK', {'Content-Type': 'application/xml'}
opts =
headers:
accept: fmt
sdmxrest.request2({flow: 'EXR'}, 'ECB', opts).then((response) ->
test = -> sdmxrest.checkMediaType(fmt, response)
should.Throw(test, RangeError, 'Wrong format: requested ' + fmt + ' but got application/xml'))
it 'Does not throw an error in case the received format is the requested one', ->
fmt = 'application/vnd.sdmx.data+json;version=1.0.0'
nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK', {'Content-Type': fmt}
opts =
headers:
accept: fmt
sdmxrest.request2({flow: 'EXR'}, 'ECB', opts).then((response) ->
test = -> sdmxrest.checkMediaType(fmt, response)
should.not.Throw(test))
it 'Does not throw an error in case the only difference is the space character', ->
fmt1 = 'application/vnd.sdmx.genericdata+xml;version=2.1'
fmt2 = 'application/vnd.sdmx.genericdata+xml; version=2.1'
nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK', {'Content-Type': fmt2}
opts =
headers:
accept: fmt1
sdmxrest.request2({flow: 'EXR'}, 'ECB', opts).then((response) ->
test = -> sdmxrest.checkMediaType(fmt1, response)
should.not.Throw(test))
it 'Does not throw an error in case the received format is one of the requested ones', ->
fmt = 'application/vnd.sdmx.data+json;version=1.0.0, application/json;q=0.9, text/csv;q=0.5, */*;q=0.4'
nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK', {'Content-Type': 'text/csv'}
opts =
headers:
accept: fmt
sdmxrest.request2({flow: 'EXR'}, 'ECB', opts).then((response) ->
test = -> sdmxrest.checkMediaType(fmt, response)
should.not.Throw(test))
it 'Throws an error in case the received format is not one of the requested ones', ->
fmt = 'application/vnd.sdmx.data+json;version=1.0.0, application/json;q=0.9, text/csv;q=0.5, */*;q=0.4'
nock('http://sdw-wsrest.ecb.europa.eu')
.get((uri) -> uri.indexOf('EXR') > -1)
.reply 200, 'OK', {'Content-Type': 'application/xml'}
opts =
headers:
accept: fmt
sdmxrest.request2({flow: 'EXR'}, 'ECB', opts).then((response) ->
test = -> sdmxrest.checkMediaType(fmt, response)
should.Throw(test, RangeError))
|
[
{
"context": "bot 特徴語抽出 <text> - <text>の特徴語を抽出する\n#\n# Author:\n# yagizombie <yanagihara+zombie@brainpad.co.jp>\n\nhttp = requir",
"end": 257,
"score": 0.9997129440307617,
"start": 247,
"tag": "USERNAME",
"value": "yagizombie"
},
{
"context": "xt> - <text>の特徴語を抽出する\n#\n# Author:\n#... | src/conomi-keyphrase.coffee | yagizombie/hubot-conomi-keyphrase | 0 | # Description
# 特徴語抽出をする
# http://developer.yahoo.co.jp/webapi/jlp/keyphrase/v1/extract.html
#
# Configuration:
# CONOMI_YD_APP_ID Yahoo! JAPANのWebサービスを利用するためのアプリケーションID
#
# Commands:
# hubot 特徴語抽出 <text> - <text>の特徴語を抽出する
#
# Author:
# yagizombie <yanagihara+zombie@brainpad.co.jp>
http = require 'http'
APP_ID = process.env.CONOMI_YD_APP_ID
module.exports = (robot) ->
robot.respond /(特徴語抽出|keyphrase)\s(.*)$/i, (msg) ->
h = "jlp.yahooapis.jp"
p = "/KeyphraseService/V1/extract?appid=" + APP_ID
p += "&output=json&sentence=" + encodeURIComponent(msg.match[2])
msg.send "むむっ (grumpycat)"
req = http.get { host:h, path:p }, (res) ->
contents = ""
res.on 'data', (chunk) ->
contents += "#{chunk}"
res.on 'end', () ->
rep = "/quote ∴‥∵‥∴‥∵‥∴‥∴‥∵‥∴‥∵‥∴‥∴‥∵‥∴‥∵‥∴‥∴‥∵‥∴\n"
j = JSON.parse contents
for key,value of j
rep += key + ": " + value + "\n"
msg.send rep
msg.send "こんな感じ♪ @#{msg.message.user.mention_name}"
req.on "error", (e) ->
msg.send "(fu) ひでぶっ!! ... {e.message}"
| 121502 | # Description
# 特徴語抽出をする
# http://developer.yahoo.co.jp/webapi/jlp/keyphrase/v1/extract.html
#
# Configuration:
# CONOMI_YD_APP_ID Yahoo! JAPANのWebサービスを利用するためのアプリケーションID
#
# Commands:
# hubot 特徴語抽出 <text> - <text>の特徴語を抽出する
#
# Author:
# yagizombie <<EMAIL>>
http = require 'http'
APP_ID = process.env.CONOMI_YD_APP_ID
module.exports = (robot) ->
robot.respond /(特徴語抽出|keyphrase)\s(.*)$/i, (msg) ->
h = "jlp.yahooapis.jp"
p = "/KeyphraseService/V1/extract?appid=" + APP_ID
p += "&output=json&sentence=" + encodeURIComponent(msg.match[2])
msg.send "むむっ (grumpycat)"
req = http.get { host:h, path:p }, (res) ->
contents = ""
res.on 'data', (chunk) ->
contents += "#{chunk}"
res.on 'end', () ->
rep = "/quote ∴‥∵‥∴‥∵‥∴‥∴‥∵‥∴‥∵‥∴‥∴‥∵‥∴‥∵‥∴‥∴‥∵‥∴\n"
j = JSON.parse contents
for key,value of j
rep += key + ": " + value + "\n"
msg.send rep
msg.send "こんな感じ♪ @#{msg.message.user.mention_name}"
req.on "error", (e) ->
msg.send "(fu) ひでぶっ!! ... {e.message}"
| true | # Description
# 特徴語抽出をする
# http://developer.yahoo.co.jp/webapi/jlp/keyphrase/v1/extract.html
#
# Configuration:
# CONOMI_YD_APP_ID Yahoo! JAPANのWebサービスを利用するためのアプリケーションID
#
# Commands:
# hubot 特徴語抽出 <text> - <text>の特徴語を抽出する
#
# Author:
# yagizombie <PI:EMAIL:<EMAIL>END_PI>
http = require 'http'
APP_ID = process.env.CONOMI_YD_APP_ID
module.exports = (robot) ->
robot.respond /(特徴語抽出|keyphrase)\s(.*)$/i, (msg) ->
h = "jlp.yahooapis.jp"
p = "/KeyphraseService/V1/extract?appid=" + APP_ID
p += "&output=json&sentence=" + encodeURIComponent(msg.match[2])
msg.send "むむっ (grumpycat)"
req = http.get { host:h, path:p }, (res) ->
contents = ""
res.on 'data', (chunk) ->
contents += "#{chunk}"
res.on 'end', () ->
rep = "/quote ∴‥∵‥∴‥∵‥∴‥∴‥∵‥∴‥∵‥∴‥∴‥∵‥∴‥∵‥∴‥∴‥∵‥∴\n"
j = JSON.parse contents
for key,value of j
rep += key + ": " + value + "\n"
msg.send rep
msg.send "こんな感じ♪ @#{msg.message.user.mention_name}"
req.on "error", (e) ->
msg.send "(fu) ひでぶっ!! ... {e.message}"
|
[
{
"context": " clock = sinon.useFakeTimers()\n pretend.user 'tester', id:'tester', room: 'testing'\n\n Object.getOwn",
"end": 492,
"score": 0.9865233302116394,
"start": 486,
"tag": "USERNAME",
"value": "tester"
},
{
"context": "ar /test/, -> # listen to tests\n pretend.user... | test/unit/05-director_test.coffee | PropertyUX/nubot-playbook | 0 | sinon = require 'sinon'
chai = require 'chai'
should = chai.should()
chai.use require 'sinon-chai'
_ = require 'lodash'
co = require 'co'
pretend = require 'hubot-pretend'
Dialogue = require '../../lib/modules/dialogue'
Scene = require '../../lib/modules/scene'
Director = require '../../lib/modules/director'
# init globals
clock = null
describe 'Director', ->
beforeEach ->
pretend.start()
pretend.log.level = 'silent'
clock = sinon.useFakeTimers()
pretend.user 'tester', id:'tester', room: 'testing'
Object.getOwnPropertyNames(Director.prototype).map (key) ->
sinon.spy Director.prototype, key
# generate first response for starting dialogues
pretend.robot.hear /test/, -> # listen to tests
pretend.user('tester').send 'test'
.then => @res = pretend.lastListen()
afterEach ->
pretend.shutdown()
clock.restore()
Object.getOwnPropertyNames(Director.prototype).map (key) ->
Director.prototype[key].restore()
describe 'constructor', ->
context 'without optional args', ->
beforeEach ->
@director = new Director pretend.robot
it 'has empty array names', ->
@director.names.should.eql []
context 'with authorise function', ->
beforeEach ->
@authorise = -> null
@director = new Director pretend.robot, @authorise
it 'stores the given function as its authorise method', ->
@director.authorise = @authorise
context 'with options (denied reply and key string)', ->
beforeEach ->
@director = new Director pretend.robot,
deniedReply: "DENIED!"
key: 'Orson Welles'
it 'stores passed options in config', ->
@director.config.deniedReply.should.equal "DENIED!"
context 'with env var for config', ->
beforeEach ->
process.env.DENIED_REPLY = "403 Sorry."
@director = new Director pretend.robot
afterEach ->
delete process.env.DENIED_REPLY
it 'has default config with env inherited', ->
@director.config.should.eql
type: 'whitelist'
scope: 'username'
deniedReply: "403 Sorry."
context 'with env var for names', ->
beforeEach ->
process.env.WHITELIST_USERNAMES = 'Emmanuel'
process.env.WHITELIST_ROOMS = 'Capital'
process.env.BLACKLIST_USERNAMES = 'Winston,Julia,Syme'
process.env.BLACKLIST_ROOMS = 'Labour'
afterEach ->
delete process.env.WHITELIST_USERNAMES
delete process.env.WHITELIST_ROOMS
delete process.env.BLACKLIST_USERNAMES
delete process.env.BLACKLIST_ROOMS
context 'whitelist type, username scope', ->
beforeEach ->
@director = new Director pretend.robot,
type: 'whitelist'
scope: 'username'
it 'stores the whitelisted usernames from env', ->
@director.names.should.eql ['Emmanuel']
context 'whitelist type, room scope', ->
beforeEach ->
@director = new Director pretend.robot,
type: 'whitelist'
scope: 'room'
it 'stores the whitelisted rooms from env', ->
@director.names.should.eql ['Capital']
context 'blacklist type, username scope', ->
beforeEach ->
@director = new Director pretend.robot,
type: 'blacklist'
scope: 'username'
it 'stores the blacklisted usernames from env', ->
@director.names.should.eql ['Winston', 'Julia', 'Syme']
context 'blacklist type, room scope', ->
beforeEach ->
@director = new Director pretend.robot,
type: 'blacklist'
scope: 'room'
it 'stores the blacklisted rooms from env', ->
@director.names.should.eql ['Labour']
context 'with invalid option for type', ->
beforeEach ->
try @director = new Director pretend.robot,
type: 'pinklist'
it 'should throw error', ->
Director.prototype.constructor.should.throw
context 'with invalid option for scope', ->
beforeEach ->
try @director = new Director pretend.robot,
scope: 'robot'
it 'should throw error', ->
Director.prototype.constructor.should.throw
context 'without key, with authorise function and options', ->
beforeEach ->
@authorise = -> null
@director = new Director pretend.robot, @authorise,
scope: 'room'
it 'uses options', ->
@director.config.scope.should.equal 'room'
it 'uses authorise function', ->
@director.authorise.should.eql @authorise
describe '.add', ->
beforeEach ->
@director = new Director pretend.robot
context 'given array of names', ->
beforeEach ->
@director.add ['pema', 'nima']
it 'stores them in the names array', ->
@director.names.should.eql ['pema', 'nima']
context 'given single name', ->
beforeEach ->
@director.add 'pema'
it 'stores it in the names array', ->
@director.names.should.eql ['pema']
context 'given array of names, some existing', ->
beforeEach ->
@director.names = ['yeon', 'juan']
@director.add ['pema', 'juan']
it 'adds any missing, not duplicating existing', ->
@director.names.should.eql ['yeon', 'juan', 'pema']
describe '.remove', ->
beforeEach ->
@director = new Director pretend.robot
@director.names = ['yeon', 'pema', 'juan', 'nima']
context 'given array of names', ->
beforeEach ->
@director.remove ['pema', 'nima']
it 'removes them from the names array', ->
@director.names.should.eql ['yeon', 'juan']
context 'with single name', ->
beforeEach ->
@director.remove 'pema'
it 'removes it from the names array', ->
@director.names.should.eql ['yeon', 'juan', 'nima']
context 'with array names, some not existing', ->
beforeEach ->
@director.remove ['frank', 'pema', 'juan', 'nima']
it 'removes any missing, ignoring others', ->
@director.names.should.eql ['yeon']
describe '.isAllowed', ->
context 'whitelist without authorise function', ->
context 'no list', ->
it 'returns false', ->
director = new Director pretend.robot
director.isAllowed pretend.response 'tester', 'test'
.should.be.false
context 'has list, username on list', ->
it 'returns true', ->
director = new Director pretend.robot
director.names = ['tester']
director.isAllowed pretend.response 'tester', 'test'
.should.be.true
context 'has list, username not on list', ->
it 'returns false', ->
director = new Director pretend.robot
director.names = ['nobody']
director.isAllowed pretend.response 'tester', 'test'
.should.be.false
context 'blacklist without authorise function', ->
context 'no list', ->
it 'returns true', ->
director = new Director pretend.robot, type: 'blacklist'
director.isAllowed pretend.response 'tester', 'test'
.should.be.true
context 'has list, username on list', ->
it 'returns false', ->
director = new Director pretend.robot, type: 'blacklist'
director.names = ['tester']
director.isAllowed pretend.response 'tester', 'test'
.should.be.false
context 'has list, username not on list', ->
it 'returns true', ->
director = new Director pretend.robot, type: 'blacklist'
director.names = ['nobody']
director.isAllowed pretend.response 'tester', 'test'
.should.be.true
context 'whitelist with authorise function', ->
context 'no list', ->
it 'calls authorise function with username and res', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise
res = pretend.response 'tester', 'test'
director.isAllowed res
authorise.should.have.calledWith 'tester', res
it 'returns value of authorise function', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise
director.isAllowed pretend.response 'tester', 'test'
.should.equal 'AUTHORISE'
context 'has list, username on list', ->
it 'returns true', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise
director.names = ['tester']
director.isAllowed pretend.response 'tester', 'test'
.should.be.true
it 'does not call authorise function', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise
director.names = ['tester']
director.isAllowed pretend.response 'tester', 'test'
authorise.should.not.have.been.calledOnce
context 'has list, username not on list', ->
it 'returns value of authorise function', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise
director.names = ['nobody']
director.isAllowed pretend.response 'tester', 'test'
.should.equal 'AUTHORISE'
context 'blacklist with authorise function', ->
context 'no list', ->
it 'calls authorise function with username and res', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise, type: 'blacklist'
res = pretend.response 'tester', 'test'
director.isAllowed res
authorise.should.have.calledWith 'tester', res
it 'returns value of authorise function', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise, type: 'blacklist'
director.isAllowed pretend.response 'tester', 'test'
.should.equal 'AUTHORISE'
context 'has list, username on list', ->
it 'returns false', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise, type: 'blacklist'
director.names = ['tester']
director.isAllowed pretend.response 'tester', 'test'
.should.be.false
it 'does not call authorise function', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise, type: 'blacklist'
director.names = ['tester']
director.isAllowed pretend.response 'tester', 'test'
authorise.should.not.have.been.calledOnce
context 'has list, username not on list', ->
it 'returns value of authorise function', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise, type: 'blacklist'
director.names = ['nobody']
director.isAllowed pretend.response 'tester', 'test'
.should.equal 'AUTHORISE'
context 'room scope, blacklist room', ->
it 'returns false', ->
director = new Director pretend.robot, type: 'blacklist', scope: 'room'
director.names = ['testing']
director.isAllowed pretend.response 'tester', 'test', 'testing'
.should.be.false
context 'room scope, whitelist room', ->
it 'returns true', ->
director = new Director pretend.robot, type: 'whitelist', scope: 'room'
director.names = ['testing']
director.isAllowed pretend.response 'tester', 'test', 'testing'
.should.be.true
describe '.process', ->
it 'calls .isAllowed to determine if user is allowed or denied', ->
director = new Director pretend.robot
scene = new Scene pretend.robot
res = pretend.response 'tester', 'testing'
director.process res
director.isAllowed.should.have.calledWith res
it 'returns a promise', ->
director = new Director pretend.robot
scene = new Scene pretend.robot
director.process pretend.response 'tester', 'testing'
.then.should.be.a 'function'
it 'resolves to .isAllowed result', -> co ->
director = new Director pretend.robot
scene = new Scene pretend.robot
result = yield director.process pretend.response 'tester', 'testing'
result.should.equal director.isAllowed.returnValues.pop()
context 'with async auth function', ->
it 'resolves with auth function result after finished', ->
authorise = -> new Promise (resolve, reject) ->
done = -> resolve 'AUTHORISE'
setTimeout done, 10
new Director pretend.robot, authorise
.process pretend.response 'tester', 'test'
.then (result) -> result.should.equal 'AUTHORISE'
clock.tick 20
context 'denied with denied reply value', ->
it 'calls response method reply with reply value', -> co ->
director = new Director pretend.robot, deniedReply: 'DENIED'
res = pretend.response 'tester', 'test'
yield director.process res
res.reply.should.have.calledWith 'DENIED'
context 'denied without denied reply value', ->
it 'does not call response reply method', -> co ->
director = new Director pretend.robot
res = pretend.response 'tester', 'test'
yield director.process res
res.reply.should.not.have.called
context 'allowed user with denied reply value', ->
it 'calls .isAllowed to determine if user is allowed or denied', -> co ->
director = new Director pretend.robot
director.names = ['tester']
res = pretend.response 'tester', 'test'
yield director.process res
director.isAllowed.should.have.calledWith res
it 'resolves to same value as .isAllowed', -> co ->
director = new Director pretend.robot
director.names = ['tester']
result = yield director.process pretend.response 'tester', 'test'
result.should.equal director.isAllowed.returnValues.pop()
it 'does not send denied reply', -> co ->
director = new Director pretend.robot
director.names = ['tester']
res = pretend.response 'tester', 'test'
yield director.process res
res.reply.should.not.have.called
describe '.directMatch', ->
context 'allowed user sending message matching directed match', ->
it 'calls .process to perform access checks and reply', -> co ->
director = new Director pretend.robot
pretend.robot.hear /let me in/, ->
director.directMatch /let me in/
director.names = ['tester']
yield pretend.user('tester').send 'let me in'
director.process.should.have.calledOnce
it 'triggers match callback normally', -> co ->
director = new Director pretend.robot
callback = sinon.spy()
pretend.robot.hear /let me in/, callback
director.directMatch /let me in/
director.names = ['tester']
yield pretend.user('tester').send 'let me in'
callback.should.have.calledOnce
context 'denied user sending message matching directed match', ->
it 'calls .process to perform access checks and reply', -> co ->
director = new Director pretend.robot
pretend.robot.hear /let me in/, ->
director.directMatch /let me in/
yield pretend.user('tester').send 'let me in'
director.process.should.have.calledOnce
it 'prevents match callback from triggering', -> co ->
director = new Director pretend.robot
callback = sinon.spy()
pretend.robot.hear /let me in/, callback
director.directMatch /let me in/
yield pretend.user('tester').send 'let me in'
callback.should.not.have.called
context 'denied user sending unmatched message', ->
it 'does not call .process because middleware did not match', -> co ->
director = new Director pretend.robot
pretend.robot.hear /let me in/, ->
director.directMatch /let me in/
yield pretend.user('tester').send 'foo'
director.process.should.not.have.called
describe '.directListener', ->
context 'with message matching directed listener id', ->
it 'calls .process to perform access checks and reply', -> co ->
director = new Director pretend.robot
pretend.robot.hear /let me in/, id: 'entry-test', ->
director.directListener 'entry-test'
yield pretend.user('tester').send 'let me in'
director.process.should.have.calledOnce
it 'triggers match callback when allowed', -> co ->
director = new Director pretend.robot
callback = sinon.spy()
pretend.robot.hear /let me in/, id: 'entry-test', callback
director.directListener 'entry-test'
director.names = ['tester']
yield pretend.user('tester').send 'let me in'
callback.should.have.calledOnce
it 'prevents match callback when denied', -> co ->
director = new Director pretend.robot
callback = sinon.spy()
pretend.robot.hear /let me in/, id: 'entry-test', callback
director.directListener 'entry-test'
yield pretend.user('tester').send 'let me in'
callback.should.not.have.called
context 'with unmatched message', ->
it 'does not call .process because middleware did not match', -> co ->
director = new Director pretend.robot
pretend.robot.hear /let me in/, id: 'entry-test', ->
director.directListener 'entry-test'
yield pretend.user('tester').send 'foo'
director.process.should.not.have.called
describe '.directScene', ->
beforeEach ->
sinon.spy Scene.prototype, 'enter'
sinon.spy Scene.prototype, 'processEnter'
afterEach ->
Scene.prototype.enter.restore()
Scene.prototype.processEnter.restore()
it 'scene enter middleware calls director .process', ->
director = new Director pretend.robot
scene = new Scene pretend.robot
director.directScene scene
res = pretend.response 'tester', 'test'
scene.enter res # won't be alllowed without adding names
.catch -> director.process.should.have.calledWith res
context 'user allowed', ->
it 'allowed scene enter, resolves with context', ->
director = new Director pretend.robot
scene = new Scene pretend.robot
keys = ['response', 'participants', 'options', 'arguments', 'dialogue']
director.directScene scene
director.names = ['tester']
scene.enter pretend.response 'tester', 'test'
.then (result) -> result.should.have.all.keys keys...
context 'user denied', ->
it 'preempts scene enter, rejects promise', ->
director = new Director pretend.robot
scene = new Scene pretend.robot
director.directScene scene
scene.enter pretend.response 'tester', 'test'
.then () -> throw new Error 'promise should have caught'
.catch (err) -> err.should.be.instanceof Error
context 'with multiple scenes, only one directed', ->
it 'calls process only once for the directed scene', -> co ->
director = new Director pretend.robot
sceneA = new Scene pretend.robot
sceneB = new Scene pretend.robot
director.directScene sceneA
resA = pretend.response 'tester', 'let me in A'
resB = pretend.response 'tester', 'let me in A'
try
yield sceneA.enter resA
yield sceneB.enter resB
director.process.should.have.calledOnce
director.process.should.have.calledWithExactly resA
# TODO: Fix hack below. Because send middleware resolves before scene enter
# middleware, simply yielding on send will not allow asserting on the
# outcome of the enter middleware. Need to refactor pretend with updated
# nubot async features that use nextTick approach to ensure middleware only
# resolves when everything final
# FIX: I have disabled below because when all tests run together, something
# is breaking the setTimeout function, so it never fires the callback? 🤷
###
context 'allowed user sends message matching scene listener', ->
beforeEach -> clock.restore() # need real time to pass
it 'allows scene to process entry', (done) ->
director = new Director pretend.robot
scene = new Scene pretend.robot
director.directScene scene
director.names = ['tester']
callback = sinon.spy()
scene.hear /let me in/, callback
pretend.user('tester').send 'let me in'
setTimeout () ->
scene.processEnter.should.have.calledOnce
callback.should.have.calledOnce
done()
, 50
context 'denied user sends message matching scene listener', ->
it 'prevents the scene from processing entry', (done) ->
director = new Director pretend.robot
scene = new Scene pretend.robot
director.directScene scene
callback = sinon.spy()
scene.hear /let me in/, callback
pretend.user('tester').send 'let me in'
setTimeout () ->
scene.processEnter.should.not.have.called
callback.should.not.have.called
console.log('50 up')
done()
, 10
###
| 135145 | sinon = require 'sinon'
chai = require 'chai'
should = chai.should()
chai.use require 'sinon-chai'
_ = require 'lodash'
co = require 'co'
pretend = require 'hubot-pretend'
Dialogue = require '../../lib/modules/dialogue'
Scene = require '../../lib/modules/scene'
Director = require '../../lib/modules/director'
# init globals
clock = null
describe 'Director', ->
beforeEach ->
pretend.start()
pretend.log.level = 'silent'
clock = sinon.useFakeTimers()
pretend.user 'tester', id:'tester', room: 'testing'
Object.getOwnPropertyNames(Director.prototype).map (key) ->
sinon.spy Director.prototype, key
# generate first response for starting dialogues
pretend.robot.hear /test/, -> # listen to tests
pretend.user('tester').send 'test'
.then => @res = pretend.lastListen()
afterEach ->
pretend.shutdown()
clock.restore()
Object.getOwnPropertyNames(Director.prototype).map (key) ->
Director.prototype[key].restore()
describe 'constructor', ->
context 'without optional args', ->
beforeEach ->
@director = new Director pretend.robot
it 'has empty array names', ->
@director.names.should.eql []
context 'with authorise function', ->
beforeEach ->
@authorise = -> null
@director = new Director pretend.robot, @authorise
it 'stores the given function as its authorise method', ->
@director.authorise = @authorise
context 'with options (denied reply and key string)', ->
beforeEach ->
@director = new Director pretend.robot,
deniedReply: "DENIED!"
key: '<KEY>'
it 'stores passed options in config', ->
@director.config.deniedReply.should.equal "DENIED!"
context 'with env var for config', ->
beforeEach ->
process.env.DENIED_REPLY = "403 Sorry."
@director = new Director pretend.robot
afterEach ->
delete process.env.DENIED_REPLY
it 'has default config with env inherited', ->
@director.config.should.eql
type: 'whitelist'
scope: 'username'
deniedReply: "403 Sorry."
context 'with env var for names', ->
beforeEach ->
process.env.WHITELIST_USERNAMES = '<NAME>'
process.env.WHITELIST_ROOMS = 'Capital'
process.env.BLACKLIST_USERNAMES = '<NAME>,<NAME>,<NAME>'
process.env.BLACKLIST_ROOMS = 'Labour'
afterEach ->
delete process.env.WHITELIST_USERNAMES
delete process.env.WHITELIST_ROOMS
delete process.env.BLACKLIST_USERNAMES
delete process.env.BLACKLIST_ROOMS
context 'whitelist type, username scope', ->
beforeEach ->
@director = new Director pretend.robot,
type: 'whitelist'
scope: 'username'
it 'stores the whitelisted usernames from env', ->
@director.names.should.eql ['<NAME>']
context 'whitelist type, room scope', ->
beforeEach ->
@director = new Director pretend.robot,
type: 'whitelist'
scope: 'room'
it 'stores the whitelisted rooms from env', ->
@director.names.should.eql ['Capital']
context 'blacklist type, username scope', ->
beforeEach ->
@director = new Director pretend.robot,
type: 'blacklist'
scope: 'username'
it 'stores the blacklisted usernames from env', ->
@director.names.should.eql ['<NAME>', '<NAME>', '<NAME>']
context 'blacklist type, room scope', ->
beforeEach ->
@director = new Director pretend.robot,
type: 'blacklist'
scope: 'room'
it 'stores the blacklisted rooms from env', ->
@director.names.should.eql ['Labour']
context 'with invalid option for type', ->
beforeEach ->
try @director = new Director pretend.robot,
type: 'pinklist'
it 'should throw error', ->
Director.prototype.constructor.should.throw
context 'with invalid option for scope', ->
beforeEach ->
try @director = new Director pretend.robot,
scope: 'robot'
it 'should throw error', ->
Director.prototype.constructor.should.throw
context 'without key, with authorise function and options', ->
beforeEach ->
@authorise = -> null
@director = new Director pretend.robot, @authorise,
scope: 'room'
it 'uses options', ->
@director.config.scope.should.equal 'room'
it 'uses authorise function', ->
@director.authorise.should.eql @authorise
describe '.add', ->
beforeEach ->
@director = new Director pretend.robot
context 'given array of names', ->
beforeEach ->
@director.add ['<NAME>', '<NAME>']
it 'stores them in the names array', ->
@director.names.should.eql ['<NAME>', '<NAME>']
context 'given single name', ->
beforeEach ->
@director.add '<NAME>'
it 'stores it in the names array', ->
@director.names.should.eql ['<NAME>']
context 'given array of names, some existing', ->
beforeEach ->
@director.names = ['<NAME>', '<NAME>']
@director.add ['<NAME>', '<NAME>']
it 'adds any missing, not duplicating existing', ->
@director.names.should.eql ['<NAME>', '<NAME>', '<NAME>']
describe '.remove', ->
beforeEach ->
@director = new Director pretend.robot
@director.names = ['<NAME>', '<NAME>', '<NAME>', '<NAME>']
context 'given array of names', ->
beforeEach ->
@director.remove ['<NAME>', '<NAME>']
it 'removes them from the names array', ->
@director.names.should.eql ['<NAME>', '<NAME>']
context 'with single name', ->
beforeEach ->
@director.remove '<NAME>'
it 'removes it from the names array', ->
@director.names.should.eql ['<NAME>', '<NAME>', '<NAME>']
context 'with array names, some not existing', ->
beforeEach ->
@director.remove ['<NAME>', '<NAME>', '<NAME>', '<NAME>']
it 'removes any missing, ignoring others', ->
@director.names.should.eql ['<NAME>']
describe '.isAllowed', ->
context 'whitelist without authorise function', ->
context 'no list', ->
it 'returns false', ->
director = new Director pretend.robot
director.isAllowed pretend.response 'tester', 'test'
.should.be.false
context 'has list, username on list', ->
it 'returns true', ->
director = new Director pretend.robot
director.names = ['tester']
director.isAllowed pretend.response 'tester', 'test'
.should.be.true
context 'has list, username not on list', ->
it 'returns false', ->
director = new Director pretend.robot
director.names = ['nobody']
director.isAllowed pretend.response 'tester', 'test'
.should.be.false
context 'blacklist without authorise function', ->
context 'no list', ->
it 'returns true', ->
director = new Director pretend.robot, type: 'blacklist'
director.isAllowed pretend.response 'tester', 'test'
.should.be.true
context 'has list, username on list', ->
it 'returns false', ->
director = new Director pretend.robot, type: 'blacklist'
director.names = ['tester']
director.isAllowed pretend.response 'tester', 'test'
.should.be.false
context 'has list, username not on list', ->
it 'returns true', ->
director = new Director pretend.robot, type: 'blacklist'
director.names = ['nobody']
director.isAllowed pretend.response 'tester', 'test'
.should.be.true
context 'whitelist with authorise function', ->
context 'no list', ->
it 'calls authorise function with username and res', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise
res = pretend.response 'tester', 'test'
director.isAllowed res
authorise.should.have.calledWith 'tester', res
it 'returns value of authorise function', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise
director.isAllowed pretend.response 'tester', 'test'
.should.equal 'AUTHORISE'
context 'has list, username on list', ->
it 'returns true', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise
director.names = ['tester']
director.isAllowed pretend.response 'tester', 'test'
.should.be.true
it 'does not call authorise function', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise
director.names = ['tester']
director.isAllowed pretend.response 'tester', 'test'
authorise.should.not.have.been.calledOnce
context 'has list, username not on list', ->
it 'returns value of authorise function', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise
director.names = ['<NAME> <NAME>']
director.isAllowed pretend.response 'tester', 'test'
.should.equal 'AUTHORISE'
context 'blacklist with authorise function', ->
context 'no list', ->
it 'calls authorise function with username and res', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise, type: 'blacklist'
res = pretend.response 'tester', 'test'
director.isAllowed res
authorise.should.have.calledWith 'tester', res
it 'returns value of authorise function', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise, type: 'blacklist'
director.isAllowed pretend.response 'tester', 'test'
.should.equal 'AUTHORISE'
context 'has list, username on list', ->
it 'returns false', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise, type: 'blacklist'
director.names = ['tester']
director.isAllowed pretend.response 'tester', 'test'
.should.be.false
it 'does not call authorise function', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise, type: 'blacklist'
director.names = ['tester']
director.isAllowed pretend.response 'tester', 'test'
authorise.should.not.have.been.calledOnce
context 'has list, username not on list', ->
it 'returns value of authorise function', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise, type: 'blacklist'
director.names = ['<NAME>obody']
director.isAllowed pretend.response 'tester', 'test'
.should.equal 'AUTHORISE'
context 'room scope, blacklist room', ->
it 'returns false', ->
director = new Director pretend.robot, type: 'blacklist', scope: 'room'
director.names = ['<NAME>']
director.isAllowed pretend.response 'tester', 'test', 'testing'
.should.be.false
context 'room scope, whitelist room', ->
it 'returns true', ->
director = new Director pretend.robot, type: 'whitelist', scope: 'room'
director.names = ['testing']
director.isAllowed pretend.response 'tester', 'test', 'testing'
.should.be.true
describe '.process', ->
it 'calls .isAllowed to determine if user is allowed or denied', ->
director = new Director pretend.robot
scene = new Scene pretend.robot
res = pretend.response 'tester', 'testing'
director.process res
director.isAllowed.should.have.calledWith res
it 'returns a promise', ->
director = new Director pretend.robot
scene = new Scene pretend.robot
director.process pretend.response 'tester', 'testing'
.then.should.be.a 'function'
it 'resolves to .isAllowed result', -> co ->
director = new Director pretend.robot
scene = new Scene pretend.robot
result = yield director.process pretend.response 'tester', 'testing'
result.should.equal director.isAllowed.returnValues.pop()
context 'with async auth function', ->
it 'resolves with auth function result after finished', ->
authorise = -> new Promise (resolve, reject) ->
done = -> resolve 'AUTHORISE'
setTimeout done, 10
new Director pretend.robot, authorise
.process pretend.response 'tester', 'test'
.then (result) -> result.should.equal 'AUTHORISE'
clock.tick 20
context 'denied with denied reply value', ->
it 'calls response method reply with reply value', -> co ->
director = new Director pretend.robot, deniedReply: 'DENIED'
res = pretend.response 'tester', 'test'
yield director.process res
res.reply.should.have.calledWith 'DENIED'
context 'denied without denied reply value', ->
it 'does not call response reply method', -> co ->
director = new Director pretend.robot
res = pretend.response 'tester', 'test'
yield director.process res
res.reply.should.not.have.called
context 'allowed user with denied reply value', ->
it 'calls .isAllowed to determine if user is allowed or denied', -> co ->
director = new Director pretend.robot
director.names = ['tester']
res = pretend.response 'tester', 'test'
yield director.process res
director.isAllowed.should.have.calledWith res
it 'resolves to same value as .isAllowed', -> co ->
director = new Director pretend.robot
director.names = ['tester']
result = yield director.process pretend.response 'tester', 'test'
result.should.equal director.isAllowed.returnValues.pop()
it 'does not send denied reply', -> co ->
director = new Director pretend.robot
director.names = ['tester']
res = pretend.response 'tester', 'test'
yield director.process res
res.reply.should.not.have.called
describe '.directMatch', ->
context 'allowed user sending message matching directed match', ->
it 'calls .process to perform access checks and reply', -> co ->
director = new Director pretend.robot
pretend.robot.hear /let me in/, ->
director.directMatch /let me in/
director.names = ['tester']
yield pretend.user('tester').send 'let me in'
director.process.should.have.calledOnce
it 'triggers match callback normally', -> co ->
director = new Director pretend.robot
callback = sinon.spy()
pretend.robot.hear /let me in/, callback
director.directMatch /let me in/
director.names = ['tester']
yield pretend.user('tester').send 'let me in'
callback.should.have.calledOnce
context 'denied user sending message matching directed match', ->
it 'calls .process to perform access checks and reply', -> co ->
director = new Director pretend.robot
pretend.robot.hear /let me in/, ->
director.directMatch /let me in/
yield pretend.user('tester').send 'let me in'
director.process.should.have.calledOnce
it 'prevents match callback from triggering', -> co ->
director = new Director pretend.robot
callback = sinon.spy()
pretend.robot.hear /let me in/, callback
director.directMatch /let me in/
yield pretend.user('tester').send 'let me in'
callback.should.not.have.called
context 'denied user sending unmatched message', ->
it 'does not call .process because middleware did not match', -> co ->
director = new Director pretend.robot
pretend.robot.hear /let me in/, ->
director.directMatch /let me in/
yield pretend.user('tester').send 'foo'
director.process.should.not.have.called
describe '.directListener', ->
context 'with message matching directed listener id', ->
it 'calls .process to perform access checks and reply', -> co ->
director = new Director pretend.robot
pretend.robot.hear /let me in/, id: 'entry-test', ->
director.directListener 'entry-test'
yield pretend.user('tester').send 'let me in'
director.process.should.have.calledOnce
it 'triggers match callback when allowed', -> co ->
director = new Director pretend.robot
callback = sinon.spy()
pretend.robot.hear /let me in/, id: 'entry-test', callback
director.directListener 'entry-test'
director.names = ['tester']
yield pretend.user('tester').send 'let me in'
callback.should.have.calledOnce
it 'prevents match callback when denied', -> co ->
director = new Director pretend.robot
callback = sinon.spy()
pretend.robot.hear /let me in/, id: 'entry-test', callback
director.directListener 'entry-test'
yield pretend.user('tester').send 'let me in'
callback.should.not.have.called
context 'with unmatched message', ->
it 'does not call .process because middleware did not match', -> co ->
director = new Director pretend.robot
pretend.robot.hear /let me in/, id: 'entry-test', ->
director.directListener 'entry-test'
yield pretend.user('tester').send 'foo'
director.process.should.not.have.called
describe '.directScene', ->
beforeEach ->
sinon.spy Scene.prototype, 'enter'
sinon.spy Scene.prototype, 'processEnter'
afterEach ->
Scene.prototype.enter.restore()
Scene.prototype.processEnter.restore()
it 'scene enter middleware calls director .process', ->
director = new Director pretend.robot
scene = new Scene pretend.robot
director.directScene scene
res = pretend.response 'tester', 'test'
scene.enter res # won't be alllowed without adding names
.catch -> director.process.should.have.calledWith res
context 'user allowed', ->
it 'allowed scene enter, resolves with context', ->
director = new Director pretend.robot
scene = new Scene pretend.robot
keys = ['<KEY> <KEY> '<KEY> 'arguments<KEY>', '<KEY>']
director.directScene scene
director.names = ['tester']
scene.enter pretend.response 'tester', 'test'
.then (result) -> result.should.have.all.keys keys...
context 'user denied', ->
it 'preempts scene enter, rejects promise', ->
director = new Director pretend.robot
scene = new Scene pretend.robot
director.directScene scene
scene.enter pretend.response 'tester', 'test'
.then () -> throw new Error 'promise should have caught'
.catch (err) -> err.should.be.instanceof Error
context 'with multiple scenes, only one directed', ->
it 'calls process only once for the directed scene', -> co ->
director = new Director pretend.robot
sceneA = new Scene pretend.robot
sceneB = new Scene pretend.robot
director.directScene sceneA
resA = pretend.response 'tester', 'let me in A'
resB = pretend.response 'tester', 'let me in A'
try
yield sceneA.enter resA
yield sceneB.enter resB
director.process.should.have.calledOnce
director.process.should.have.calledWithExactly resA
# TODO: Fix hack below. Because send middleware resolves before scene enter
# middleware, simply yielding on send will not allow asserting on the
# outcome of the enter middleware. Need to refactor pretend with updated
# nubot async features that use nextTick approach to ensure middleware only
# resolves when everything final
# FIX: I have disabled below because when all tests run together, something
# is breaking the setTimeout function, so it never fires the callback? 🤷
###
context 'allowed user sends message matching scene listener', ->
beforeEach -> clock.restore() # need real time to pass
it 'allows scene to process entry', (done) ->
director = new Director pretend.robot
scene = new Scene pretend.robot
director.directScene scene
director.names = ['tester']
callback = sinon.spy()
scene.hear /let me in/, callback
pretend.user('tester').send 'let me in'
setTimeout () ->
scene.processEnter.should.have.calledOnce
callback.should.have.calledOnce
done()
, 50
context 'denied user sends message matching scene listener', ->
it 'prevents the scene from processing entry', (done) ->
director = new Director pretend.robot
scene = new Scene pretend.robot
director.directScene scene
callback = sinon.spy()
scene.hear /let me in/, callback
pretend.user('tester').send 'let me in'
setTimeout () ->
scene.processEnter.should.not.have.called
callback.should.not.have.called
console.log('50 up')
done()
, 10
###
| true | sinon = require 'sinon'
chai = require 'chai'
should = chai.should()
chai.use require 'sinon-chai'
_ = require 'lodash'
co = require 'co'
pretend = require 'hubot-pretend'
Dialogue = require '../../lib/modules/dialogue'
Scene = require '../../lib/modules/scene'
Director = require '../../lib/modules/director'
# init globals
clock = null
describe 'Director', ->
beforeEach ->
pretend.start()
pretend.log.level = 'silent'
clock = sinon.useFakeTimers()
pretend.user 'tester', id:'tester', room: 'testing'
Object.getOwnPropertyNames(Director.prototype).map (key) ->
sinon.spy Director.prototype, key
# generate first response for starting dialogues
pretend.robot.hear /test/, -> # listen to tests
pretend.user('tester').send 'test'
.then => @res = pretend.lastListen()
afterEach ->
pretend.shutdown()
clock.restore()
Object.getOwnPropertyNames(Director.prototype).map (key) ->
Director.prototype[key].restore()
describe 'constructor', ->
context 'without optional args', ->
beforeEach ->
@director = new Director pretend.robot
it 'has empty array names', ->
@director.names.should.eql []
context 'with authorise function', ->
beforeEach ->
@authorise = -> null
@director = new Director pretend.robot, @authorise
it 'stores the given function as its authorise method', ->
@director.authorise = @authorise
context 'with options (denied reply and key string)', ->
beforeEach ->
@director = new Director pretend.robot,
deniedReply: "DENIED!"
key: 'PI:KEY:<KEY>END_PI'
it 'stores passed options in config', ->
@director.config.deniedReply.should.equal "DENIED!"
context 'with env var for config', ->
beforeEach ->
process.env.DENIED_REPLY = "403 Sorry."
@director = new Director pretend.robot
afterEach ->
delete process.env.DENIED_REPLY
it 'has default config with env inherited', ->
@director.config.should.eql
type: 'whitelist'
scope: 'username'
deniedReply: "403 Sorry."
context 'with env var for names', ->
beforeEach ->
process.env.WHITELIST_USERNAMES = 'PI:NAME:<NAME>END_PI'
process.env.WHITELIST_ROOMS = 'Capital'
process.env.BLACKLIST_USERNAMES = 'PI:NAME:<NAME>END_PI,PI:NAME:<NAME>END_PI,PI:NAME:<NAME>END_PI'
process.env.BLACKLIST_ROOMS = 'Labour'
afterEach ->
delete process.env.WHITELIST_USERNAMES
delete process.env.WHITELIST_ROOMS
delete process.env.BLACKLIST_USERNAMES
delete process.env.BLACKLIST_ROOMS
context 'whitelist type, username scope', ->
beforeEach ->
@director = new Director pretend.robot,
type: 'whitelist'
scope: 'username'
it 'stores the whitelisted usernames from env', ->
@director.names.should.eql ['PI:NAME:<NAME>END_PI']
context 'whitelist type, room scope', ->
beforeEach ->
@director = new Director pretend.robot,
type: 'whitelist'
scope: 'room'
it 'stores the whitelisted rooms from env', ->
@director.names.should.eql ['Capital']
context 'blacklist type, username scope', ->
beforeEach ->
@director = new Director pretend.robot,
type: 'blacklist'
scope: 'username'
it 'stores the blacklisted usernames from env', ->
@director.names.should.eql ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI']
context 'blacklist type, room scope', ->
beforeEach ->
@director = new Director pretend.robot,
type: 'blacklist'
scope: 'room'
it 'stores the blacklisted rooms from env', ->
@director.names.should.eql ['Labour']
context 'with invalid option for type', ->
beforeEach ->
try @director = new Director pretend.robot,
type: 'pinklist'
it 'should throw error', ->
Director.prototype.constructor.should.throw
context 'with invalid option for scope', ->
beforeEach ->
try @director = new Director pretend.robot,
scope: 'robot'
it 'should throw error', ->
Director.prototype.constructor.should.throw
context 'without key, with authorise function and options', ->
beforeEach ->
@authorise = -> null
@director = new Director pretend.robot, @authorise,
scope: 'room'
it 'uses options', ->
@director.config.scope.should.equal 'room'
it 'uses authorise function', ->
@director.authorise.should.eql @authorise
describe '.add', ->
beforeEach ->
@director = new Director pretend.robot
context 'given array of names', ->
beforeEach ->
@director.add ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI']
it 'stores them in the names array', ->
@director.names.should.eql ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI']
context 'given single name', ->
beforeEach ->
@director.add 'PI:NAME:<NAME>END_PI'
it 'stores it in the names array', ->
@director.names.should.eql ['PI:NAME:<NAME>END_PI']
context 'given array of names, some existing', ->
beforeEach ->
@director.names = ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI']
@director.add ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI']
it 'adds any missing, not duplicating existing', ->
@director.names.should.eql ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI']
describe '.remove', ->
beforeEach ->
@director = new Director pretend.robot
@director.names = ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI']
context 'given array of names', ->
beforeEach ->
@director.remove ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI']
it 'removes them from the names array', ->
@director.names.should.eql ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI']
context 'with single name', ->
beforeEach ->
@director.remove 'PI:NAME:<NAME>END_PI'
it 'removes it from the names array', ->
@director.names.should.eql ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI']
context 'with array names, some not existing', ->
beforeEach ->
@director.remove ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI']
it 'removes any missing, ignoring others', ->
@director.names.should.eql ['PI:NAME:<NAME>END_PI']
describe '.isAllowed', ->
context 'whitelist without authorise function', ->
context 'no list', ->
it 'returns false', ->
director = new Director pretend.robot
director.isAllowed pretend.response 'tester', 'test'
.should.be.false
context 'has list, username on list', ->
it 'returns true', ->
director = new Director pretend.robot
director.names = ['tester']
director.isAllowed pretend.response 'tester', 'test'
.should.be.true
context 'has list, username not on list', ->
it 'returns false', ->
director = new Director pretend.robot
director.names = ['nobody']
director.isAllowed pretend.response 'tester', 'test'
.should.be.false
context 'blacklist without authorise function', ->
context 'no list', ->
it 'returns true', ->
director = new Director pretend.robot, type: 'blacklist'
director.isAllowed pretend.response 'tester', 'test'
.should.be.true
context 'has list, username on list', ->
it 'returns false', ->
director = new Director pretend.robot, type: 'blacklist'
director.names = ['tester']
director.isAllowed pretend.response 'tester', 'test'
.should.be.false
context 'has list, username not on list', ->
it 'returns true', ->
director = new Director pretend.robot, type: 'blacklist'
director.names = ['nobody']
director.isAllowed pretend.response 'tester', 'test'
.should.be.true
context 'whitelist with authorise function', ->
context 'no list', ->
it 'calls authorise function with username and res', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise
res = pretend.response 'tester', 'test'
director.isAllowed res
authorise.should.have.calledWith 'tester', res
it 'returns value of authorise function', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise
director.isAllowed pretend.response 'tester', 'test'
.should.equal 'AUTHORISE'
context 'has list, username on list', ->
it 'returns true', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise
director.names = ['tester']
director.isAllowed pretend.response 'tester', 'test'
.should.be.true
it 'does not call authorise function', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise
director.names = ['tester']
director.isAllowed pretend.response 'tester', 'test'
authorise.should.not.have.been.calledOnce
context 'has list, username not on list', ->
it 'returns value of authorise function', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise
director.names = ['PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI']
director.isAllowed pretend.response 'tester', 'test'
.should.equal 'AUTHORISE'
context 'blacklist with authorise function', ->
context 'no list', ->
it 'calls authorise function with username and res', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise, type: 'blacklist'
res = pretend.response 'tester', 'test'
director.isAllowed res
authorise.should.have.calledWith 'tester', res
it 'returns value of authorise function', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise, type: 'blacklist'
director.isAllowed pretend.response 'tester', 'test'
.should.equal 'AUTHORISE'
context 'has list, username on list', ->
it 'returns false', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise, type: 'blacklist'
director.names = ['tester']
director.isAllowed pretend.response 'tester', 'test'
.should.be.false
it 'does not call authorise function', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise, type: 'blacklist'
director.names = ['tester']
director.isAllowed pretend.response 'tester', 'test'
authorise.should.not.have.been.calledOnce
context 'has list, username not on list', ->
it 'returns value of authorise function', ->
authorise = sinon.spy -> 'AUTHORISE'
director = new Director pretend.robot, authorise, type: 'blacklist'
director.names = ['PI:NAME:<NAME>END_PIobody']
director.isAllowed pretend.response 'tester', 'test'
.should.equal 'AUTHORISE'
context 'room scope, blacklist room', ->
it 'returns false', ->
director = new Director pretend.robot, type: 'blacklist', scope: 'room'
director.names = ['PI:NAME:<NAME>END_PI']
director.isAllowed pretend.response 'tester', 'test', 'testing'
.should.be.false
context 'room scope, whitelist room', ->
it 'returns true', ->
director = new Director pretend.robot, type: 'whitelist', scope: 'room'
director.names = ['testing']
director.isAllowed pretend.response 'tester', 'test', 'testing'
.should.be.true
describe '.process', ->
it 'calls .isAllowed to determine if user is allowed or denied', ->
director = new Director pretend.robot
scene = new Scene pretend.robot
res = pretend.response 'tester', 'testing'
director.process res
director.isAllowed.should.have.calledWith res
it 'returns a promise', ->
director = new Director pretend.robot
scene = new Scene pretend.robot
director.process pretend.response 'tester', 'testing'
.then.should.be.a 'function'
it 'resolves to .isAllowed result', -> co ->
director = new Director pretend.robot
scene = new Scene pretend.robot
result = yield director.process pretend.response 'tester', 'testing'
result.should.equal director.isAllowed.returnValues.pop()
context 'with async auth function', ->
it 'resolves with auth function result after finished', ->
authorise = -> new Promise (resolve, reject) ->
done = -> resolve 'AUTHORISE'
setTimeout done, 10
new Director pretend.robot, authorise
.process pretend.response 'tester', 'test'
.then (result) -> result.should.equal 'AUTHORISE'
clock.tick 20
context 'denied with denied reply value', ->
it 'calls response method reply with reply value', -> co ->
director = new Director pretend.robot, deniedReply: 'DENIED'
res = pretend.response 'tester', 'test'
yield director.process res
res.reply.should.have.calledWith 'DENIED'
context 'denied without denied reply value', ->
it 'does not call response reply method', -> co ->
director = new Director pretend.robot
res = pretend.response 'tester', 'test'
yield director.process res
res.reply.should.not.have.called
context 'allowed user with denied reply value', ->
it 'calls .isAllowed to determine if user is allowed or denied', -> co ->
director = new Director pretend.robot
director.names = ['tester']
res = pretend.response 'tester', 'test'
yield director.process res
director.isAllowed.should.have.calledWith res
it 'resolves to same value as .isAllowed', -> co ->
director = new Director pretend.robot
director.names = ['tester']
result = yield director.process pretend.response 'tester', 'test'
result.should.equal director.isAllowed.returnValues.pop()
it 'does not send denied reply', -> co ->
director = new Director pretend.robot
director.names = ['tester']
res = pretend.response 'tester', 'test'
yield director.process res
res.reply.should.not.have.called
describe '.directMatch', ->
context 'allowed user sending message matching directed match', ->
it 'calls .process to perform access checks and reply', -> co ->
director = new Director pretend.robot
pretend.robot.hear /let me in/, ->
director.directMatch /let me in/
director.names = ['tester']
yield pretend.user('tester').send 'let me in'
director.process.should.have.calledOnce
it 'triggers match callback normally', -> co ->
director = new Director pretend.robot
callback = sinon.spy()
pretend.robot.hear /let me in/, callback
director.directMatch /let me in/
director.names = ['tester']
yield pretend.user('tester').send 'let me in'
callback.should.have.calledOnce
context 'denied user sending message matching directed match', ->
it 'calls .process to perform access checks and reply', -> co ->
director = new Director pretend.robot
pretend.robot.hear /let me in/, ->
director.directMatch /let me in/
yield pretend.user('tester').send 'let me in'
director.process.should.have.calledOnce
it 'prevents match callback from triggering', -> co ->
director = new Director pretend.robot
callback = sinon.spy()
pretend.robot.hear /let me in/, callback
director.directMatch /let me in/
yield pretend.user('tester').send 'let me in'
callback.should.not.have.called
context 'denied user sending unmatched message', ->
it 'does not call .process because middleware did not match', -> co ->
director = new Director pretend.robot
pretend.robot.hear /let me in/, ->
director.directMatch /let me in/
yield pretend.user('tester').send 'foo'
director.process.should.not.have.called
describe '.directListener', ->
context 'with message matching directed listener id', ->
it 'calls .process to perform access checks and reply', -> co ->
director = new Director pretend.robot
pretend.robot.hear /let me in/, id: 'entry-test', ->
director.directListener 'entry-test'
yield pretend.user('tester').send 'let me in'
director.process.should.have.calledOnce
it 'triggers match callback when allowed', -> co ->
director = new Director pretend.robot
callback = sinon.spy()
pretend.robot.hear /let me in/, id: 'entry-test', callback
director.directListener 'entry-test'
director.names = ['tester']
yield pretend.user('tester').send 'let me in'
callback.should.have.calledOnce
it 'prevents match callback when denied', -> co ->
director = new Director pretend.robot
callback = sinon.spy()
pretend.robot.hear /let me in/, id: 'entry-test', callback
director.directListener 'entry-test'
yield pretend.user('tester').send 'let me in'
callback.should.not.have.called
context 'with unmatched message', ->
it 'does not call .process because middleware did not match', -> co ->
director = new Director pretend.robot
pretend.robot.hear /let me in/, id: 'entry-test', ->
director.directListener 'entry-test'
yield pretend.user('tester').send 'foo'
director.process.should.not.have.called
describe '.directScene', ->
beforeEach ->
sinon.spy Scene.prototype, 'enter'
sinon.spy Scene.prototype, 'processEnter'
afterEach ->
Scene.prototype.enter.restore()
Scene.prototype.processEnter.restore()
it 'scene enter middleware calls director .process', ->
director = new Director pretend.robot
scene = new Scene pretend.robot
director.directScene scene
res = pretend.response 'tester', 'test'
scene.enter res # won't be alllowed without adding names
.catch -> director.process.should.have.calledWith res
context 'user allowed', ->
it 'allowed scene enter, resolves with context', ->
director = new Director pretend.robot
scene = new Scene pretend.robot
keys = ['PI:KEY:<KEY>END_PI PI:KEY:<KEY>END_PI 'PI:KEY:<KEY>END_PI 'argumentsPI:KEY:<KEY>END_PI', 'PI:KEY:<KEY>END_PI']
director.directScene scene
director.names = ['tester']
scene.enter pretend.response 'tester', 'test'
.then (result) -> result.should.have.all.keys keys...
context 'user denied', ->
it 'preempts scene enter, rejects promise', ->
director = new Director pretend.robot
scene = new Scene pretend.robot
director.directScene scene
scene.enter pretend.response 'tester', 'test'
.then () -> throw new Error 'promise should have caught'
.catch (err) -> err.should.be.instanceof Error
context 'with multiple scenes, only one directed', ->
it 'calls process only once for the directed scene', -> co ->
director = new Director pretend.robot
sceneA = new Scene pretend.robot
sceneB = new Scene pretend.robot
director.directScene sceneA
resA = pretend.response 'tester', 'let me in A'
resB = pretend.response 'tester', 'let me in A'
try
yield sceneA.enter resA
yield sceneB.enter resB
director.process.should.have.calledOnce
director.process.should.have.calledWithExactly resA
# TODO: Fix hack below. Because send middleware resolves before scene enter
# middleware, simply yielding on send will not allow asserting on the
# outcome of the enter middleware. Need to refactor pretend with updated
# nubot async features that use nextTick approach to ensure middleware only
# resolves when everything final
# FIX: I have disabled below because when all tests run together, something
# is breaking the setTimeout function, so it never fires the callback? 🤷
###
context 'allowed user sends message matching scene listener', ->
beforeEach -> clock.restore() # need real time to pass
it 'allows scene to process entry', (done) ->
director = new Director pretend.robot
scene = new Scene pretend.robot
director.directScene scene
director.names = ['tester']
callback = sinon.spy()
scene.hear /let me in/, callback
pretend.user('tester').send 'let me in'
setTimeout () ->
scene.processEnter.should.have.calledOnce
callback.should.have.calledOnce
done()
, 50
context 'denied user sends message matching scene listener', ->
it 'prevents the scene from processing entry', (done) ->
director = new Director pretend.robot
scene = new Scene pretend.robot
director.directScene scene
callback = sinon.spy()
scene.hear /let me in/, callback
pretend.user('tester').send 'let me in'
setTimeout () ->
scene.processEnter.should.not.have.called
callback.should.not.have.called
console.log('50 up')
done()
, 10
###
|
[
{
"context": "\n\t# based on http://stackoverflow.com/a/5158301 by James\n\tgetParameterByName = (name) ->\n\t\tif Utils.isInsi",
"end": 167,
"score": 0.7490590810775757,
"start": 162,
"tag": "NAME",
"value": "James"
},
{
"context": "\t# based on http://stackoverflow.com/a/11654596 b... | QueryInterface.coffee | marckrenn/framer-QueryInterface | 27 |
class exports.QueryInterface extends Framer.BaseClass
_allQueryInterfaces = [] unless _allQueryInterfaces?
# based on http://stackoverflow.com/a/5158301 by James
getParameterByName = (name) ->
if Utils.isInsideFramerCloud()
location = window.parent.location.search
else
location = window.location.search
match = RegExp("[?&]#{name}=([^&]*)").exec(location)
match and decodeURIComponent(match[1].replace(/\+/g, " "))
# based on http://stackoverflow.com/a/11654596 by ellemayo
updateQueryString = (key, value, url) ->
unless url?
if Utils.isInsideFramerCloud()
url = window.parent.location.href
else
url = window.location.href
key = key.replace("#", "%23")
value = value.replace("#", "%23") if typeof value is "string"
re = new RegExp("([?&])#{key}=.*?(&|#|$)(.*)", "gi")
hash = undefined
if re.test(url)
if typeof value isnt "undefined" and value isnt null
url.replace(re, "$1#{key}=#{value}$2$3")
else
hash = url.split("#")
url = hash[0].replace(re, "$1$3").replace(/(&|\?)$/, "")
url += "##{hash[1]}" if typeof hash[1] isnt "undefined" and hash[1] isnt null
return url
else
if typeof value isnt "undefined" and value isnt null
separator = if url.indexOf("?") isnt -1 then "&" else "?"
hash = url.split("#")
url = "#{hash[0]}#{separator}#{key}=#{value}"
url += "##{hash[1]}" if typeof hash[1] isnt "undefined" and hash[1] isnt null
return url
else url
@define "value",
get: ->
if Utils.isInsideFramerCloud()
locationPathName = window.parent.location.pathname
else
locationPathName = window.location.pathname
if getParameterByName(@key) and @fetchQuery
@value = @_parse(getParameterByName(@key), false)
else if @saveLocal is false or @loadLocal is false
if @_val is undefined or @_val is "undefined"
@default
else @_val
else if localStorage.getItem("#{locationPathName}?#{@key}=") and @loadLocal
localValue = localStorage.getItem("#{locationPathName}?#{@key}=")
if localValue is undefined or localValue is "undefined"
@reset()
else
val = @_parse(localValue, false)
else @value = @default
set: (val) ->
return if @default is undefined or @key is undefined
@_val = val = @_parse(val, true)
if @saveLocal
localStorage.setItem("#{window.location.pathname}?#{@key}=", val)
if @publish is true
newUrl = updateQueryString(@key, val)
if Utils.isFramerStudio() isnt true or @_forcePublish
try window.history.replaceState({path: newUrl}, "#{@key} changed to #{val}", newUrl)
if Utils.isInsideIframe()
try window.parent.history.replaceState({path: newUrl}, "#{@key} changed to #{val}", newUrl)
else
newUrl = updateQueryString(@key)
if Utils.isInsideIframe()
window.parent.history.replaceState({path: newUrl}, "#{@key} removed from URI", newUrl)
else if Utils.isInsideIframe() is false
window.history.replaceState({path: newUrl}, "#{@key} removed from URI", newUrl)
@define "type", get: -> typeof(@default)
@define "default",
get: -> @_default
set: (val) ->
if Utils.isInsideFramerCloud()
locationPathName = window.parent.location.pathname
else
locationPathName = window.parent.location.pathname
return if typeof val is "function" or @key is undefined
@_default = val
if localStorage.getItem("#{locationPathName}?#{@key}Default=")
savedDefault = localStorage.getItem("#{locationPathName}?#{@key}Default=")
parsedVal = val.toString()
localStorage.setItem("#{locationPathName}?#{@key}Default=", parsedVal)
if parsedVal isnt savedDefault
@reset() if Utils.isFramerStudio()
if localStorage.getItem("#{locationPathName}?#{@key}Type=")
savedType = localStorage.getItem("#{locationPathName}?#{@key}Type=")
newType = typeof val
localStorage.setItem("#{locationPathName}?#{@key}Type=", newType)
if savedType and newType isnt savedType
@reset()
constructor: (@options = {}) ->
@key = @options.key ?= undefined
@publish = @options.publish ?= true
@fetchQuery = @options.fetchQuery ?= true
@saveLocal = @options.saveLocal ?= true
@loadLocal = @options.loadLocal ?= true
@_forcePublish = false
super
_allQueryInterfaces.push(this)
@value = @value
_parse: (val, set) ->
if val is "/reset/" or val is "/default/"
val = @default
else
switch typeof @default
when "number"
if val is false or val is null or isNaN(val)
val = 0
else if val
val = Number(val)
val = @default if isNaN(val)
else val = @default
when "boolean"
switch typeof val
when "object" then val = Boolean(val)
when "undefined" then val = false
when "string"
if val.toLowerCase() is "true"
val = true
else if val.toLowerCase() is "false"
val = false
else val = true
when "number"
if val is 0 then val = false else val = true
when "string"
if val then val = val.toString() else val = @default
when "object"
if set
unless val is undefined or val is null
val = JSON.stringify(val)
else val = @default
else
unless val is undefined or val is null or val is "undefined" or val is "null"
val = JSON.parse(val)
else val = @default
return val
reset: -> @value = @default
@resetAll = ->
queryInterface.reset() for queryInterface in _allQueryInterfaces
newUrl = window.location.href.split('?')[0]
window.history.replaceState({path: newUrl},"Reset all QueryInterfaces", newUrl) if newUrl?
location.reload()
@query = ->
for queryInterface in _allQueryInterfaces
queryInterface._forcePublish = true
queryInterface.value = queryInterface.value
if Utils.isFramerStudio()
query = "?#{updateQueryString("reloader").split('?')[1]}".replace(/%22/g, "\"")
else
query =(window.location.search).replace(/%22/g, "\"")
for queryInterface in _allQueryInterfaces
queryInterface._forcePublish = false
queryInterface.value = queryInterface.value
return query
| 74456 |
class exports.QueryInterface extends Framer.BaseClass
_allQueryInterfaces = [] unless _allQueryInterfaces?
# based on http://stackoverflow.com/a/5158301 by <NAME>
getParameterByName = (name) ->
if Utils.isInsideFramerCloud()
location = window.parent.location.search
else
location = window.location.search
match = RegExp("[?&]#{name}=([^&]*)").exec(location)
match and decodeURIComponent(match[1].replace(/\+/g, " "))
# based on http://stackoverflow.com/a/11654596 by ellemayo
updateQueryString = (key, value, url) ->
unless url?
if Utils.isInsideFramerCloud()
url = window.parent.location.href
else
url = window.location.href
key = key.replace("#", "%23")
value = value.replace("#", "%23") if typeof value is "string"
re = new RegExp("([?&])#{key}=.*?(&|#|$)(.*)", "gi")
hash = undefined
if re.test(url)
if typeof value isnt "undefined" and value isnt null
url.replace(re, "$1#{key}=#{value}$2$3")
else
hash = url.split("#")
url = hash[0].replace(re, "$1$3").replace(/(&|\?)$/, "")
url += "##{hash[1]}" if typeof hash[1] isnt "undefined" and hash[1] isnt null
return url
else
if typeof value isnt "undefined" and value isnt null
separator = if url.indexOf("?") isnt -1 then "&" else "?"
hash = url.split("#")
url = "#{hash[0]}#{separator}#{key}=#{value}"
url += "##{hash[1]}" if typeof hash[1] isnt "undefined" and hash[1] isnt null
return url
else url
@define "value",
get: ->
if Utils.isInsideFramerCloud()
locationPathName = window.parent.location.pathname
else
locationPathName = window.location.pathname
if getParameterByName(@key) and @fetchQuery
@value = @_parse(getParameterByName(@key), false)
else if @saveLocal is false or @loadLocal is false
if @_val is undefined or @_val is "undefined"
@default
else @_val
else if localStorage.getItem("#{locationPathName}?#{@key}=") and @loadLocal
localValue = localStorage.getItem("#{locationPathName}?#{@key}=")
if localValue is undefined or localValue is "undefined"
@reset()
else
val = @_parse(localValue, false)
else @value = @default
set: (val) ->
return if @default is undefined or @key is undefined
@_val = val = @_parse(val, true)
if @saveLocal
localStorage.setItem("#{window.location.pathname}?#{@key}=", val)
if @publish is true
newUrl = updateQueryString(@key, val)
if Utils.isFramerStudio() isnt true or @_forcePublish
try window.history.replaceState({path: newUrl}, "#{@key} changed to #{val}", newUrl)
if Utils.isInsideIframe()
try window.parent.history.replaceState({path: newUrl}, "#{@key} changed to #{val}", newUrl)
else
newUrl = updateQueryString(@key)
if Utils.isInsideIframe()
window.parent.history.replaceState({path: newUrl}, "#{@key} removed from URI", newUrl)
else if Utils.isInsideIframe() is false
window.history.replaceState({path: newUrl}, "#{@key} removed from URI", newUrl)
@define "type", get: -> typeof(@default)
@define "default",
get: -> @_default
set: (val) ->
if Utils.isInsideFramerCloud()
locationPathName = window.parent.location.pathname
else
locationPathName = window.parent.location.pathname
return if typeof val is "function" or @key is undefined
@_default = val
if localStorage.getItem("#{locationPathName}?#{@key}Default=")
savedDefault = localStorage.getItem("#{locationPathName}?#{@key}Default=")
parsedVal = val.toString()
localStorage.setItem("#{locationPathName}?#{@key}Default=", parsedVal)
if parsedVal isnt savedDefault
@reset() if Utils.isFramerStudio()
if localStorage.getItem("#{locationPathName}?#{@key}Type=")
savedType = localStorage.getItem("#{locationPathName}?#{@key}Type=")
newType = typeof val
localStorage.setItem("#{locationPathName}?#{@key}Type=", newType)
if savedType and newType isnt savedType
@reset()
constructor: (@options = {}) ->
@key = @options.key ?= undefined
@publish = @options.publish ?= true
@fetchQuery = @options.fetchQuery ?= true
@saveLocal = @options.saveLocal ?= true
@loadLocal = @options.loadLocal ?= true
@_forcePublish = false
super
_allQueryInterfaces.push(this)
@value = @value
_parse: (val, set) ->
if val is "/reset/" or val is "/default/"
val = @default
else
switch typeof @default
when "number"
if val is false or val is null or isNaN(val)
val = 0
else if val
val = Number(val)
val = @default if isNaN(val)
else val = @default
when "boolean"
switch typeof val
when "object" then val = Boolean(val)
when "undefined" then val = false
when "string"
if val.toLowerCase() is "true"
val = true
else if val.toLowerCase() is "false"
val = false
else val = true
when "number"
if val is 0 then val = false else val = true
when "string"
if val then val = val.toString() else val = @default
when "object"
if set
unless val is undefined or val is null
val = JSON.stringify(val)
else val = @default
else
unless val is undefined or val is null or val is "undefined" or val is "null"
val = JSON.parse(val)
else val = @default
return val
reset: -> @value = @default
@resetAll = ->
queryInterface.reset() for queryInterface in _allQueryInterfaces
newUrl = window.location.href.split('?')[0]
window.history.replaceState({path: newUrl},"Reset all QueryInterfaces", newUrl) if newUrl?
location.reload()
@query = ->
for queryInterface in _allQueryInterfaces
queryInterface._forcePublish = true
queryInterface.value = queryInterface.value
if Utils.isFramerStudio()
query = "?#{updateQueryString("reloader").split('?')[1]}".replace(/%22/g, "\"")
else
query =(window.location.search).replace(/%22/g, "\"")
for queryInterface in _allQueryInterfaces
queryInterface._forcePublish = false
queryInterface.value = queryInterface.value
return query
| true |
class exports.QueryInterface extends Framer.BaseClass
_allQueryInterfaces = [] unless _allQueryInterfaces?
# based on http://stackoverflow.com/a/5158301 by PI:NAME:<NAME>END_PI
getParameterByName = (name) ->
if Utils.isInsideFramerCloud()
location = window.parent.location.search
else
location = window.location.search
match = RegExp("[?&]#{name}=([^&]*)").exec(location)
match and decodeURIComponent(match[1].replace(/\+/g, " "))
# based on http://stackoverflow.com/a/11654596 by ellemayo
updateQueryString = (key, value, url) ->
unless url?
if Utils.isInsideFramerCloud()
url = window.parent.location.href
else
url = window.location.href
key = key.replace("#", "%23")
value = value.replace("#", "%23") if typeof value is "string"
re = new RegExp("([?&])#{key}=.*?(&|#|$)(.*)", "gi")
hash = undefined
if re.test(url)
if typeof value isnt "undefined" and value isnt null
url.replace(re, "$1#{key}=#{value}$2$3")
else
hash = url.split("#")
url = hash[0].replace(re, "$1$3").replace(/(&|\?)$/, "")
url += "##{hash[1]}" if typeof hash[1] isnt "undefined" and hash[1] isnt null
return url
else
if typeof value isnt "undefined" and value isnt null
separator = if url.indexOf("?") isnt -1 then "&" else "?"
hash = url.split("#")
url = "#{hash[0]}#{separator}#{key}=#{value}"
url += "##{hash[1]}" if typeof hash[1] isnt "undefined" and hash[1] isnt null
return url
else url
@define "value",
get: ->
if Utils.isInsideFramerCloud()
locationPathName = window.parent.location.pathname
else
locationPathName = window.location.pathname
if getParameterByName(@key) and @fetchQuery
@value = @_parse(getParameterByName(@key), false)
else if @saveLocal is false or @loadLocal is false
if @_val is undefined or @_val is "undefined"
@default
else @_val
else if localStorage.getItem("#{locationPathName}?#{@key}=") and @loadLocal
localValue = localStorage.getItem("#{locationPathName}?#{@key}=")
if localValue is undefined or localValue is "undefined"
@reset()
else
val = @_parse(localValue, false)
else @value = @default
set: (val) ->
return if @default is undefined or @key is undefined
@_val = val = @_parse(val, true)
if @saveLocal
localStorage.setItem("#{window.location.pathname}?#{@key}=", val)
if @publish is true
newUrl = updateQueryString(@key, val)
if Utils.isFramerStudio() isnt true or @_forcePublish
try window.history.replaceState({path: newUrl}, "#{@key} changed to #{val}", newUrl)
if Utils.isInsideIframe()
try window.parent.history.replaceState({path: newUrl}, "#{@key} changed to #{val}", newUrl)
else
newUrl = updateQueryString(@key)
if Utils.isInsideIframe()
window.parent.history.replaceState({path: newUrl}, "#{@key} removed from URI", newUrl)
else if Utils.isInsideIframe() is false
window.history.replaceState({path: newUrl}, "#{@key} removed from URI", newUrl)
@define "type", get: -> typeof(@default)
@define "default",
get: -> @_default
set: (val) ->
if Utils.isInsideFramerCloud()
locationPathName = window.parent.location.pathname
else
locationPathName = window.parent.location.pathname
return if typeof val is "function" or @key is undefined
@_default = val
if localStorage.getItem("#{locationPathName}?#{@key}Default=")
savedDefault = localStorage.getItem("#{locationPathName}?#{@key}Default=")
parsedVal = val.toString()
localStorage.setItem("#{locationPathName}?#{@key}Default=", parsedVal)
if parsedVal isnt savedDefault
@reset() if Utils.isFramerStudio()
if localStorage.getItem("#{locationPathName}?#{@key}Type=")
savedType = localStorage.getItem("#{locationPathName}?#{@key}Type=")
newType = typeof val
localStorage.setItem("#{locationPathName}?#{@key}Type=", newType)
if savedType and newType isnt savedType
@reset()
constructor: (@options = {}) ->
@key = @options.key ?= undefined
@publish = @options.publish ?= true
@fetchQuery = @options.fetchQuery ?= true
@saveLocal = @options.saveLocal ?= true
@loadLocal = @options.loadLocal ?= true
@_forcePublish = false
super
_allQueryInterfaces.push(this)
@value = @value
_parse: (val, set) ->
if val is "/reset/" or val is "/default/"
val = @default
else
switch typeof @default
when "number"
if val is false or val is null or isNaN(val)
val = 0
else if val
val = Number(val)
val = @default if isNaN(val)
else val = @default
when "boolean"
switch typeof val
when "object" then val = Boolean(val)
when "undefined" then val = false
when "string"
if val.toLowerCase() is "true"
val = true
else if val.toLowerCase() is "false"
val = false
else val = true
when "number"
if val is 0 then val = false else val = true
when "string"
if val then val = val.toString() else val = @default
when "object"
if set
unless val is undefined or val is null
val = JSON.stringify(val)
else val = @default
else
unless val is undefined or val is null or val is "undefined" or val is "null"
val = JSON.parse(val)
else val = @default
return val
reset: -> @value = @default
@resetAll = ->
queryInterface.reset() for queryInterface in _allQueryInterfaces
newUrl = window.location.href.split('?')[0]
window.history.replaceState({path: newUrl},"Reset all QueryInterfaces", newUrl) if newUrl?
location.reload()
@query = ->
for queryInterface in _allQueryInterfaces
queryInterface._forcePublish = true
queryInterface.value = queryInterface.value
if Utils.isFramerStudio()
query = "?#{updateQueryString("reloader").split('?')[1]}".replace(/%22/g, "\"")
else
query =(window.location.search).replace(/%22/g, "\"")
for queryInterface in _allQueryInterfaces
queryInterface._forcePublish = false
queryInterface.value = queryInterface.value
return query
|
[
{
"context": "htmlContent) ->\n data =\n from: \"Kresus <kresus-noreply@cozycloud.cc>\"\n subjec",
"end": 4145,
"score": 0.9838488101959229,
"start": 4139,
"tag": "NAME",
"value": "Kresus"
},
{
"context": "ent) ->\n data =\n from: \"Kres... | server/lib/report-manager.coffee | almet/kresus | 0 | moment = require 'moment'
Client = require('request-json').JsonClient
jade = require 'jade'
BankAlert = require '../models/bankalert'
BankOperation = require '../models/bankoperation'
BankAccount = require '../models/bankaccount'
class ReportManager
constructor: ->
@client = new Client "http://localhost:9101/"
unless process.env.NODE_ENV not in ["production", "test"]
@client.setBasicAuth process.env.NAME, process.env.TOKEN
start: ->
@prepareNextCheck()
prepareNextCheck: ->
# day after between 02:00am and 04:00am
# this must be triggered AFTER accounts were polled
delta = Math.floor(Math.random() * 120)
now = moment()
nextUpdate = now.clone().add(1, 'days')
.hours(2)
.minutes(delta)
.seconds(0)
format = "DD/MM/YYYY [at] HH:mm:ss"
console.log "> Next check to send report #{nextUpdate.format(format)}"
@timeout = setTimeout(
() =>
@manageReports()
, nextUpdate.diff(now))
manageReports: ->
now = moment()
@prepareReport 'daily'
@prepareReport 'weekly' if now.day() is 1
@prepareReport 'monthly' if now.date() is 1
@prepareNextCheck()
prepareReport: (frequency) ->
console.log "Checking if user has enabled #{frequency} report..."
BankAlert.allReportsByFrequency frequency, (err, alerts) =>
if err?
msg = "Couldn't retrieve alerts -- #{err}"
console.log msg
callback msg
else
# bank accounts for reports should be generated for
includedBankAccounts = []
includedBankAccounts.push alert.bankAccount for alert in alerts
if alerts.length > 0
@_prepareOperationsData frequency, includedBankAccounts, \
(err, operationsByAccount) =>
@_prepareBalancesData frequency, includedBankAccounts, \
(err, accounts) =>
if accounts.length > 0
textContent = \
@_getTextContent operationsByAccount,\
accounts, frequency
htmlContent = \
@_getHtmlContent operationsByAccount,\
accounts, frequency
@_sendReport frequency, textContent, htmlContent
else
console.log "User hasn't enabled #{frequency} report."
_prepareBalancesData: (frequency, accounts, callback) ->
BankAccount.findMany accounts, (err, accounts) ->
if err?
msg = "Couldn't retrieve accounts -- #{err}"
console.log msg
callback msg
else
callback null, accounts
_prepareOperationsData: (frequency, accounts, callback) ->
BankOperation.allFromBankAccount accounts, (err, operations) =>
if err?
msg = "Couldn't retrieve operations -- #{err}"
console.log msg
callback msg
else
# choose the ones which are in the right time frame
operationsByAccount = {}
timeFrame = @_getTimeFrame frequency
for operation in operations
account = operation.bankAccount
if operation.dateImport then date = operation.dateImport
else date = operation.date
if moment(date).isAfter timeFrame
unless operationsByAccount[account]?
operationsByAccount[account] = []
operationsByAccount[account].push operation
callback null, operationsByAccount
_sendReport: (frequency, textContent, htmlContent) ->
data =
from: "Kresus <kresus-noreply@cozycloud.cc>"
subject: "[Kresus] #{frequency} report"
content: textContent
html: htmlContent
@client.post "mail/to-user/", data, (err, res, body) ->
if err?
msg = "An error occurred while sending an email"
console.log "#{msg} -- #{err}"
console.log res.statusCode if res?
else
console.log "Report sent."
_getTextContent: (operationsByAccount, accounts, frequency) ->
today = moment().format "DD/MM/YYYY"
output = "Votre rapport bancaire du #{today}\n\n"
output += "Solde de vos comptes :\n"
for account in accounts
lastCheck = moment(account.lastCheck).format "DD/MM/YYYY"
output += "\t* #{account.accountNumber} (#{account.title}) " + \
"# #{account.getBalance()}€ " + \
"(Dernière vérification : #{lastCheck})\n"
if Object.keys(operationsByAccount).length > 0
output += "\nNouvelles opérations importées :\n"
for account, operations of operationsByAccount
output += "Compte n°#{account}\n"
for operation in operations
output += "\t* #{operation.title} # #{operation.amount}€"+ \
" # (#{moment(operation.date).format("DD/MM/YYYY")})\n"
else
output = "Aucune nouvelle opération n'a été importée #{frequency}."
return output
_getHtmlContent: (operationsByAccount, accounts, frequency) ->
today = moment().format "DD/MM/YYYY"
options =
today: today
accounts: accounts
operationsByAccount: operationsByAccount
return jade.renderFile './server/views/mail-report.jade', options
_getTimeFrame: (frequency) ->
timeFrame = moment()
switch frequency
when "daily"
return timeFrame.subtract("days", 1)
.hours(0).minutes(0).seconds(0)
when "weekly"
return timeFrame.subtract("days", 7)
.hours(0).minutes(0).seconds(0)
when "monthly"
return timeFrame.subtract("months", 1)
.days(0).hours(0).minutes(0).seconds(0)
module.exports = new ReportManager()
| 3645 | moment = require 'moment'
Client = require('request-json').JsonClient
jade = require 'jade'
BankAlert = require '../models/bankalert'
BankOperation = require '../models/bankoperation'
BankAccount = require '../models/bankaccount'
class ReportManager
constructor: ->
@client = new Client "http://localhost:9101/"
unless process.env.NODE_ENV not in ["production", "test"]
@client.setBasicAuth process.env.NAME, process.env.TOKEN
start: ->
@prepareNextCheck()
prepareNextCheck: ->
# day after between 02:00am and 04:00am
# this must be triggered AFTER accounts were polled
delta = Math.floor(Math.random() * 120)
now = moment()
nextUpdate = now.clone().add(1, 'days')
.hours(2)
.minutes(delta)
.seconds(0)
format = "DD/MM/YYYY [at] HH:mm:ss"
console.log "> Next check to send report #{nextUpdate.format(format)}"
@timeout = setTimeout(
() =>
@manageReports()
, nextUpdate.diff(now))
manageReports: ->
now = moment()
@prepareReport 'daily'
@prepareReport 'weekly' if now.day() is 1
@prepareReport 'monthly' if now.date() is 1
@prepareNextCheck()
prepareReport: (frequency) ->
console.log "Checking if user has enabled #{frequency} report..."
BankAlert.allReportsByFrequency frequency, (err, alerts) =>
if err?
msg = "Couldn't retrieve alerts -- #{err}"
console.log msg
callback msg
else
# bank accounts for reports should be generated for
includedBankAccounts = []
includedBankAccounts.push alert.bankAccount for alert in alerts
if alerts.length > 0
@_prepareOperationsData frequency, includedBankAccounts, \
(err, operationsByAccount) =>
@_prepareBalancesData frequency, includedBankAccounts, \
(err, accounts) =>
if accounts.length > 0
textContent = \
@_getTextContent operationsByAccount,\
accounts, frequency
htmlContent = \
@_getHtmlContent operationsByAccount,\
accounts, frequency
@_sendReport frequency, textContent, htmlContent
else
console.log "User hasn't enabled #{frequency} report."
_prepareBalancesData: (frequency, accounts, callback) ->
BankAccount.findMany accounts, (err, accounts) ->
if err?
msg = "Couldn't retrieve accounts -- #{err}"
console.log msg
callback msg
else
callback null, accounts
_prepareOperationsData: (frequency, accounts, callback) ->
BankOperation.allFromBankAccount accounts, (err, operations) =>
if err?
msg = "Couldn't retrieve operations -- #{err}"
console.log msg
callback msg
else
# choose the ones which are in the right time frame
operationsByAccount = {}
timeFrame = @_getTimeFrame frequency
for operation in operations
account = operation.bankAccount
if operation.dateImport then date = operation.dateImport
else date = operation.date
if moment(date).isAfter timeFrame
unless operationsByAccount[account]?
operationsByAccount[account] = []
operationsByAccount[account].push operation
callback null, operationsByAccount
_sendReport: (frequency, textContent, htmlContent) ->
data =
from: "<NAME> <<EMAIL>>"
subject: "[Kresus] #{frequency} report"
content: textContent
html: htmlContent
@client.post "mail/to-user/", data, (err, res, body) ->
if err?
msg = "An error occurred while sending an email"
console.log "#{msg} -- #{err}"
console.log res.statusCode if res?
else
console.log "Report sent."
_getTextContent: (operationsByAccount, accounts, frequency) ->
today = moment().format "DD/MM/YYYY"
output = "Votre rapport bancaire du #{today}\n\n"
output += "Solde de vos comptes :\n"
for account in accounts
lastCheck = moment(account.lastCheck).format "DD/MM/YYYY"
output += "\t* #{account.accountNumber} (#{account.title}) " + \
"# #{account.getBalance()}€ " + \
"(Dernière vérification : #{lastCheck})\n"
if Object.keys(operationsByAccount).length > 0
output += "\nNouvelles opérations importées :\n"
for account, operations of operationsByAccount
output += "Compte n°#{account}\n"
for operation in operations
output += "\t* #{operation.title} # #{operation.amount}€"+ \
" # (#{moment(operation.date).format("DD/MM/YYYY")})\n"
else
output = "Aucune nouvelle opération n'a été importée #{frequency}."
return output
_getHtmlContent: (operationsByAccount, accounts, frequency) ->
today = moment().format "DD/MM/YYYY"
options =
today: today
accounts: accounts
operationsByAccount: operationsByAccount
return jade.renderFile './server/views/mail-report.jade', options
_getTimeFrame: (frequency) ->
timeFrame = moment()
switch frequency
when "daily"
return timeFrame.subtract("days", 1)
.hours(0).minutes(0).seconds(0)
when "weekly"
return timeFrame.subtract("days", 7)
.hours(0).minutes(0).seconds(0)
when "monthly"
return timeFrame.subtract("months", 1)
.days(0).hours(0).minutes(0).seconds(0)
module.exports = new ReportManager()
| true | moment = require 'moment'
Client = require('request-json').JsonClient
jade = require 'jade'
BankAlert = require '../models/bankalert'
BankOperation = require '../models/bankoperation'
BankAccount = require '../models/bankaccount'
class ReportManager
constructor: ->
@client = new Client "http://localhost:9101/"
unless process.env.NODE_ENV not in ["production", "test"]
@client.setBasicAuth process.env.NAME, process.env.TOKEN
start: ->
@prepareNextCheck()
prepareNextCheck: ->
# day after between 02:00am and 04:00am
# this must be triggered AFTER accounts were polled
delta = Math.floor(Math.random() * 120)
now = moment()
nextUpdate = now.clone().add(1, 'days')
.hours(2)
.minutes(delta)
.seconds(0)
format = "DD/MM/YYYY [at] HH:mm:ss"
console.log "> Next check to send report #{nextUpdate.format(format)}"
@timeout = setTimeout(
() =>
@manageReports()
, nextUpdate.diff(now))
manageReports: ->
now = moment()
@prepareReport 'daily'
@prepareReport 'weekly' if now.day() is 1
@prepareReport 'monthly' if now.date() is 1
@prepareNextCheck()
prepareReport: (frequency) ->
console.log "Checking if user has enabled #{frequency} report..."
BankAlert.allReportsByFrequency frequency, (err, alerts) =>
if err?
msg = "Couldn't retrieve alerts -- #{err}"
console.log msg
callback msg
else
# bank accounts for reports should be generated for
includedBankAccounts = []
includedBankAccounts.push alert.bankAccount for alert in alerts
if alerts.length > 0
@_prepareOperationsData frequency, includedBankAccounts, \
(err, operationsByAccount) =>
@_prepareBalancesData frequency, includedBankAccounts, \
(err, accounts) =>
if accounts.length > 0
textContent = \
@_getTextContent operationsByAccount,\
accounts, frequency
htmlContent = \
@_getHtmlContent operationsByAccount,\
accounts, frequency
@_sendReport frequency, textContent, htmlContent
else
console.log "User hasn't enabled #{frequency} report."
_prepareBalancesData: (frequency, accounts, callback) ->
BankAccount.findMany accounts, (err, accounts) ->
if err?
msg = "Couldn't retrieve accounts -- #{err}"
console.log msg
callback msg
else
callback null, accounts
_prepareOperationsData: (frequency, accounts, callback) ->
BankOperation.allFromBankAccount accounts, (err, operations) =>
if err?
msg = "Couldn't retrieve operations -- #{err}"
console.log msg
callback msg
else
# choose the ones which are in the right time frame
operationsByAccount = {}
timeFrame = @_getTimeFrame frequency
for operation in operations
account = operation.bankAccount
if operation.dateImport then date = operation.dateImport
else date = operation.date
if moment(date).isAfter timeFrame
unless operationsByAccount[account]?
operationsByAccount[account] = []
operationsByAccount[account].push operation
callback null, operationsByAccount
_sendReport: (frequency, textContent, htmlContent) ->
data =
from: "PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>"
subject: "[Kresus] #{frequency} report"
content: textContent
html: htmlContent
@client.post "mail/to-user/", data, (err, res, body) ->
if err?
msg = "An error occurred while sending an email"
console.log "#{msg} -- #{err}"
console.log res.statusCode if res?
else
console.log "Report sent."
_getTextContent: (operationsByAccount, accounts, frequency) ->
today = moment().format "DD/MM/YYYY"
output = "Votre rapport bancaire du #{today}\n\n"
output += "Solde de vos comptes :\n"
for account in accounts
lastCheck = moment(account.lastCheck).format "DD/MM/YYYY"
output += "\t* #{account.accountNumber} (#{account.title}) " + \
"# #{account.getBalance()}€ " + \
"(Dernière vérification : #{lastCheck})\n"
if Object.keys(operationsByAccount).length > 0
output += "\nNouvelles opérations importées :\n"
for account, operations of operationsByAccount
output += "Compte n°#{account}\n"
for operation in operations
output += "\t* #{operation.title} # #{operation.amount}€"+ \
" # (#{moment(operation.date).format("DD/MM/YYYY")})\n"
else
output = "Aucune nouvelle opération n'a été importée #{frequency}."
return output
_getHtmlContent: (operationsByAccount, accounts, frequency) ->
today = moment().format "DD/MM/YYYY"
options =
today: today
accounts: accounts
operationsByAccount: operationsByAccount
return jade.renderFile './server/views/mail-report.jade', options
_getTimeFrame: (frequency) ->
timeFrame = moment()
switch frequency
when "daily"
return timeFrame.subtract("days", 1)
.hours(0).minutes(0).seconds(0)
when "weekly"
return timeFrame.subtract("days", 7)
.hours(0).minutes(0).seconds(0)
when "monthly"
return timeFrame.subtract("months", 1)
.days(0).hours(0).minutes(0).seconds(0)
module.exports = new ReportManager()
|
[
{
"context": "ubot raptor me\n#\n# Notes:\n# None\n#\n# Author:\n# Joe Groseclose <@benderTheCrime>\n\nrequest = require 'request'\n\nu",
"end": 168,
"score": 0.9998384714126587,
"start": 154,
"tag": "NAME",
"value": "Joe Groseclose"
},
{
"context": "# Notes:\n# None\n#\n# Auth... | src/random-dino.coffee | benderTheCrime/hubot-random-dino | 0 | # Description
# Post a random dino!
#
# Configuration:
# NONE
#
# Commands:
# hubot dino me
# hubot raptor me
#
# Notes:
# None
#
# Author:
# Joe Groseclose <@benderTheCrime>
request = require 'request'
url = 'http://api.giphy.com/v1/gifs/search?api_key=dc6zaTOxFJmzC&q='
dinoUrl = "#{url}dinosaur"
raptorUrl = "#{url}raptor"
module.exports = (robot) ->
robot.respond /dino me/i, (res) -> getGiphy dinoUrl, res
robot.respond /raptor me/i, (res) -> getGiphy raptorUrl, res
getGiphy = (url, res = { send: (url) -> console.log url }) ->
request.get (url || dinoUrl), (e, r, body) ->
dinos = JSON.parse(body).data
if !e && r.statusCode == 200
dino = dinos[ random dinos ]
res.send dino.images[ 'downsized_medium' ].url
random = (arr) -> Math.floor (Math.random() * arr.length)
| 42273 | # Description
# Post a random dino!
#
# Configuration:
# NONE
#
# Commands:
# hubot dino me
# hubot raptor me
#
# Notes:
# None
#
# Author:
# <NAME> <@benderTheCrime>
request = require 'request'
url = 'http://api.giphy.com/v1/gifs/search?api_key=<KEY>&q='
dinoUrl = "#{url}dinosaur"
raptorUrl = "#{url}raptor"
module.exports = (robot) ->
robot.respond /dino me/i, (res) -> getGiphy dinoUrl, res
robot.respond /raptor me/i, (res) -> getGiphy raptorUrl, res
getGiphy = (url, res = { send: (url) -> console.log url }) ->
request.get (url || dinoUrl), (e, r, body) ->
dinos = JSON.parse(body).data
if !e && r.statusCode == 200
dino = dinos[ random dinos ]
res.send dino.images[ 'downsized_medium' ].url
random = (arr) -> Math.floor (Math.random() * arr.length)
| true | # Description
# Post a random dino!
#
# Configuration:
# NONE
#
# Commands:
# hubot dino me
# hubot raptor me
#
# Notes:
# None
#
# Author:
# PI:NAME:<NAME>END_PI <@benderTheCrime>
request = require 'request'
url = 'http://api.giphy.com/v1/gifs/search?api_key=PI:KEY:<KEY>END_PI&q='
dinoUrl = "#{url}dinosaur"
raptorUrl = "#{url}raptor"
module.exports = (robot) ->
robot.respond /dino me/i, (res) -> getGiphy dinoUrl, res
robot.respond /raptor me/i, (res) -> getGiphy raptorUrl, res
getGiphy = (url, res = { send: (url) -> console.log url }) ->
request.get (url || dinoUrl), (e, r, body) ->
dinos = JSON.parse(body).data
if !e && r.statusCode == 200
dino = dinos[ random dinos ]
res.send dino.images[ 'downsized_medium' ].url
random = (arr) -> Math.floor (Math.random() * arr.length)
|
[
{
"context": " - [ ] cool [firebase Vue Dapp](https://medium.com/@sebinatx/building-an-ethereum-firebase-user-profile-dapp-p",
"end": 1107,
"score": 0.9993361234664917,
"start": 1098,
"tag": "USERNAME",
"value": "@sebinatx"
},
{
"context": " - [ ] ALso get BOTUI working, https://gith... | notes/c08bee54-8c8a-44db-9a9b-65525180318f.cson | wikiGrandfleet/gravRepo | 0 | createdAt: "2018-07-19T01:25:14.343Z"
updatedAt: "2018-08-04T16:12:22.526Z"
type: "MARKDOWN_NOTE"
folder: "5cef29a72e245ea24273"
title: "June and July"
content: '''
# June and July
keep adding new entries to the top
## July-August
### Todo List July and Onward
!!! error Current Tasks Highest priority
**Current tasks** get done quickly.
- [x] finishing the ENGR 446 Report
- [ ] Finish the ENGR 003 Report
- [ ] Reading documentation for IOT in both markdown and latex.
- [x] Get vuepress theme working for boostnote, compiling and running vuepress from gitlab, so two jobs, one to compile the "docs" folder or other files, and then another to build the vuepress theme and then create static documentation.
- [x] can import google material icons better and get this working.
- [ ] If boostnote html export is fixed soon, see if katex can be embedded without packages, only using head tags or equvialent.
!!!
!!! danger Major Backlog
The tasks listed below are expected to be done by the end of the term.
- [ ] cool [firebase Vue Dapp](https://medium.com/@sebinatx/building-an-ethereum-firebase-user-profile-dapp-part-2-226bcc11ae62, )
- [ ] Soldiity contracts repo for games, explaining how to use then and more.
- [ ] Fix python script on gitlab, auto sending reports to self, with probably jinja based format and maybe latex output.
!!!
!!! caution Minor Backlog
The tasks listed below are expected to be done by the end of the term.
- [ ] ALso get BOTUI working, https://github.com/botui/botui, so I can get vue based protoflio?
- [ ] Porotflio theme done in vuepress or other.
- [ ] Update the automatic navbar/sidebar generation for markdown-notes-template.
!!!
#### Section of open source projects
!!! attention Vuepress-theme-cool
This contains the todo list for my open source vuepress theme
- [x] [GitHub - docarys/markdown-it-admonition: An admonition plugin for markdown-it powered by docarys.io](https://github.com/docarys/markdown-it-admonition)
- [ ] Adding mindmap package for documentation
- [ ] Find out how many humans are downloading my packages lol.
!!!
!!! hint possibly list of things todo
Do if bored.
- [ ] Update pirates game to use webpack
- [ ] Update python script that parses uvic job board to use requests?
- [ ] update dash dashboard
- [ ] Deploy some garbage apps using go
- [ ] Update documentation for vuepress-theme-cool and markdown-notes-template and provide a cheatsheet of what boostnote/vuepress can do together and limitations.
[百度脑图-便捷的思维工具](http://naotu.baidu.com/file/1d400c82d65cd3fd530ffc55d8eb3783?token=5e57783dc51b3617) Copy mind mpa from that.

!!!
!!! note References
[GitHub - johannbre/markdown-it-admonition: An admonition plugin for markdown-it powered by docarys.io](https://github.com/johannbre/markdown-it-admonition)
!!!
'''
tags: [
"COol"
]
isStarred: true
isTrashed: false
isPinned: false
| 163088 | createdAt: "2018-07-19T01:25:14.343Z"
updatedAt: "2018-08-04T16:12:22.526Z"
type: "MARKDOWN_NOTE"
folder: "5cef29a72e245ea24273"
title: "June and July"
content: '''
# June and July
keep adding new entries to the top
## July-August
### Todo List July and Onward
!!! error Current Tasks Highest priority
**Current tasks** get done quickly.
- [x] finishing the ENGR 446 Report
- [ ] Finish the ENGR 003 Report
- [ ] Reading documentation for IOT in both markdown and latex.
- [x] Get vuepress theme working for boostnote, compiling and running vuepress from gitlab, so two jobs, one to compile the "docs" folder or other files, and then another to build the vuepress theme and then create static documentation.
- [x] can import google material icons better and get this working.
- [ ] If boostnote html export is fixed soon, see if katex can be embedded without packages, only using head tags or equvialent.
!!!
!!! danger Major Backlog
The tasks listed below are expected to be done by the end of the term.
- [ ] cool [firebase Vue Dapp](https://medium.com/@sebinatx/building-an-ethereum-firebase-user-profile-dapp-part-2-226bcc11ae62, )
- [ ] Soldiity contracts repo for games, explaining how to use then and more.
- [ ] Fix python script on gitlab, auto sending reports to self, with probably jinja based format and maybe latex output.
!!!
!!! caution Minor Backlog
The tasks listed below are expected to be done by the end of the term.
- [ ] ALso get BOTUI working, https://github.com/botui/botui, so I can get vue based protoflio?
- [ ] Porotflio theme done in vuepress or other.
- [ ] Update the automatic navbar/sidebar generation for markdown-notes-template.
!!!
#### Section of open source projects
!!! attention Vuepress-theme-cool
This contains the todo list for my open source vuepress theme
- [x] [GitHub - docarys/markdown-it-admonition: An admonition plugin for markdown-it powered by docarys.io](https://github.com/docarys/markdown-it-admonition)
- [ ] Adding mindmap package for documentation
- [ ] Find out how many humans are downloading my packages lol.
!!!
!!! hint possibly list of things todo
Do if bored.
- [ ] Update pirates game to use webpack
- [ ] Update python script that parses uvic job board to use requests?
- [ ] update dash dashboard
- [ ] Deploy some garbage apps using go
- [ ] Update documentation for vuepress-theme-cool and markdown-notes-template and provide a cheatsheet of what boostnote/vuepress can do together and limitations.
[百度脑图-便捷的思维工具](http://naotu.baidu.com/file/1d400c82d65cd3fd530ffc55d8eb3783?token=<PASSWORD>) Copy mind mpa from that.

!!!
!!! note References
[GitHub - johannbre/markdown-it-admonition: An admonition plugin for markdown-it powered by docarys.io](https://github.com/johannbre/markdown-it-admonition)
!!!
'''
tags: [
"COol"
]
isStarred: true
isTrashed: false
isPinned: false
| true | createdAt: "2018-07-19T01:25:14.343Z"
updatedAt: "2018-08-04T16:12:22.526Z"
type: "MARKDOWN_NOTE"
folder: "5cef29a72e245ea24273"
title: "June and July"
content: '''
# June and July
keep adding new entries to the top
## July-August
### Todo List July and Onward
!!! error Current Tasks Highest priority
**Current tasks** get done quickly.
- [x] finishing the ENGR 446 Report
- [ ] Finish the ENGR 003 Report
- [ ] Reading documentation for IOT in both markdown and latex.
- [x] Get vuepress theme working for boostnote, compiling and running vuepress from gitlab, so two jobs, one to compile the "docs" folder or other files, and then another to build the vuepress theme and then create static documentation.
- [x] can import google material icons better and get this working.
- [ ] If boostnote html export is fixed soon, see if katex can be embedded without packages, only using head tags or equvialent.
!!!
!!! danger Major Backlog
The tasks listed below are expected to be done by the end of the term.
- [ ] cool [firebase Vue Dapp](https://medium.com/@sebinatx/building-an-ethereum-firebase-user-profile-dapp-part-2-226bcc11ae62, )
- [ ] Soldiity contracts repo for games, explaining how to use then and more.
- [ ] Fix python script on gitlab, auto sending reports to self, with probably jinja based format and maybe latex output.
!!!
!!! caution Minor Backlog
The tasks listed below are expected to be done by the end of the term.
- [ ] ALso get BOTUI working, https://github.com/botui/botui, so I can get vue based protoflio?
- [ ] Porotflio theme done in vuepress or other.
- [ ] Update the automatic navbar/sidebar generation for markdown-notes-template.
!!!
#### Section of open source projects
!!! attention Vuepress-theme-cool
This contains the todo list for my open source vuepress theme
- [x] [GitHub - docarys/markdown-it-admonition: An admonition plugin for markdown-it powered by docarys.io](https://github.com/docarys/markdown-it-admonition)
- [ ] Adding mindmap package for documentation
- [ ] Find out how many humans are downloading my packages lol.
!!!
!!! hint possibly list of things todo
Do if bored.
- [ ] Update pirates game to use webpack
- [ ] Update python script that parses uvic job board to use requests?
- [ ] update dash dashboard
- [ ] Deploy some garbage apps using go
- [ ] Update documentation for vuepress-theme-cool and markdown-notes-template and provide a cheatsheet of what boostnote/vuepress can do together and limitations.
[百度脑图-便捷的思维工具](http://naotu.baidu.com/file/1d400c82d65cd3fd530ffc55d8eb3783?token=PI:PASSWORD:<PASSWORD>END_PI) Copy mind mpa from that.

!!!
!!! note References
[GitHub - johannbre/markdown-it-admonition: An admonition plugin for markdown-it powered by docarys.io](https://github.com/johannbre/markdown-it-admonition)
!!!
'''
tags: [
"COol"
]
isStarred: true
isTrashed: false
isPinned: false
|
[
{
"context": ".Info =\n\ttitle: \"Infinity looping list\"\n\tauthor: \"Jungho song\"\n\ttwitter: \"threeword\"\n\tdescription: \"- Support u",
"end": 174,
"score": 0.9998811483383179,
"start": 163,
"tag": "NAME",
"value": "Jungho song"
},
{
"context": "y looping list\"\n\tauthor: \"J... | app.coffee | framer-modules/infinity_list.framer | 4 | # Project Info
# This info is presented in a widget when you share.
# http://framerjs.com/docs/#info.info
Framer.Info =
title: "Infinity looping list"
author: "Jungho song"
twitter: "threeword"
description: "- Support up and down scroll.\n- Recycle item layer of list.\n\n*Compatible with all devices."
Framer.Extras.ShareInfo.enable()
# Dummy data (Use http://dummi.io)
dummi = [
{
"id": 0,
"fullName": "Winifred Nash",
"title": "Recusandae Unde Sapiente Deleniti",
"datePublished": "06/03/2003"
},
{
"id": 1,
"fullName": "Edith Porter",
"title": "Optio",
"datePublished": "08/03/2013"
},
{
"id": 2,
"fullName": "Sophie Graves",
"title": "Dicta",
"datePublished": "04/16/2006"
},
{
"id": 3,
"fullName": "Leona Turner",
"title": "Sapiente Corporis Delectus",
"datePublished": "01/09/2002"
},
{
"id": 4,
"fullName": "Genevieve Moran",
"title": "Neque",
"datePublished": "08/03/2008"
},
{
"id": 5,
"fullName": "Lois Tucker",
"title": "Quam Minima Incidunt Impedit",
"datePublished": "02/24/2016"
},
{
"id": 6,
"fullName": "Mamie Norris",
"title": "Labore Suscipit Nesciunt Facere",
"datePublished": "01/10/2001"
},
{
"id": 7,
"fullName": "Hilda Medina",
"title": "Alias Voluptas Atque Voluptatum Ea",
"datePublished": "12/11/2007"
},
{
"id": 8,
"fullName": "Flora Hart",
"title": "Est Soluta Sunt Vel",
"datePublished": "01/22/2007"
},
{
"id": 9,
"fullName": "Bernice Hunt",
"title": "Obcaecati In",
"datePublished": "02/02/2000"
},
{
"id": 10,
"fullName": "Matilda Nguyen",
"title": "Doloremque Soluta Architecto",
"datePublished": "05/25/2008"
},
{
"id": 11,
"fullName": "Lenora Douglas",
"title": "Libero",
"datePublished": "09/29/2012"
},
{
"id": 12,
"fullName": "Harriett Robbins",
"title": "Quis Tempora Suscipit",
"datePublished": "08/04/2013"
},
{
"id": 13,
"fullName": "Cynthia Howard",
"title": "Inventore Molestias Culpa Accusantium",
"datePublished": "03/04/2012"
},
{
"id": 14,
"fullName": "Della Richardson",
"title": "Ipsa",
"datePublished": "12/09/2000"
},
{
"id": 15,
"fullName": "Dollie Harris",
"title": "Qui",
"datePublished": "03/13/2013"
},
{
"id": 16,
"fullName": "Violet Conner",
"title": "Ea Libero",
"datePublished": "09/18/2007"
},
{
"id": 17,
"fullName": "Eula Brewer",
"title": "Numquam Ad Consectetur",
"datePublished": "02/04/2001"
},
{
"id": 18,
"fullName": "Josephine Webb",
"title": "Ab",
"datePublished": "07/03/2007"
},
{
"id": 19,
"fullName": "Catherine Morales",
"title": "Beatae Facere",
"datePublished": "09/17/2006"
},
{
"id": 20,
"fullName": "Betty Andrews",
"title": "Nisi Ipsam Cupiditate Magnam",
"datePublished": "03/14/2015"
},
{
"id": 21,
"fullName": "Maud McKinney",
"title": "Accusamus Neque",
"datePublished": "06/04/2007"
},
{
"id": 22,
"fullName": "Estella Bishop",
"title": "Cupiditate Ex Quae Odit",
"datePublished": "04/09/2013"
},
{
"id": 23,
"fullName": "Iva Frank",
"title": "Numquam Tempore Omnis",
"datePublished": "09/27/2011"
},
{
"id": 24,
"fullName": "Francis Johnston",
"title": "Sunt A Pariatur",
"datePublished": "04/20/2013"
},
{
"id": 25,
"fullName": "Sallie Yates",
"title": "Magnam Aliquam Sequi",
"datePublished": "11/07/2002"
},
{
"id": 26,
"fullName": "Marie Mack",
"title": "Blanditiis Possimus Dolor Sequi Ad",
"datePublished": "06/04/2000"
},
{
"id": 27,
"fullName": "Birdie Bradley",
"title": "Magni",
"datePublished": "09/21/2003"
},
{
"id": 28,
"fullName": "Etta Roberson",
"title": "Ullam Ipsum",
"datePublished": "05/12/2008"
},
{
"id": 29,
"fullName": "Lola Sanchez",
"title": "Fuga",
"datePublished": "02/17/2011"
},
{
"id": 30,
"fullName": "Lula Morton",
"title": "Aliquid Aperiam",
"datePublished": "11/24/2011"
},
{
"id": 31,
"fullName": "Minerva Rodriguez",
"title": "Itaque Quidem Officiis Laboriosam Ipsam",
"datePublished": "01/25/2010"
},
{
"id": 32,
"fullName": "Effie Williams",
"title": "Pariatur Recusandae",
"datePublished": "07/30/2001"
},
{
"id": 33,
"fullName": "Viola Marshall",
"title": "Nam Porro Earum A",
"datePublished": "12/14/2016"
},
{
"id": 34,
"fullName": "Mattie Simmons",
"title": "Dignissimos Tenetur Earum",
"datePublished": "06/27/2004"
},
{
"id": 35,
"fullName": "Lettie Martinez",
"title": "Doloribus Nemo Officia Quidem Veritatis",
"datePublished": "03/18/2000"
},
{
"id": 36,
"fullName": "Lois Mendez",
"title": "Culpa Labore Error Sint Dolorem",
"datePublished": "08/20/2013"
},
{
"id": 37,
"fullName": "Roxie Gonzalez",
"title": "Nisi",
"datePublished": "05/20/2007"
},
{
"id": 38,
"fullName": "Lou Conner",
"title": "Veritatis",
"datePublished": "01/24/2015"
},
{
"id": 39,
"fullName": "Barbara Joseph",
"title": "Explicabo Consequuntur Illo",
"datePublished": "11/17/2014"
},
{
"id": 40,
"fullName": "Victoria Colon",
"title": "Sint Temporibus Beatae Necessitatibus",
"datePublished": "03/25/2003"
},
{
"id": 41,
"fullName": "Pearl Atkins",
"title": "Culpa",
"datePublished": "03/17/2001"
},
{
"id": 42,
"fullName": "Johanna Dennis",
"title": "Enim Nobis",
"datePublished": "09/19/2006"
},
{
"id": 43,
"fullName": "Stella Jefferson",
"title": "Consequatur Earum Delectus Inventore",
"datePublished": "10/21/2013"
},
{
"id": 44,
"fullName": "Linnie Underwood",
"title": "Veniam Ipsum",
"datePublished": "02/16/2013"
},
{
"id": 45,
"fullName": "Nellie Jordan",
"title": "Ducimus Illo Totam Soluta Eaque",
"datePublished": "10/25/2014"
},
{
"id": 46,
"fullName": "Dorothy Atkins",
"title": "Id Tempore maxime",
"datePublished": "02/07/2001"
},
{
"id": 47,
"fullName": "Madge Guerrero",
"title": "Ipsam Ea Nesciunt",
"datePublished": "09/15/2009"
},
{
"id": 48,
"fullName": "Adele Jordan",
"title": "Commodi Minima",
"datePublished": "11/10/2006"
},
{
"id": 49,
"fullName": "Loretta Delgado",
"title": "Eveniet Ex Sunt Beatae",
"datePublished": "10/08/2001"
}
]
# Constants
DEVICE = width: 750, height: 1334
SCALE_RATIO = Screen.width / DEVICE.width
FONT = Utils.deviceFont()
MAX_COUNT = 8
SPACE = 10
# Variables
items = []
# Get list item data
getItem = (position) -> dummi[position]
# Get list item view
getView = (position, view, parent) ->
unless view
# List item view
view = new Layer
width: parent.width, height: 200
backgroundColor: "white"
borderRadius: 10
custom: position
# Number
view.num = new Layer
name: ".num"
x: Align.right(-7), y: Align.top(7)
width: 40, height: 40
style:
font: "Bold 20px/40px #{FONT}"
textAlign: "center"
letterSpacing: "-1.0px"
borderRadius: 20
parent: view
# Title
view.title = new Layer
name: ".title"
width: view.width, height: 50
style:
font: "Bold 30px/50px #{FONT}"
paddingLeft: "10px"
letterSpacing: "-0.5px"
color: "black"
backgroundColor: "transparent"
parent: view
# Description
view.description = new Layer
name: ".description"
y: view.title.maxY
width: view.width, height: 100
parent: view
# Author
view.author = new Layer
name: ".author"
y: view.description.maxY
width: view.width * 2/3, height: 50
style:
font: "Bold 30px/50px #{FONT}"
paddingLeft: "57px"
color: "black"
backgroundColor: "transparent"
parent: view
# Author photo
view.author.photo = new Layer
x: Align.left(10), y: Align.center
size: 40
backgroundColor: "gray"
borderRadius: 20
parent: view.author
# Date
view.date = new Layer
name: ".date"
x: view.author.maxX, y: view.description.maxY
width: view.width * 1/3, height: 50
style:
font: "400 30px/50px #{FONT}"
textAlign: "right"
paddingRight: "10px"
color: "black"
backgroundColor: "transparent"
parent: view
# Set description padding
view.description.frame = Utils.frameInset view.description.frame, 10
# Update item
view.update = (position) ->
index = position
if position < 0
index = _.size(dummi) - 1
else if position > _.size(dummi) - 1
index = 0
@custom = index
#
@data = data = getItem index
if data
@num.html = data.id
@title.html = data.title
@author.html = data.fullName
@date.html = data.datePublished
# Update contents
view.update position
# Return
view
# Draw draggable view (Compatible with all devices)
contents = new Layer
width: DEVICE.width, height: Screen.height / SCALE_RATIO
scale: SCALE_RATIO, originX: 0, originY: 0
backgroundColor: "transparent"
# Enable draggable
contents.draggable.enabled = true
# Disable horizontal scroll
contents.draggable.horizontal = false
# Contents move event
contents.onMove ->
f = _.first(items)
l = _.last(items)
# Last item move to top
if Utils.frameInFrame(f.screenFrame, Screen.frame)
contents.removeChild l
contents.addChild l
# Update contents data
l.update l.custom = f.custom - 1
# Set y position
l.maxY = f.y - SPACE
# Reorder list item
items.unshift(items.pop())
# First item move to bottom
else if !Utils.frameInFrame(items[1].screenFrame, Screen.frame)
contents.removeChild f
contents.addChild f
# Update contents data
f.update f.custom = l.custom + 1
# Set y position
f.y = l.maxY + SPACE
# Reorder list item
items.push(items.shift())
# Draw list items
for i in [0...MAX_COUNT]
# Draw
item = getView i, null, contents
item.name = "item - #{i}"
item.y = contents.contentFrame().height
item.y += SPACE if i isnt 0
# Events
item.ignoreEvents = false
item.onClick -> print @data
# Add item to list view
contents.addChild item
# Add item to array
items.push item | 202659 | # Project Info
# This info is presented in a widget when you share.
# http://framerjs.com/docs/#info.info
Framer.Info =
title: "Infinity looping list"
author: "<NAME>"
twitter: "threeword"
description: "- Support up and down scroll.\n- Recycle item layer of list.\n\n*Compatible with all devices."
Framer.Extras.ShareInfo.enable()
# Dummy data (Use http://dummi.io)
dummi = [
{
"id": 0,
"fullName": "<NAME>",
"title": "Recusandae Unde Sapiente Deleniti",
"datePublished": "06/03/2003"
},
{
"id": 1,
"fullName": "<NAME>",
"title": "Optio",
"datePublished": "08/03/2013"
},
{
"id": 2,
"fullName": "<NAME>",
"title": "Dicta",
"datePublished": "04/16/2006"
},
{
"id": 3,
"fullName": "<NAME>",
"title": "Sapiente Corporis Delectus",
"datePublished": "01/09/2002"
},
{
"id": 4,
"fullName": "<NAME>",
"title": "Neque",
"datePublished": "08/03/2008"
},
{
"id": 5,
"fullName": "<NAME>",
"title": "Quam Minima Incidunt Impedit",
"datePublished": "02/24/2016"
},
{
"id": 6,
"fullName": "<NAME>",
"title": "Labore Suscipit Nesciunt Facere",
"datePublished": "01/10/2001"
},
{
"id": 7,
"fullName": "<NAME>",
"title": "Alias Voluptas Atque Voluptatum Ea",
"datePublished": "12/11/2007"
},
{
"id": 8,
"fullName": "<NAME>",
"title": "Est Soluta Sunt Vel",
"datePublished": "01/22/2007"
},
{
"id": 9,
"fullName": "<NAME>",
"title": "Obcaecati In",
"datePublished": "02/02/2000"
},
{
"id": 10,
"fullName": "<NAME>",
"title": "Doloremque Soluta Architecto",
"datePublished": "05/25/2008"
},
{
"id": 11,
"fullName": "<NAME>",
"title": "Libero",
"datePublished": "09/29/2012"
},
{
"id": 12,
"fullName": "<NAME>",
"title": "Quis Tempora Suscipit",
"datePublished": "08/04/2013"
},
{
"id": 13,
"fullName": "<NAME>",
"title": "Inventore Molestias Culpa Accusantium",
"datePublished": "03/04/2012"
},
{
"id": 14,
"fullName": "<NAME>",
"title": "Ipsa",
"datePublished": "12/09/2000"
},
{
"id": 15,
"fullName": "<NAME>",
"title": "Qui",
"datePublished": "03/13/2013"
},
{
"id": 16,
"fullName": "<NAME>",
"title": "Ea Libero",
"datePublished": "09/18/2007"
},
{
"id": 17,
"fullName": "<NAME>",
"title": "Numquam Ad Consectetur",
"datePublished": "02/04/2001"
},
{
"id": 18,
"fullName": "<NAME>",
"title": "Ab",
"datePublished": "07/03/2007"
},
{
"id": 19,
"fullName": "<NAME>",
"title": "Beatae Facere",
"datePublished": "09/17/2006"
},
{
"id": 20,
"fullName": "<NAME>",
"title": "Nisi Ipsam Cupiditate Magnam",
"datePublished": "03/14/2015"
},
{
"id": 21,
"fullName": "<NAME>",
"title": "Accusamus Neque",
"datePublished": "06/04/2007"
},
{
"id": 22,
"fullName": "<NAME>",
"title": "Cupiditate Ex Quae Odit",
"datePublished": "04/09/2013"
},
{
"id": 23,
"fullName": "<NAME>",
"title": "Numquam Tempore Omnis",
"datePublished": "09/27/2011"
},
{
"id": 24,
"fullName": "<NAME>",
"title": "Sunt A Pariatur",
"datePublished": "04/20/2013"
},
{
"id": 25,
"fullName": "<NAME>",
"title": "Magnam Aliquam Sequi",
"datePublished": "11/07/2002"
},
{
"id": 26,
"fullName": "<NAME>",
"title": "Blanditiis Possimus Dolor Sequi Ad",
"datePublished": "06/04/2000"
},
{
"id": 27,
"fullName": "<NAME>",
"title": "Magni",
"datePublished": "09/21/2003"
},
{
"id": 28,
"fullName": "<NAME>",
"title": "Ullam Ipsum",
"datePublished": "05/12/2008"
},
{
"id": 29,
"fullName": "<NAME>",
"title": "Fuga",
"datePublished": "02/17/2011"
},
{
"id": 30,
"fullName": "<NAME>",
"title": "Aliquid Aperiam",
"datePublished": "11/24/2011"
},
{
"id": 31,
"fullName": "<NAME>",
"title": "Itaque Quidem Officiis Laboriosam Ipsam",
"datePublished": "01/25/2010"
},
{
"id": 32,
"fullName": "<NAME>",
"title": "Pariatur Recusandae",
"datePublished": "07/30/2001"
},
{
"id": 33,
"fullName": "<NAME>",
"title": "Nam Porro Earum A",
"datePublished": "12/14/2016"
},
{
"id": 34,
"fullName": "<NAME>",
"title": "Dignissimos Tenetur Earum",
"datePublished": "06/27/2004"
},
{
"id": 35,
"fullName": "<NAME>",
"title": "Doloribus Nemo Officia Quidem Veritatis",
"datePublished": "03/18/2000"
},
{
"id": 36,
"fullName": "<NAME>",
"title": "Culpa Labore Error Sint Dolorem",
"datePublished": "08/20/2013"
},
{
"id": 37,
"fullName": "<NAME>",
"title": "Nisi",
"datePublished": "05/20/2007"
},
{
"id": 38,
"fullName": "<NAME>",
"title": "Veritatis",
"datePublished": "01/24/2015"
},
{
"id": 39,
"fullName": "<NAME>",
"title": "Explicabo Consequuntur Illo",
"datePublished": "11/17/2014"
},
{
"id": 40,
"fullName": "<NAME>",
"title": "Sint Temporibus Beatae Necessitatibus",
"datePublished": "03/25/2003"
},
{
"id": 41,
"fullName": "<NAME>",
"title": "Culpa",
"datePublished": "03/17/2001"
},
{
"id": 42,
"fullName": "<NAME>",
"title": "Enim Nobis",
"datePublished": "09/19/2006"
},
{
"id": 43,
"fullName": "<NAME>",
"title": "Consequatur Earum Delectus Inventore",
"datePublished": "10/21/2013"
},
{
"id": 44,
"fullName": "<NAME>",
"title": "Veniam Ipsum",
"datePublished": "02/16/2013"
},
{
"id": 45,
"fullName": "<NAME>",
"title": "Ducimus Illo Totam Soluta Eaque",
"datePublished": "10/25/2014"
},
{
"id": 46,
"fullName": "<NAME>",
"title": "Id Tempore maxime",
"datePublished": "02/07/2001"
},
{
"id": 47,
"fullName": "<NAME>",
"title": "Ipsam Ea Nesciunt",
"datePublished": "09/15/2009"
},
{
"id": 48,
"fullName": "<NAME>",
"title": "Commodi Minima",
"datePublished": "11/10/2006"
},
{
"id": 49,
"fullName": "<NAME>",
"title": "Eveniet Ex Sunt Beatae",
"datePublished": "10/08/2001"
}
]
# Constants
DEVICE = width: 750, height: 1334
SCALE_RATIO = Screen.width / DEVICE.width
FONT = Utils.deviceFont()
MAX_COUNT = 8
SPACE = 10
# Variables
items = []
# Get list item data
getItem = (position) -> dummi[position]
# Get list item view
getView = (position, view, parent) ->
unless view
# List item view
view = new Layer
width: parent.width, height: 200
backgroundColor: "white"
borderRadius: 10
custom: position
# Number
view.num = new Layer
name: ".num"
x: Align.right(-7), y: Align.top(7)
width: 40, height: 40
style:
font: "Bold 20px/40px #{FONT}"
textAlign: "center"
letterSpacing: "-1.0px"
borderRadius: 20
parent: view
# Title
view.title = new Layer
name: ".title"
width: view.width, height: 50
style:
font: "Bold 30px/50px #{FONT}"
paddingLeft: "10px"
letterSpacing: "-0.5px"
color: "black"
backgroundColor: "transparent"
parent: view
# Description
view.description = new Layer
name: ".description"
y: view.title.maxY
width: view.width, height: 100
parent: view
# Author
view.author = new Layer
name: ".author"
y: view.description.maxY
width: view.width * 2/3, height: 50
style:
font: "Bold 30px/50px #{FONT}"
paddingLeft: "57px"
color: "black"
backgroundColor: "transparent"
parent: view
# Author photo
view.author.photo = new Layer
x: Align.left(10), y: Align.center
size: 40
backgroundColor: "gray"
borderRadius: 20
parent: view.author
# Date
view.date = new Layer
name: ".date"
x: view.author.maxX, y: view.description.maxY
width: view.width * 1/3, height: 50
style:
font: "400 30px/50px #{FONT}"
textAlign: "right"
paddingRight: "10px"
color: "black"
backgroundColor: "transparent"
parent: view
# Set description padding
view.description.frame = Utils.frameInset view.description.frame, 10
# Update item
view.update = (position) ->
index = position
if position < 0
index = _.size(dummi) - 1
else if position > _.size(dummi) - 1
index = 0
@custom = index
#
@data = data = getItem index
if data
@num.html = data.id
@title.html = data.title
@author.html = data.fullName
@date.html = data.datePublished
# Update contents
view.update position
# Return
view
# Draw draggable view (Compatible with all devices)
contents = new Layer
width: DEVICE.width, height: Screen.height / SCALE_RATIO
scale: SCALE_RATIO, originX: 0, originY: 0
backgroundColor: "transparent"
# Enable draggable
contents.draggable.enabled = true
# Disable horizontal scroll
contents.draggable.horizontal = false
# Contents move event
contents.onMove ->
f = _.first(items)
l = _.last(items)
# Last item move to top
if Utils.frameInFrame(f.screenFrame, Screen.frame)
contents.removeChild l
contents.addChild l
# Update contents data
l.update l.custom = f.custom - 1
# Set y position
l.maxY = f.y - SPACE
# Reorder list item
items.unshift(items.pop())
# First item move to bottom
else if !Utils.frameInFrame(items[1].screenFrame, Screen.frame)
contents.removeChild f
contents.addChild f
# Update contents data
f.update f.custom = l.custom + 1
# Set y position
f.y = l.maxY + SPACE
# Reorder list item
items.push(items.shift())
# Draw list items
for i in [0...MAX_COUNT]
# Draw
item = getView i, null, contents
item.name = "item - #{i}"
item.y = contents.contentFrame().height
item.y += SPACE if i isnt 0
# Events
item.ignoreEvents = false
item.onClick -> print @data
# Add item to list view
contents.addChild item
# Add item to array
items.push item | true | # Project Info
# This info is presented in a widget when you share.
# http://framerjs.com/docs/#info.info
Framer.Info =
title: "Infinity looping list"
author: "PI:NAME:<NAME>END_PI"
twitter: "threeword"
description: "- Support up and down scroll.\n- Recycle item layer of list.\n\n*Compatible with all devices."
Framer.Extras.ShareInfo.enable()
# Dummy data (Use http://dummi.io)
dummi = [
{
"id": 0,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Recusandae Unde Sapiente Deleniti",
"datePublished": "06/03/2003"
},
{
"id": 1,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Optio",
"datePublished": "08/03/2013"
},
{
"id": 2,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Dicta",
"datePublished": "04/16/2006"
},
{
"id": 3,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Sapiente Corporis Delectus",
"datePublished": "01/09/2002"
},
{
"id": 4,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Neque",
"datePublished": "08/03/2008"
},
{
"id": 5,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Quam Minima Incidunt Impedit",
"datePublished": "02/24/2016"
},
{
"id": 6,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Labore Suscipit Nesciunt Facere",
"datePublished": "01/10/2001"
},
{
"id": 7,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Alias Voluptas Atque Voluptatum Ea",
"datePublished": "12/11/2007"
},
{
"id": 8,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Est Soluta Sunt Vel",
"datePublished": "01/22/2007"
},
{
"id": 9,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Obcaecati In",
"datePublished": "02/02/2000"
},
{
"id": 10,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Doloremque Soluta Architecto",
"datePublished": "05/25/2008"
},
{
"id": 11,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Libero",
"datePublished": "09/29/2012"
},
{
"id": 12,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Quis Tempora Suscipit",
"datePublished": "08/04/2013"
},
{
"id": 13,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Inventore Molestias Culpa Accusantium",
"datePublished": "03/04/2012"
},
{
"id": 14,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Ipsa",
"datePublished": "12/09/2000"
},
{
"id": 15,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Qui",
"datePublished": "03/13/2013"
},
{
"id": 16,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Ea Libero",
"datePublished": "09/18/2007"
},
{
"id": 17,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Numquam Ad Consectetur",
"datePublished": "02/04/2001"
},
{
"id": 18,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Ab",
"datePublished": "07/03/2007"
},
{
"id": 19,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Beatae Facere",
"datePublished": "09/17/2006"
},
{
"id": 20,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Nisi Ipsam Cupiditate Magnam",
"datePublished": "03/14/2015"
},
{
"id": 21,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Accusamus Neque",
"datePublished": "06/04/2007"
},
{
"id": 22,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Cupiditate Ex Quae Odit",
"datePublished": "04/09/2013"
},
{
"id": 23,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Numquam Tempore Omnis",
"datePublished": "09/27/2011"
},
{
"id": 24,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Sunt A Pariatur",
"datePublished": "04/20/2013"
},
{
"id": 25,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Magnam Aliquam Sequi",
"datePublished": "11/07/2002"
},
{
"id": 26,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Blanditiis Possimus Dolor Sequi Ad",
"datePublished": "06/04/2000"
},
{
"id": 27,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Magni",
"datePublished": "09/21/2003"
},
{
"id": 28,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Ullam Ipsum",
"datePublished": "05/12/2008"
},
{
"id": 29,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Fuga",
"datePublished": "02/17/2011"
},
{
"id": 30,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Aliquid Aperiam",
"datePublished": "11/24/2011"
},
{
"id": 31,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Itaque Quidem Officiis Laboriosam Ipsam",
"datePublished": "01/25/2010"
},
{
"id": 32,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Pariatur Recusandae",
"datePublished": "07/30/2001"
},
{
"id": 33,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Nam Porro Earum A",
"datePublished": "12/14/2016"
},
{
"id": 34,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Dignissimos Tenetur Earum",
"datePublished": "06/27/2004"
},
{
"id": 35,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Doloribus Nemo Officia Quidem Veritatis",
"datePublished": "03/18/2000"
},
{
"id": 36,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Culpa Labore Error Sint Dolorem",
"datePublished": "08/20/2013"
},
{
"id": 37,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Nisi",
"datePublished": "05/20/2007"
},
{
"id": 38,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Veritatis",
"datePublished": "01/24/2015"
},
{
"id": 39,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Explicabo Consequuntur Illo",
"datePublished": "11/17/2014"
},
{
"id": 40,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Sint Temporibus Beatae Necessitatibus",
"datePublished": "03/25/2003"
},
{
"id": 41,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Culpa",
"datePublished": "03/17/2001"
},
{
"id": 42,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Enim Nobis",
"datePublished": "09/19/2006"
},
{
"id": 43,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Consequatur Earum Delectus Inventore",
"datePublished": "10/21/2013"
},
{
"id": 44,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Veniam Ipsum",
"datePublished": "02/16/2013"
},
{
"id": 45,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Ducimus Illo Totam Soluta Eaque",
"datePublished": "10/25/2014"
},
{
"id": 46,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Id Tempore maxime",
"datePublished": "02/07/2001"
},
{
"id": 47,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Ipsam Ea Nesciunt",
"datePublished": "09/15/2009"
},
{
"id": 48,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Commodi Minima",
"datePublished": "11/10/2006"
},
{
"id": 49,
"fullName": "PI:NAME:<NAME>END_PI",
"title": "Eveniet Ex Sunt Beatae",
"datePublished": "10/08/2001"
}
]
# Constants
DEVICE = width: 750, height: 1334
SCALE_RATIO = Screen.width / DEVICE.width
FONT = Utils.deviceFont()
MAX_COUNT = 8
SPACE = 10
# Variables
items = []
# Get list item data
getItem = (position) -> dummi[position]
# Get list item view
getView = (position, view, parent) ->
unless view
# List item view
view = new Layer
width: parent.width, height: 200
backgroundColor: "white"
borderRadius: 10
custom: position
# Number
view.num = new Layer
name: ".num"
x: Align.right(-7), y: Align.top(7)
width: 40, height: 40
style:
font: "Bold 20px/40px #{FONT}"
textAlign: "center"
letterSpacing: "-1.0px"
borderRadius: 20
parent: view
# Title
view.title = new Layer
name: ".title"
width: view.width, height: 50
style:
font: "Bold 30px/50px #{FONT}"
paddingLeft: "10px"
letterSpacing: "-0.5px"
color: "black"
backgroundColor: "transparent"
parent: view
# Description
view.description = new Layer
name: ".description"
y: view.title.maxY
width: view.width, height: 100
parent: view
# Author
view.author = new Layer
name: ".author"
y: view.description.maxY
width: view.width * 2/3, height: 50
style:
font: "Bold 30px/50px #{FONT}"
paddingLeft: "57px"
color: "black"
backgroundColor: "transparent"
parent: view
# Author photo
view.author.photo = new Layer
x: Align.left(10), y: Align.center
size: 40
backgroundColor: "gray"
borderRadius: 20
parent: view.author
# Date
view.date = new Layer
name: ".date"
x: view.author.maxX, y: view.description.maxY
width: view.width * 1/3, height: 50
style:
font: "400 30px/50px #{FONT}"
textAlign: "right"
paddingRight: "10px"
color: "black"
backgroundColor: "transparent"
parent: view
# Set description padding
view.description.frame = Utils.frameInset view.description.frame, 10
# Update item
view.update = (position) ->
index = position
if position < 0
index = _.size(dummi) - 1
else if position > _.size(dummi) - 1
index = 0
@custom = index
#
@data = data = getItem index
if data
@num.html = data.id
@title.html = data.title
@author.html = data.fullName
@date.html = data.datePublished
# Update contents
view.update position
# Return
view
# Draw draggable view (Compatible with all devices)
contents = new Layer
width: DEVICE.width, height: Screen.height / SCALE_RATIO
scale: SCALE_RATIO, originX: 0, originY: 0
backgroundColor: "transparent"
# Enable draggable
contents.draggable.enabled = true
# Disable horizontal scroll
contents.draggable.horizontal = false
# Contents move event
contents.onMove ->
f = _.first(items)
l = _.last(items)
# Last item move to top
if Utils.frameInFrame(f.screenFrame, Screen.frame)
contents.removeChild l
contents.addChild l
# Update contents data
l.update l.custom = f.custom - 1
# Set y position
l.maxY = f.y - SPACE
# Reorder list item
items.unshift(items.pop())
# First item move to bottom
else if !Utils.frameInFrame(items[1].screenFrame, Screen.frame)
contents.removeChild f
contents.addChild f
# Update contents data
f.update f.custom = l.custom + 1
# Set y position
f.y = l.maxY + SPACE
# Reorder list item
items.push(items.shift())
# Draw list items
for i in [0...MAX_COUNT]
# Draw
item = getView i, null, contents
item.name = "item - #{i}"
item.y = contents.contentFrame().height
item.y += SPACE if i isnt 0
# Events
item.ignoreEvents = false
item.onClick -> print @data
# Add item to list view
contents.addChild item
# Add item to array
items.push item |
[
{
"context": ">\n @inquiry.set session_id: 'foobar', name: 'Craig', email: 'craigspaeth@gmail.com'\n (@inquiry.",
"end": 550,
"score": 0.9994299411773682,
"start": 545,
"tag": "NAME",
"value": "Craig"
},
{
"context": ".set session_id: 'foobar', name: 'Craig', email: 'craigs... | src/mobile/test/models/inquiry.test.coffee | jo-rs/force | 0 | sinon = require 'sinon'
Inquiry = require '../../models/inquiry'
{ fabricate } = require '@artsy/antigravity'
describe 'Inquiry', ->
beforeEach ->
@inquiry = new Inquiry fabricate 'artwork_inquiry_request'
describe '#validate', ->
it 'ensures a name and email for inquiries with a session', ->
@inquiry.set session_id: 'foobar', name: null
@inquiry.validate(@inquiry.toJSON()).should.containEql 'Please include a valid name'
it 'allows valid emails and names', ->
@inquiry.set session_id: 'foobar', name: 'Craig', email: 'craigspaeth@gmail.com'
(@inquiry.validate(@inquiry.toJSON())?).should.not.be.ok()
it 'doesnt complain about trailing/leading whitespace', ->
@inquiry.set session_id: 'foobar', name: 'Craig', email: 'craigspaeth@gmail.com '
(@inquiry.validate(@inquiry.toJSON())?).should.not.be.ok()
| 144599 | sinon = require 'sinon'
Inquiry = require '../../models/inquiry'
{ fabricate } = require '@artsy/antigravity'
describe 'Inquiry', ->
beforeEach ->
@inquiry = new Inquiry fabricate 'artwork_inquiry_request'
describe '#validate', ->
it 'ensures a name and email for inquiries with a session', ->
@inquiry.set session_id: 'foobar', name: null
@inquiry.validate(@inquiry.toJSON()).should.containEql 'Please include a valid name'
it 'allows valid emails and names', ->
@inquiry.set session_id: 'foobar', name: '<NAME>', email: '<EMAIL>'
(@inquiry.validate(@inquiry.toJSON())?).should.not.be.ok()
it 'doesnt complain about trailing/leading whitespace', ->
@inquiry.set session_id: 'foobar', name: '<NAME>', email: '<EMAIL> '
(@inquiry.validate(@inquiry.toJSON())?).should.not.be.ok()
| true | sinon = require 'sinon'
Inquiry = require '../../models/inquiry'
{ fabricate } = require '@artsy/antigravity'
describe 'Inquiry', ->
beforeEach ->
@inquiry = new Inquiry fabricate 'artwork_inquiry_request'
describe '#validate', ->
it 'ensures a name and email for inquiries with a session', ->
@inquiry.set session_id: 'foobar', name: null
@inquiry.validate(@inquiry.toJSON()).should.containEql 'Please include a valid name'
it 'allows valid emails and names', ->
@inquiry.set session_id: 'foobar', name: 'PI:NAME:<NAME>END_PI', email: 'PI:EMAIL:<EMAIL>END_PI'
(@inquiry.validate(@inquiry.toJSON())?).should.not.be.ok()
it 'doesnt complain about trailing/leading whitespace', ->
@inquiry.set session_id: 'foobar', name: 'PI:NAME:<NAME>END_PI', email: 'PI:EMAIL:<EMAIL>END_PI '
(@inquiry.validate(@inquiry.toJSON())?).should.not.be.ok()
|
[
{
"context": "username/password from POST\n username = req.body.username\n password = makeHash req.body.password\n\n db.get",
"end": 2552,
"score": 0.7997065186500549,
"start": 2544,
"tag": "USERNAME",
"value": "username"
},
{
"context": "m POST\n username = req.body.username\n ... | app.coffee | honza/bolt | 2 | express = require 'express'
crypto = require 'crypto'
redis = require 'redis'
RedisStore = require('connect-redis')(express)
# This is so that Bolt works on Heroku --- if Heroku supported websockets.
if process.env.REDISTOGO_URL
console.log 'redis to go'
db = require('redis-url').createClient process.env.REDISTOGO_URL
else
console.log 'not to go'
db = redis.createClient()
redisStore = new RedisStore
client: db
io = require 'socket.io'
# Helper functions
say = (word) -> console.log word
makeHash = (word) ->
h = crypto.createHash 'sha1'
h.update word
h.digest 'hex'
# Return utc now in format suitable for jquery.timeago
getNow = ->
pad = (n) ->
if n < 10
return "0#{n}"
else
return n
d = new Date
year = d.getUTCFullYear()
month = pad (d.getUTCMonth() + 1)
day = pad d.getUTCDate()
hour = pad d.getUTCHours()
minute = pad d.getUTCMinutes()
second = pad d.getUTCSeconds()
"#{year}-#{month}-#{day}T#{hour}:#{minute}:#{second}Z"
# Redis functions
db.on "error", (err) -> say err
createUser = (username, password) ->
db.incr 'global:nextUserId', (err, res) ->
db.set "username:#{username}:uid", res
db.set "uid:#{res}:username", username
db.set "uid:#{res}:password", makeHash password
db.lpush "users", "#{username}:#{res}"
app = module.exports = express.createServer()
# Express configuration
app.configure ->
app.set 'views', "#{__dirname}/views"
app.set 'view engine', 'jade'
app.use express.bodyParser()
app.use express.methodOverride()
app.use express.cookieParser()
app.use express.session
secret: "+N3,6.By4(S"
store: redisStore
cookie:
path: '/'
httpOnly: false
maxAge: 14400000
app.use app.router
app.use express.compiler
src: "#{__dirname}/public"
enable: ['less']
app.use express.static("#{__dirname}/public")
app.configure 'development', ->
app.use express.errorHandler
dumpExceptions: true
showStack: true
# Routes
app.get '/', (req, res) ->
if not req.session.boltauth
res.redirect '/login'
else
id = req.session.userid
# Select logged in user's messages
db.llen "uid:#{id}:timeline", (err, data) ->
db.lrange "uid:#{id}:timeline", 0, data, (err, data) ->
data = data.reverse()
res.render 'index',
auth: true
home: true
messages: data
app.get '/login', (req, res) ->
res.render 'login',
error: null
app.post '/login', (req, res) ->
# Extract username/password from POST
username = req.body.username
password = makeHash req.body.password
db.get "username:#{username}:uid", (err, result) ->
if err
res.render 'login',
error: 'Wrong username/password'
else
id = result
db.get "uid:#{result}:password", (err, result) ->
if err
res.render 'login',
error: 'Database error. Try again.'
else
if result is password
req.session.boltauth = 'true'
req.session.userid = id
req.session.username = username
res.redirect '/'
else
res.render 'login',
error: 'Wrong username/password'
app.get '/logout', (req, res) ->
req.session.destroy()
res.redirect '/'
app.get '/register', (req, res) ->
res.render 'register',
error: false
app.post '/register', (req, res) ->
username = req.body.username
password = req.body.password
# Check if user exists
db.get "username:#{username}:uid", (err, data) ->
if data
res.render 'register',
error: 'taken'
else
createUser username, password
res.redirect '/login'
app.get '/users', (req, res) ->
if not req.session.boltauth
res.redirect '/login'
id = req.session.userid
db.llen 'users', (err, result) ->
db.lrange 'users', 0, result, (err, result) ->
users = []
for user in result
parts = user.split ':'
users.push
username: parts[0]
id: parts[1]
# Now that we have the users array, let's add to each object a key to
# indicate whether we already follow this user
db.llen "uid:#{id}:following", (err, result) ->
db.lrange "uid:#{id}:following", 0, result, (err, result) ->
# Loop over and assign
for u in users
if u.id in result
u.follow = true
res.render 'users',
users: users
auth: true
app.post '/follow', (req, res) ->
id = req.session.userid
tofollow = req.body.id
db.rpush "uid:#{id}:following", tofollow, (er, d) ->
db.rpush "uid:#{tofollow}:followers", id, (er, d) ->
res.send 'ok'
# Only listen on $ node app.js
if not module.parent
io = io.listen app
app.listen process.env.PORT or 8000
console.log "Server running..."
# Socket helpers
sendMessageToFriends = (message, socket) ->
console.log 'sending message to friends'
sid = message.cookie
message = message.message
getUserByCookie sid, (client) ->
now = getNow()
message =
body: message
author: client.username
id: client.userid
sent: now
# message = JSON.stringify message
db.llen "uid:#{client.userid}:followers", (err, result) ->
db.lrange "uid:#{client.userid}:followers", 0, result, (err, result) ->
for user in result
# Send through sockets first
if user in Object.keys clients
say "sending a message to #{user}"
message.body = message.body.replace /</g, '<'
message.body = message.body.replace />/g, '>'
clients[user].socket.emit 'message', message
# And then save it in redis
db.rpush "uid:#{user}:timeline", JSON.stringify message
clients = {}
getTotalClients = -> Object.keys(clients).length
getUserByCookie = (cookie, callback) ->
db.get "sess:#{cookie}", (err, r) ->
callback JSON.parse r
registerClient = (sid, socket) ->
getUserByCookie sid.cookie, (data) ->
client =
id: data.userid
username: data.username
socket: socket
clients[client.id] = client
# client.id = d.userid
# client.username = d.username
# clients[client.id] = client
# Kick it
io.sockets.on 'connection', (client) ->
say 'got a new client'
client.on 'auth', (data) ->
registerClient data, client
client.on 'message', (message) ->
sendMessageToFriends message
client.on 'disconnect', ->
say 'a client disappeared'
delete clients[client.id]
t = getTotalClients()
say "total: #{t}"
| 158800 | express = require 'express'
crypto = require 'crypto'
redis = require 'redis'
RedisStore = require('connect-redis')(express)
# This is so that Bolt works on Heroku --- if Heroku supported websockets.
if process.env.REDISTOGO_URL
console.log 'redis to go'
db = require('redis-url').createClient process.env.REDISTOGO_URL
else
console.log 'not to go'
db = redis.createClient()
redisStore = new RedisStore
client: db
io = require 'socket.io'
# Helper functions
say = (word) -> console.log word
makeHash = (word) ->
h = crypto.createHash 'sha1'
h.update word
h.digest 'hex'
# Return utc now in format suitable for jquery.timeago
getNow = ->
pad = (n) ->
if n < 10
return "0#{n}"
else
return n
d = new Date
year = d.getUTCFullYear()
month = pad (d.getUTCMonth() + 1)
day = pad d.getUTCDate()
hour = pad d.getUTCHours()
minute = pad d.getUTCMinutes()
second = pad d.getUTCSeconds()
"#{year}-#{month}-#{day}T#{hour}:#{minute}:#{second}Z"
# Redis functions
db.on "error", (err) -> say err
createUser = (username, password) ->
db.incr 'global:nextUserId', (err, res) ->
db.set "username:#{username}:uid", res
db.set "uid:#{res}:username", username
db.set "uid:#{res}:password", makeHash password
db.lpush "users", "#{username}:#{res}"
app = module.exports = express.createServer()
# Express configuration
app.configure ->
app.set 'views', "#{__dirname}/views"
app.set 'view engine', 'jade'
app.use express.bodyParser()
app.use express.methodOverride()
app.use express.cookieParser()
app.use express.session
secret: "+N3,6.By4(S"
store: redisStore
cookie:
path: '/'
httpOnly: false
maxAge: 14400000
app.use app.router
app.use express.compiler
src: "#{__dirname}/public"
enable: ['less']
app.use express.static("#{__dirname}/public")
app.configure 'development', ->
app.use express.errorHandler
dumpExceptions: true
showStack: true
# Routes
app.get '/', (req, res) ->
if not req.session.boltauth
res.redirect '/login'
else
id = req.session.userid
# Select logged in user's messages
db.llen "uid:#{id}:timeline", (err, data) ->
db.lrange "uid:#{id}:timeline", 0, data, (err, data) ->
data = data.reverse()
res.render 'index',
auth: true
home: true
messages: data
app.get '/login', (req, res) ->
res.render 'login',
error: null
app.post '/login', (req, res) ->
# Extract username/password from POST
username = req.body.username
password = <PASSWORD>
db.get "username:#{username}:uid", (err, result) ->
if err
res.render 'login',
error: 'Wrong username/password'
else
id = result
db.get "uid:#{result}:password", (err, result) ->
if err
res.render 'login',
error: 'Database error. Try again.'
else
if result is password
req.session.boltauth = 'true'
req.session.userid = id
req.session.username = username
res.redirect '/'
else
res.render 'login',
error: 'Wrong username/password'
app.get '/logout', (req, res) ->
req.session.destroy()
res.redirect '/'
app.get '/register', (req, res) ->
res.render 'register',
error: false
app.post '/register', (req, res) ->
username = req.body.username
password = <PASSWORD>
# Check if user exists
db.get "username:#{username}:uid", (err, data) ->
if data
res.render 'register',
error: 'taken'
else
createUser username, password
res.redirect '/login'
app.get '/users', (req, res) ->
if not req.session.boltauth
res.redirect '/login'
id = req.session.userid
db.llen 'users', (err, result) ->
db.lrange 'users', 0, result, (err, result) ->
users = []
for user in result
parts = user.split ':'
users.push
username: parts[0]
id: parts[1]
# Now that we have the users array, let's add to each object a key to
# indicate whether we already follow this user
db.llen "uid:#{id}:following", (err, result) ->
db.lrange "uid:#{id}:following", 0, result, (err, result) ->
# Loop over and assign
for u in users
if u.id in result
u.follow = true
res.render 'users',
users: users
auth: true
app.post '/follow', (req, res) ->
id = req.session.userid
tofollow = req.body.id
db.rpush "uid:#{id}:following", tofollow, (er, d) ->
db.rpush "uid:#{tofollow}:followers", id, (er, d) ->
res.send 'ok'
# Only listen on $ node app.js
if not module.parent
io = io.listen app
app.listen process.env.PORT or 8000
console.log "Server running..."
# Socket helpers
sendMessageToFriends = (message, socket) ->
console.log 'sending message to friends'
sid = message.cookie
message = message.message
getUserByCookie sid, (client) ->
now = getNow()
message =
body: message
author: client.username
id: client.userid
sent: now
# message = JSON.stringify message
db.llen "uid:#{client.userid}:followers", (err, result) ->
db.lrange "uid:#{client.userid}:followers", 0, result, (err, result) ->
for user in result
# Send through sockets first
if user in Object.keys clients
say "sending a message to #{user}"
message.body = message.body.replace /</g, '<'
message.body = message.body.replace />/g, '>'
clients[user].socket.emit 'message', message
# And then save it in redis
db.rpush "uid:#{user}:timeline", JSON.stringify message
clients = {}
getTotalClients = -> Object.keys(clients).length
getUserByCookie = (cookie, callback) ->
db.get "sess:#{cookie}", (err, r) ->
callback JSON.parse r
registerClient = (sid, socket) ->
getUserByCookie sid.cookie, (data) ->
client =
id: data.userid
username: data.username
socket: socket
clients[client.id] = client
# client.id = d.userid
# client.username = d.username
# clients[client.id] = client
# Kick it
io.sockets.on 'connection', (client) ->
say 'got a new client'
client.on 'auth', (data) ->
registerClient data, client
client.on 'message', (message) ->
sendMessageToFriends message
client.on 'disconnect', ->
say 'a client disappeared'
delete clients[client.id]
t = getTotalClients()
say "total: #{t}"
| true | express = require 'express'
crypto = require 'crypto'
redis = require 'redis'
RedisStore = require('connect-redis')(express)
# This is so that Bolt works on Heroku --- if Heroku supported websockets.
if process.env.REDISTOGO_URL
console.log 'redis to go'
db = require('redis-url').createClient process.env.REDISTOGO_URL
else
console.log 'not to go'
db = redis.createClient()
redisStore = new RedisStore
client: db
io = require 'socket.io'
# Helper functions
say = (word) -> console.log word
makeHash = (word) ->
h = crypto.createHash 'sha1'
h.update word
h.digest 'hex'
# Return utc now in format suitable for jquery.timeago
getNow = ->
pad = (n) ->
if n < 10
return "0#{n}"
else
return n
d = new Date
year = d.getUTCFullYear()
month = pad (d.getUTCMonth() + 1)
day = pad d.getUTCDate()
hour = pad d.getUTCHours()
minute = pad d.getUTCMinutes()
second = pad d.getUTCSeconds()
"#{year}-#{month}-#{day}T#{hour}:#{minute}:#{second}Z"
# Redis functions
db.on "error", (err) -> say err
createUser = (username, password) ->
db.incr 'global:nextUserId', (err, res) ->
db.set "username:#{username}:uid", res
db.set "uid:#{res}:username", username
db.set "uid:#{res}:password", makeHash password
db.lpush "users", "#{username}:#{res}"
app = module.exports = express.createServer()
# Express configuration
app.configure ->
app.set 'views', "#{__dirname}/views"
app.set 'view engine', 'jade'
app.use express.bodyParser()
app.use express.methodOverride()
app.use express.cookieParser()
app.use express.session
secret: "+N3,6.By4(S"
store: redisStore
cookie:
path: '/'
httpOnly: false
maxAge: 14400000
app.use app.router
app.use express.compiler
src: "#{__dirname}/public"
enable: ['less']
app.use express.static("#{__dirname}/public")
app.configure 'development', ->
app.use express.errorHandler
dumpExceptions: true
showStack: true
# Routes
app.get '/', (req, res) ->
if not req.session.boltauth
res.redirect '/login'
else
id = req.session.userid
# Select logged in user's messages
db.llen "uid:#{id}:timeline", (err, data) ->
db.lrange "uid:#{id}:timeline", 0, data, (err, data) ->
data = data.reverse()
res.render 'index',
auth: true
home: true
messages: data
app.get '/login', (req, res) ->
res.render 'login',
error: null
app.post '/login', (req, res) ->
# Extract username/password from POST
username = req.body.username
password = PI:PASSWORD:<PASSWORD>END_PI
db.get "username:#{username}:uid", (err, result) ->
if err
res.render 'login',
error: 'Wrong username/password'
else
id = result
db.get "uid:#{result}:password", (err, result) ->
if err
res.render 'login',
error: 'Database error. Try again.'
else
if result is password
req.session.boltauth = 'true'
req.session.userid = id
req.session.username = username
res.redirect '/'
else
res.render 'login',
error: 'Wrong username/password'
app.get '/logout', (req, res) ->
req.session.destroy()
res.redirect '/'
app.get '/register', (req, res) ->
res.render 'register',
error: false
app.post '/register', (req, res) ->
username = req.body.username
password = PI:PASSWORD:<PASSWORD>END_PI
# Check if user exists
db.get "username:#{username}:uid", (err, data) ->
if data
res.render 'register',
error: 'taken'
else
createUser username, password
res.redirect '/login'
app.get '/users', (req, res) ->
if not req.session.boltauth
res.redirect '/login'
id = req.session.userid
db.llen 'users', (err, result) ->
db.lrange 'users', 0, result, (err, result) ->
users = []
for user in result
parts = user.split ':'
users.push
username: parts[0]
id: parts[1]
# Now that we have the users array, let's add to each object a key to
# indicate whether we already follow this user
db.llen "uid:#{id}:following", (err, result) ->
db.lrange "uid:#{id}:following", 0, result, (err, result) ->
# Loop over and assign
for u in users
if u.id in result
u.follow = true
res.render 'users',
users: users
auth: true
app.post '/follow', (req, res) ->
id = req.session.userid
tofollow = req.body.id
db.rpush "uid:#{id}:following", tofollow, (er, d) ->
db.rpush "uid:#{tofollow}:followers", id, (er, d) ->
res.send 'ok'
# Only listen on $ node app.js
if not module.parent
io = io.listen app
app.listen process.env.PORT or 8000
console.log "Server running..."
# Socket helpers
sendMessageToFriends = (message, socket) ->
console.log 'sending message to friends'
sid = message.cookie
message = message.message
getUserByCookie sid, (client) ->
now = getNow()
message =
body: message
author: client.username
id: client.userid
sent: now
# message = JSON.stringify message
db.llen "uid:#{client.userid}:followers", (err, result) ->
db.lrange "uid:#{client.userid}:followers", 0, result, (err, result) ->
for user in result
# Send through sockets first
if user in Object.keys clients
say "sending a message to #{user}"
message.body = message.body.replace /</g, '<'
message.body = message.body.replace />/g, '>'
clients[user].socket.emit 'message', message
# And then save it in redis
db.rpush "uid:#{user}:timeline", JSON.stringify message
clients = {}
getTotalClients = -> Object.keys(clients).length
getUserByCookie = (cookie, callback) ->
db.get "sess:#{cookie}", (err, r) ->
callback JSON.parse r
registerClient = (sid, socket) ->
getUserByCookie sid.cookie, (data) ->
client =
id: data.userid
username: data.username
socket: socket
clients[client.id] = client
# client.id = d.userid
# client.username = d.username
# clients[client.id] = client
# Kick it
io.sockets.on 'connection', (client) ->
say 'got a new client'
client.on 'auth', (data) ->
registerClient data, client
client.on 'message', (message) ->
sendMessageToFriends message
client.on 'disconnect', ->
say 'a client disappeared'
delete clients[client.id]
t = getTotalClients()
say "total: #{t}"
|
[
{
"context": "s chrome.runtime.lastError)\n else\n # TODO(ilya): Should this be getLastFocused instead?\n c",
"end": 12201,
"score": 0.6515578627586365,
"start": 12198,
"tag": "USERNAME",
"value": "ily"
},
{
"context": "hrome.runtime.lastError)\n else\n # TODO(il... | background_scripts/main.coffee | JamesLinus/vimium | 1 | root = exports ? window
# The browser may have tabs already open. We inject the content scripts immediately so that they work straight
# away.
chrome.runtime.onInstalled.addListener ({ reason }) ->
# See https://developer.chrome.com/extensions/runtime#event-onInstalled
return if reason in [ "chrome_update", "shared_module_update" ]
manifest = chrome.runtime.getManifest()
# Content scripts loaded on every page should be in the same group. We assume it is the first.
contentScripts = manifest.content_scripts[0]
jobs = [ [ chrome.tabs.executeScript, contentScripts.js ], [ chrome.tabs.insertCSS, contentScripts.css ] ]
# Chrome complains if we don't evaluate chrome.runtime.lastError on errors (and we get errors for tabs on
# which Vimium cannot run).
checkLastRuntimeError = -> chrome.runtime.lastError
chrome.tabs.query { status: "complete" }, (tabs) ->
for tab in tabs
for [ func, files ] in jobs
for file in files
func tab.id, { file: file, allFrames: contentScripts.all_frames }, checkLastRuntimeError
currentVersion = Utils.getCurrentVersion()
tabQueue = {} # windowId -> Array
tabInfoMap = {} # tabId -> object with various tab properties
keyQueue = "" # Queue of keys typed
validFirstKeys = {}
singleKeyCommands = []
focusedFrame = null
frameIdsForTab = {}
root.urlForTab = {}
# Keys are either literal characters, or "named" - for example <a-b> (alt+b), <left> (left arrow) or <f12>
# This regular expression captures two groups: the first is a named key, the second is the remainder of
# the string.
namedKeyRegex = /^(<(?:[amc]-.|(?:[amc]-)?[a-z0-9]{2,5})>)(.*)$/
# Event handlers
selectionChangedHandlers = []
# Note. tabLoadedHandlers handlers is exported for use also by "marks.coffee".
root.tabLoadedHandlers = {} # tabId -> function()
# A secret, available only within the current instantiation of Vimium. The secret is big, likely unguessable
# in practice, but less than 2^31.
chrome.storage.local.set
vimiumSecret: Math.floor Math.random() * 2000000000
completionSources =
bookmarks: new BookmarkCompleter
history: new HistoryCompleter
domains: new DomainCompleter
tabs: new TabCompleter
searchEngines: new SearchEngineCompleter
completers =
omni: new MultiCompleter [
completionSources.bookmarks
completionSources.history
completionSources.domains
completionSources.searchEngines
]
bookmarks: new MultiCompleter [completionSources.bookmarks]
tabs: new MultiCompleter [completionSources.tabs]
completionHandlers =
filter: (completer, request, port) ->
completer.filter request, (response) ->
# We use try here because this may fail if the sender has already navigated away from the original page.
# This can happen, for example, when posting completion suggestions from the SearchEngineCompleter
# (which is done asynchronously).
try
port.postMessage extend request, extend response, handler: "completions"
refresh: (completer, _, port) -> completer.refresh port
cancel: (completer, _, port) -> completer.cancel port
handleCompletions = (request, port) ->
completionHandlers[request.handler] completers[request.name], request, port
chrome.runtime.onConnect.addListener (port, name) ->
senderTabId = if port.sender.tab then port.sender.tab.id else null
# If this is a tab we've been waiting to open, execute any "tab loaded" handlers, e.g. to restore
# the tab's scroll position. Wait until domReady before doing this; otherwise operations like restoring
# the scroll position will not be possible.
if (port.name == "domReady" && senderTabId != null)
if (tabLoadedHandlers[senderTabId])
toCall = tabLoadedHandlers[senderTabId]
# Delete first to be sure there's no circular events.
delete tabLoadedHandlers[senderTabId]
toCall.call()
if (portHandlers[port.name])
port.onMessage.addListener(portHandlers[port.name])
chrome.runtime.onMessage.addListener((request, sender, sendResponse) ->
if (sendRequestHandlers[request.handler])
sendResponse(sendRequestHandlers[request.handler](request, sender))
# Ensure the sendResponse callback is freed.
return false)
#
# Used by the content scripts to get their full URL. This is needed for URLs like "view-source:http:# .."
# because window.location doesn't know anything about the Chrome-specific "view-source:".
#
getCurrentTabUrl = (request, sender) -> sender.tab.url
#
# Checks the user's preferences in local storage to determine if Vimium is enabled for the given URL, and
# whether any keys should be passed through to the underlying page.
# The source frame also informs us whether or not it has the focus, which allows us to track the URL of the
# active frame.
#
root.isEnabledForUrl = isEnabledForUrl = (request, sender) ->
urlForTab[sender.tab.id] = request.url if request.frameIsFocused
rule = Exclusions.getRule(request.url)
{
isEnabledForUrl: not rule or rule.passKeys
passKeys: rule?.passKeys or ""
}
onURLChange = (details) ->
chrome.tabs.sendMessage details.tabId, name: "checkEnabledAfterURLChange"
# Re-check whether Vimium is enabled for a frame when the url changes without a reload.
chrome.webNavigation.onHistoryStateUpdated.addListener onURLChange # history.pushState.
chrome.webNavigation.onReferenceFragmentUpdated.addListener onURLChange # Hash changed.
# Retrieves the help dialog HTML template from a file, and populates it with the latest keybindings.
# This is called by options.coffee.
root.helpDialogHtml = (showUnboundCommands, showCommandNames, customTitle) ->
commandsToKey = {}
for key of Commands.keyToCommandRegistry
command = Commands.keyToCommandRegistry[key].command
commandsToKey[command] = (commandsToKey[command] || []).concat(key)
dialogHtml = fetchFileContents("pages/help_dialog.html")
for group of Commands.commandGroups
dialogHtml = dialogHtml.replace("{{#{group}}}",
helpDialogHtmlForCommandGroup(group, commandsToKey, Commands.availableCommands,
showUnboundCommands, showCommandNames))
dialogHtml = dialogHtml.replace("{{version}}", currentVersion)
dialogHtml = dialogHtml.replace("{{title}}", customTitle || "Help")
dialogHtml
#
# Generates HTML for a given set of commands. commandGroups are defined in commands.js
#
helpDialogHtmlForCommandGroup = (group, commandsToKey, availableCommands,
showUnboundCommands, showCommandNames) ->
html = []
for command in Commands.commandGroups[group]
bindings = (commandsToKey[command] || [""]).join(", ")
if (showUnboundCommands || commandsToKey[command])
isAdvanced = Commands.advancedCommands.indexOf(command) >= 0
description = availableCommands[command].description
if bindings.length < 12
helpDialogHtmlForCommand html, isAdvanced, bindings, description, showCommandNames, command
else
# If the length of the bindings is too long, then we display the bindings on a separate row from the
# description. This prevents the column alignment from becoming out of whack.
helpDialogHtmlForCommand html, isAdvanced, bindings, "", false, ""
helpDialogHtmlForCommand html, isAdvanced, "", description, showCommandNames, command
html.join("\n")
helpDialogHtmlForCommand = (html, isAdvanced, bindings, description, showCommandNames, command) ->
html.push "<tr class='vimiumReset #{"advanced" if isAdvanced}'>"
if description
html.push "<td class='vimiumReset'>", Utils.escapeHtml(bindings), "</td>"
html.push "<td class='vimiumReset'>#{if description and bindings then ':' else ''}</td><td class='vimiumReset'>", description
html.push("<span class='vimiumReset commandName'>(#{command})</span>") if showCommandNames
else
html.push "<td class='vimiumReset' colspan='3' style='text-align: left;'>", Utils.escapeHtml(bindings)
html.push("</td></tr>")
#
# Fetches the contents of a file bundled with this extension.
#
fetchFileContents = (extensionFileName) ->
req = new XMLHttpRequest()
req.open("GET", chrome.runtime.getURL(extensionFileName), false) # false => synchronous
req.send()
req.responseText
#
# Returns the keys that can complete a valid command given the current key queue.
#
getCompletionKeysRequest = (request, keysToCheck = "") ->
name: "refreshCompletionKeys"
completionKeys: generateCompletionKeys(keysToCheck)
validFirstKeys: validFirstKeys
TabOperations =
# Opens the url in the current tab.
openUrlInCurrentTab: (request, callback = (->)) ->
chrome.tabs.getSelected null, (tab) ->
callback = (->) unless typeof callback == "function"
chrome.tabs.update tab.id, { url: Utils.convertToUrl(request.url) }, callback
# Opens request.url in new tab and switches to it if request.selected is true.
openUrlInNewTab: (request, callback = (->)) ->
chrome.tabs.getSelected null, (tab) ->
tabConfig =
url: Utils.convertToUrl request.url
index: tab.index + 1
selected: true
windowId: tab.windowId
openerTabId: tab.id
callback = (->) unless typeof callback == "function"
chrome.tabs.create tabConfig, callback
openUrlInIncognito: (request, callback = (->)) ->
callback = (->) unless typeof callback == "function"
chrome.windows.create {url: Utils.convertToUrl(request.url), incognito: true}, callback
#
# Copies or pastes some data (request.data) to/from the clipboard.
# We return null to avoid the return value from the copy operations being passed to sendResponse.
#
copyToClipboard = (request) -> Clipboard.copy(request.data); null
pasteFromClipboard = (request) -> Clipboard.paste()
#
# Selects the tab with the ID specified in request.id
#
selectSpecificTab = (request) ->
chrome.tabs.get(request.id, (tab) ->
chrome.windows.update(tab.windowId, { focused: true })
chrome.tabs.update(request.id, { selected: true }))
chrome.tabs.onSelectionChanged.addListener (tabId, selectionInfo) ->
if (selectionChangedHandlers.length > 0)
selectionChangedHandlers.pop().call()
repeatFunction = (func, totalCount, currentCount, frameId) ->
if (currentCount < totalCount)
func(
-> repeatFunction(func, totalCount, currentCount + 1, frameId),
frameId)
moveTab = (count) ->
chrome.tabs.getAllInWindow null, (tabs) ->
pinnedCount = (tabs.filter (tab) -> tab.pinned).length
chrome.tabs.getSelected null, (tab) ->
minIndex = if tab.pinned then 0 else pinnedCount
maxIndex = (if tab.pinned then pinnedCount else tabs.length) - 1
chrome.tabs.move tab.id,
index: Math.max minIndex, Math.min maxIndex, tab.index + count
# Start action functions
# These are commands which are bound to keystroke which must be handled by the background page. They are
# mapped in commands.coffee.
BackgroundCommands =
createTab: (callback) ->
chrome.tabs.query { active: true, currentWindow: true }, (tabs) ->
tab = tabs[0]
url = Settings.get "newTabUrl"
if url == "pages/blank.html"
# "pages/blank.html" does not work in incognito mode, so fall back to "chrome://newtab" instead.
url = if tab.incognito then "chrome://newtab" else chrome.runtime.getURL url
TabOperations.openUrlInNewTab { url }, callback
duplicateTab: (callback) ->
chrome.tabs.getSelected(null, (tab) ->
chrome.tabs.duplicate(tab.id)
selectionChangedHandlers.push(callback))
moveTabToNewWindow: (callback) ->
chrome.tabs.query {active: true, currentWindow: true}, (tabs) ->
tab = tabs[0]
chrome.windows.create {tabId: tab.id, incognito: tab.incognito}
nextTab: (count) -> selectTab "next", count
previousTab: (count) -> selectTab "previous", count
firstTab: (count) -> selectTab "first", count
lastTab: (count) -> selectTab "last", count
removeTab: (callback) ->
chrome.tabs.getSelected(null, (tab) ->
chrome.tabs.remove(tab.id)
selectionChangedHandlers.push(callback))
restoreTab: (callback) ->
# TODO: remove if-else -block when adopted into stable
if chrome.sessions
chrome.sessions.restore(null, (restoredSession) ->
callback() unless chrome.runtime.lastError)
else
# TODO(ilya): Should this be getLastFocused instead?
chrome.windows.getCurrent((window) ->
return unless (tabQueue[window.id] && tabQueue[window.id].length > 0)
tabQueueEntry = tabQueue[window.id].pop()
# Clean out the tabQueue so we don't have unused windows laying about.
delete tabQueue[window.id] if (tabQueue[window.id].length == 0)
# We have to chain a few callbacks to set the appropriate scroll position. We can't just wait until the
# tab is created because the content script is not available during the "loading" state. We need to
# wait until that's over before we can call setScrollPosition.
chrome.tabs.create({ url: tabQueueEntry.url, index: tabQueueEntry.positionIndex }, (tab) ->
tabLoadedHandlers[tab.id] = ->
chrome.tabs.sendRequest(tab.id,
name: "setScrollPosition",
scrollX: tabQueueEntry.scrollX,
scrollY: tabQueueEntry.scrollY)
callback()))
openCopiedUrlInCurrentTab: (request) -> TabOperations.openUrlInCurrentTab({ url: Clipboard.paste() })
openCopiedUrlInNewTab: (request) -> TabOperations.openUrlInNewTab({ url: Clipboard.paste() })
togglePinTab: (request) ->
chrome.tabs.getSelected(null, (tab) ->
chrome.tabs.update(tab.id, { pinned: !tab.pinned }))
showHelp: (callback, frameId) ->
chrome.tabs.getSelected(null, (tab) ->
chrome.tabs.sendMessage(tab.id,
{ name: "toggleHelpDialog", dialogHtml: helpDialogHtml(), frameId:frameId }))
moveTabLeft: (count) -> moveTab -count
moveTabRight: (count) -> moveTab count
nextFrame: (count,frameId) ->
chrome.tabs.getSelected null, (tab) ->
frameIdsForTab[tab.id] = cycleToFrame frameIdsForTab[tab.id], frameId, count
chrome.tabs.sendMessage tab.id, name: "focusFrame", frameId: frameIdsForTab[tab.id][0], highlight: true
mainFrame: ->
chrome.tabs.getSelected null, (tab) ->
# The front end interprets a frameId of 0 to mean the main/top from.
chrome.tabs.sendMessage tab.id, name: "focusFrame", frameId: 0, highlight: true
closeTabsOnLeft: -> removeTabsRelative "before"
closeTabsOnRight: -> removeTabsRelative "after"
closeOtherTabs: -> removeTabsRelative "both"
# Remove tabs before, after, or either side of the currently active tab
removeTabsRelative = (direction) ->
chrome.tabs.query {currentWindow: true}, (tabs) ->
chrome.tabs.query {currentWindow: true, active: true}, (activeTabs) ->
activeTabIndex = activeTabs[0].index
shouldDelete = switch direction
when "before"
(index) -> index < activeTabIndex
when "after"
(index) -> index > activeTabIndex
when "both"
(index) -> index != activeTabIndex
toRemove = []
for tab in tabs
if not tab.pinned and shouldDelete tab.index
toRemove.push tab.id
chrome.tabs.remove toRemove
# Selects a tab before or after the currently selected tab.
# - direction: "next", "previous", "first" or "last".
# - count: how far to move ("next"/"previous") or which tab from the start/end ("first"/"last").
selectTab = (direction, count = 1) ->
  chrome.tabs.getAllInWindow null, (tabs) ->
    return unless tabs.length > 1
    chrome.tabs.getSelected null, (currentTab) ->
      toSelect =
        switch direction
          when "next"
            (currentTab.index + count) % tabs.length
          when "previous"
            # Add count * tabs.length so the dividend is non-negative before taking the modulus.
            (currentTab.index - count + count * tabs.length) % tabs.length
          when "first"
            Math.min tabs.length - 1, count - 1
          when "last"
            Math.max 0, tabs.length - count
      chrome.tabs.update tabs[toSelect].id, selected: true
# Cache a tab's state (URL, position, window) in tabInfoMap, resetting its scroll position.
# deleteFrames: drop the tab's registered frames too (used when the page is reloading/closing).
updateOpenTabs = (tab, deleteFrames = false) ->
  # Chrome might reuse the tab ID of a recently removed tab; cancel that entry's pending expiry.
  if tabInfoMap[tab.id]?.deletor
    clearTimeout tabInfoMap[tab.id].deletor
  tabInfoMap[tab.id] =
    url: tab.url
    positionIndex: tab.index
    windowId: tab.windowId
    scrollX: null
    scrollY: null
    deletor: null
  # Frames are recreated on refresh
  delete frameIdsForTab[tab.id] if deleteFrames
# Here's how we set the page icon. The default is "disabled", so if we do nothing else, then we get the
# grey-out disabled icon. Thereafter, we only set tab-specific icons, so there's no need to update the icon
# when we visit a tab on which Vimium isn't running.
#
# For active tabs, when a frame starts, it requests its active state via isEnabledForUrl. We also check the
# state every time a frame gets the focus. In both cases, the frame then updates the tab's icon accordingly.
#
# Exclusion rule changes (from either the options page or the page popup) propagate via the subsequent focus
# change. In particular, whenever a frame next gets the focus, it requests its new state and sets the icon
# accordingly.
#
# Set the browser-action icon of the sender's tab to reflect Vimium's state there.
setIcon = (request, sender) ->
  # request.icon is one of the three known states; any other value leaves path undefined.
  path = switch request.icon
    when "enabled" then "icons/browser_action_enabled.png"
    when "partial" then "icons/browser_action_partial.png"
    when "disabled" then "icons/browser_action_disabled.png"
  chrome.browserAction.setIcon tabId: sender.tab.id, path: path
# Record a frame's reported scroll position against its tab.
handleUpdateScrollPosition = (request, sender) ->
  # See note regarding sender.tab at unregisterFrame.
  updateScrollPosition sender.tab, request.scrollX, request.scrollY if sender.tab?
# Record the most recent scroll position for a tab, so it can be restored if the tab is reopened.
updateScrollPosition = (tab, scrollX, scrollY) ->
  # A closed tab's entry is kept alive only briefly (see the onRemoved deletor), so a late-arriving
  # scroll message may refer to a tab we no longer track; don't crash on it.
  info = tabInfoMap[tab.id]
  return unless info?
  info.scrollX = scrollX
  info.scrollY = scrollY
chrome.tabs.onUpdated.addListener (tabId, changeInfo, tab) ->
  return unless changeInfo.status == "loading" # only do this once per URL change
  # Inject the user's custom link-hint CSS into all of the tab's frames as early as possible.
  cssConf =
    allFrames: true
    code: Settings.get("userDefinedLinkHintCss")
    runAt: "document_start"
  # Evaluate lastError so Chrome doesn't log an error for tabs we cannot inject into.
  chrome.tabs.insertCSS tabId, cssConf, -> chrome.runtime.lastError
  updateOpenTabs(tab) if changeInfo.url?
chrome.tabs.onAttached.addListener (tabId, attachedInfo) ->
  # We should update all the tabs in the old window and the new window.
  if tabInfoMap[tabId]
    updatePositionsAndWindowsForAllTabsInWindow(tabInfoMap[tabId].windowId)
  updatePositionsAndWindowsForAllTabsInWindow(attachedInfo.newWindowId)
chrome.tabs.onMoved.addListener (tabId, moveInfo) ->
  updatePositionsAndWindowsForAllTabsInWindow(moveInfo.windowId)
# Queue a removed tab for possible restoration, and tidy its per-tab caches.
chrome.tabs.onRemoved.addListener (tabId) ->
  openTabInfo = tabInfoMap[tabId]
  # The tab may never have been tracked; nothing to do in that case.
  return unless openTabInfo?
  updatePositionsAndWindowsForAllTabsInWindow(openTabInfo.windowId)
  # If we restore pages that content scripts can't run on, they'll ignore Vimium keystrokes when they
  # reappear. Pretend they never existed and adjust tab indices accordingly. Could possibly expand this into
  # a blacklist in the future.
  unless chrome.sessions
    if (/^(chrome|view-source:)[^:]*:\/\/.*/.test(openTabInfo.url))
      for i of tabQueue[openTabInfo.windowId]
        if (tabQueue[openTabInfo.windowId][i].positionIndex > openTabInfo.positionIndex)
          tabQueue[openTabInfo.windowId][i].positionIndex--
      return
    if (tabQueue[openTabInfo.windowId])
      tabQueue[openTabInfo.windowId].push(openTabInfo)
    else
      tabQueue[openTabInfo.windowId] = [openTabInfo]
  # keep the reference around for a while to wait for the last messages from the closed tab (e.g. for updating
  # scroll position). Store the *timer ID* on the tab's own entry -- not a function on the shared map --
  # so updateOpenTabs can actually cancel it with clearTimeout if Chrome reuses this tab ID.
  openTabInfo.deletor = setTimeout (-> delete tabInfoMap[tabId]), 1000
  delete frameIdsForTab[tabId]
  delete urlForTab[tabId]
# Without chrome.sessions, restoreTab relies on tabQueue; drop a window's queue when it closes.
unless chrome.sessions
  chrome.windows.onRemoved.addListener (windowId) -> delete tabQueue[windowId]
# End action functions
# Refresh the cached position and window of every tracked tab in the given window.
updatePositionsAndWindowsForAllTabsInWindow = (windowId) ->
  chrome.tabs.getAllInWindow(windowId, (tabs) ->
    for tab in tabs
      openTabInfo = tabInfoMap[tab.id]
      if (openTabInfo)
        openTabInfo.positionIndex = tab.index
        openTabInfo.windowId = tab.windowId)
# Split a key sequence into its first keystroke and the remainder. A leading "named" key such as
# "<a-b>" or "<f12>" counts as a single keystroke.
splitKeyIntoFirstAndSecond = (key) ->
  # Use exec's capture groups rather than the deprecated RegExp.$1/$2 globals, which any regex
  # operation executed in between would clobber.
  match = namedKeyRegex.exec key
  if match
    { first: match[1], second: match[2] }
  else
    { first: key[0], second: key.slice(1) }
# Count the keystrokes in a key sequence; each named key counts as one.
getActualKeyStrokeLength = (key) ->
  match = namedKeyRegex.exec key
  if match
    1 + getActualKeyStrokeLength(match[2])
  else
    key.length
# Record the first keystroke of every two-keystroke binding in validFirstKeys.
populateValidFirstKeys = ->
  for key of Commands.keyToCommandRegistry
    if (getActualKeyStrokeLength(key) == 2)
      validFirstKeys[splitKeyIntoFirstAndSecond(key).first] = true
# Record every single-keystroke binding in singleKeyCommands.
populateSingleKeyCommands = ->
  for key of Commands.keyToCommandRegistry
    if (getActualKeyStrokeLength(key) == 1)
      singleKeyCommands.push(key)
# Invoked by options.coffee. Rebuild the key tables and push fresh completion keys to all tabs.
root.refreshCompletionKeysAfterMappingSave = ->
  validFirstKeys = {}
  singleKeyCommands = []
  populateValidFirstKeys()
  populateSingleKeyCommands()
  sendRequestToAllTabs(getCompletionKeysRequest())
# Generates a list of keys that can complete a valid command given the current key queue or the one passed in
generateCompletionKeys = (keysToCheck) ->
  splitHash = splitKeyQueue(keysToCheck || keyQueue)
  command = splitHash.command
  count = splitHash.count
  # Single-keystroke commands are always valid completions.
  completionKeys = singleKeyCommands.slice(0)
  if (getActualKeyStrokeLength(command) == 1)
    # The queue holds the first keystroke of a two-key binding; offer each matching second keystroke.
    for key of Commands.keyToCommandRegistry
      splitKey = splitKeyIntoFirstAndSecond(key)
      if (splitKey.first == command)
        completionKeys.push(splitKey.second)
  completionKeys
# Split the key queue into an optional numeric count prefix and the remaining command keys.
# When no count prefix is present, count is NaN.
splitKeyQueue = (queue) ->
  [wholeMatch, countText, rest] = /([1-9][0-9]*)?(.*)/.exec queue
  { count: parseInt(countText, 10), command: rest }
# Handle a keystroke arriving on the "keyDown" port: <ESC> clears the queue, anything else is
# appended and the queue re-evaluated.
handleKeyDown = (request, port) ->
  key = request.keyChar
  if (key == "<ESC>")
    console.log("clearing keyQueue")
    keyQueue = ""
  else
    console.log("checking keyQueue: [", keyQueue + key, "]")
    keyQueue = checkKeyQueue(keyQueue + key, port.sender.tab.id, request.frameId)
    console.log("new KeyQueue: " + keyQueue)
  # Tell the content script whether there are keys in the queue.
  # FIXME: There is a race condition here. The behaviour in the content script depends upon whether this message gets
  # back there before or after the next keystroke.
  # That being said, I suspect there are other similar race conditions here, for example in checkKeyQueue().
  # Steve (23 Aug, 14).
  chrome.tabs.sendMessage(port.sender.tab.id,
    name: "currentKeyQueue",
    keyQueue: keyQueue)
# Evaluate the key queue, executing a command if one is now complete. Returns the new key queue:
# "" once a command fires (or the keys cannot form one), or the retained prefix when the keys so
# far could still become a two-keystroke binding.
checkKeyQueue = (keysToCheck, tabId, frameId) ->
  refreshedCompletionKeys = false
  splitHash = splitKeyQueue(keysToCheck)
  command = splitHash.command
  count = splitHash.count
  return keysToCheck if command.length == 0
  count = 1 if isNaN(count)
  if (Commands.keyToCommandRegistry[command])
    registryEntry = Commands.keyToCommandRegistry[command]
    runCommand = true
    if registryEntry.noRepeat
      count = 1
    else if registryEntry.repeatLimit and count > registryEntry.repeatLimit
      # Ask for confirmation before running an unusually large repeat count.
      runCommand = confirm """
        You have asked Vimium to perform #{count} repeats of the command:
        #{Commands.availableCommands[registryEntry.command].description}
        Are you sure you want to continue?
        """
    if runCommand
      if not registryEntry.isBackgroundCommand
        # Page commands run in the content script; piggyback fresh completion keys on the message.
        chrome.tabs.sendMessage tabId,
          name: "executePageCommand"
          command: registryEntry.command
          frameId: frameId
          count: count
          completionKeys: generateCompletionKeys ""
          registryEntry: registryEntry
        refreshedCompletionKeys = true
      else
        if registryEntry.passCountToFunction
          BackgroundCommands[registryEntry.command](count, frameId)
        else if registryEntry.noRepeat
          BackgroundCommands[registryEntry.command](frameId)
        else
          repeatFunction(BackgroundCommands[registryEntry.command], count, 0, frameId)
    newKeyQueue = ""
  else if (getActualKeyStrokeLength(command) > 1)
    splitKey = splitKeyIntoFirstAndSecond(command)
    # The second key might be a valid command by its self.
    if (Commands.keyToCommandRegistry[splitKey.second])
      newKeyQueue = checkKeyQueue(splitKey.second, tabId, frameId)
    else
      newKeyQueue = (if validFirstKeys[splitKey.second] then splitKey.second else "")
  else
    newKeyQueue = (if validFirstKeys[command] then count.toString() + command else "")
  # If we haven't sent the completion keys piggybacked on executePageCommand,
  # send them by themselves.
  unless refreshedCompletionKeys
    chrome.tabs.sendMessage(tabId, getCompletionKeysRequest(null, newKeyQueue), null)
  newKeyQueue
#
# Message all tabs. Args should be the arguments hash used by the Chrome sendRequest API.
#
sendRequestToAllTabs = (args) ->
  chrome.windows.getAll({ populate: true }, (windows) ->
    for window in windows
      for tab in window.tabs
        chrome.tabs.sendMessage(tab.id, args, null))
# Open the extension's options page in a new tab next to the current one.
openOptionsPageInNewTab = ->
  chrome.tabs.getSelected(null, (tab) ->
    chrome.tabs.create({ url: chrome.runtime.getURL("pages/options.html"), index: tab.index + 1 }))
# Track a newly created frame against its tab.
registerFrame = (request, sender) ->
  (frameIdsForTab[sender.tab.id] ?= []).push request.frameId
# Forget a frame when it unloads (or re-cache the whole tab when the tab itself is closing).
unregisterFrame = (request, sender) ->
  # When a tab is closing, Chrome sometimes passes messages without sender.tab. Therefore, we guard against
  # this.
  tabId = sender.tab?.id
  return unless tabId?
  if frameIdsForTab[tabId]?
    if request.tab_is_closing
      updateOpenTabs sender.tab, true
    else
      frameIdsForTab[tabId] = frameIdsForTab[tabId].filter (id) -> id != request.frameId
# Record that a frame received the focus, and broadcast that fact to all of the tab's frames.
handleFrameFocused = (request, sender) ->
  tabId = sender.tab.id
  # Cycle frameIdsForTab to the focused frame. However, also ensure that we don't inadvertently register a
  # frame which wasn't previously registered (such as a frameset).
  if frameIdsForTab[tabId]?
    frameIdsForTab[tabId] = cycleToFrame frameIdsForTab[tabId], request.frameId
  # Inform all frames that a frame has received the focus.
  chrome.tabs.sendMessage sender.tab.id,
    name: "frameFocused"
    focusFrameId: request.frameId
# Rotate the frames list so that it begins `count` places after frameId.
cycleToFrame = (frames, frameId, count = 0) ->
  frames ||= []
  # We can't always track which frame chrome has focussed; offset from frameId's position (or 0 if
  # it is unknown) so rotation does indeed start from frameId.
  start = Math.max 0, frames.indexOf frameId
  offset = (count + start) % frames.length
  frames[offset..].concat frames[...offset]
# Send a message to all frames in the current tab.
sendMessageToFrames = (request, sender) ->
  chrome.tabs.sendMessage sender.tab.id, request.message
# For debugging only. This allows content scripts to log messages to the background page's console.
bgLog = (request, sender) ->
  console.log "#{sender.tab.id}/#{request.frameId}", request.message
# Port handler mapping: long-lived port name -> handler(message, port).
portHandlers =
  keyDown: handleKeyDown,
  completions: handleCompletions
# One-shot message handlers, keyed by request.handler. Each receives (request, sender); its return
# value is passed back to the sender via sendResponse.
sendRequestHandlers =
  getCompletionKeys: getCompletionKeysRequest
  getCurrentTabUrl: getCurrentTabUrl
  openUrlInNewTab: TabOperations.openUrlInNewTab
  openUrlInIncognito: TabOperations.openUrlInIncognito
  openUrlInCurrentTab: TabOperations.openUrlInCurrentTab
  openOptionsPageInNewTab: openOptionsPageInNewTab
  registerFrame: registerFrame
  unregisterFrame: unregisterFrame
  frameFocused: handleFrameFocused
  nextFrame: (request) -> BackgroundCommands.nextFrame 1, request.frameId
  updateScrollPosition: handleUpdateScrollPosition
  copyToClipboard: copyToClipboard
  pasteFromClipboard: pasteFromClipboard
  isEnabledForUrl: isEnabledForUrl
  selectSpecificTab: selectSpecificTab
  createMark: Marks.create.bind(Marks)
  gotoMark: Marks.goto.bind(Marks)
  setIcon: setIcon
  sendMessageToFrames: sendMessageToFrames
  log: bgLog
  fetchFileContents: (request, sender) -> fetchFileContents request.fileName
# We always remove chrome.storage.local/findModeRawQueryListIncognito on startup.
chrome.storage.local.remove "findModeRawQueryListIncognito"
# Remove chrome.storage.local/findModeRawQueryListIncognito if there are no remaining incognito-mode windows.
# Since the common case is that there are none to begin with, we first check whether the key is set at all.
chrome.tabs.onRemoved.addListener (tabId) ->
  chrome.storage.local.get "findModeRawQueryListIncognito", (items) ->
    if items.findModeRawQueryListIncognito
      chrome.windows.getAll null, (windows) ->
        for window in windows
          return if window.incognito
        # There are no remaining incognito-mode tabs, and findModeRawQueryListIncognito is set.
        chrome.storage.local.remove "findModeRawQueryListIncognito"
# Tidy up tab caches when tabs are removed. We cannot rely on unregisterFrame because Chrome does not always
# provide sender.tab there.
# NOTE(smblott) (2015-05-05) This may break restoreTab on legacy Chrome versions, but we'll be moving to
# chrome.sessions support only soon anyway.
chrome.tabs.onRemoved.addListener (tabId) ->
  delete cache[tabId] for cache in [ frameIdsForTab, urlForTab, tabInfoMap ]
# Convenience function for development use.
window.runTests = -> open(chrome.runtime.getURL('tests/dom_tests/dom_tests.html'))
#
# Begin initialization: install default and user key mappings, then build the key lookup tables.
#
Commands.clearKeyMappingsAndSetDefaults()
if Settings.has("keyMappings")
  Commands.parseCustomKeyMappings(Settings.get("keyMappings"))
populateValidFirstKeys()
populateSingleKeyCommands()
# Show notification on upgrade.
showUpgradeMessage = ->
  # Avoid showing the upgrade notification when previousVersion is undefined, which is the case for new
  # installs.
  Settings.set "previousVersion", currentVersion unless Settings.get "previousVersion"
  if Utils.compareVersions(currentVersion, Settings.get "previousVersion" ) == 1
    notificationId = "VimiumUpgradeNotification"
    notification =
      type: "basic"
      iconUrl: chrome.runtime.getURL "icons/vimium.png"
      title: "Vimium Upgrade"
      message: "Vimium has been upgraded to version #{currentVersion}. Click here for more information."
      isClickable: true
    if chrome.notifications?.create?
      chrome.notifications.create notificationId, notification, ->
        unless chrome.runtime.lastError
          # Only record the new version once the notification has actually been shown.
          Settings.set "previousVersion", currentVersion
          chrome.notifications.onClicked.addListener (id) ->
            if id == notificationId
              TabOperations.openUrlInNewTab url: "https://github.com/philc/vimium#release-notes"
    else
      # We need to wait for the user to accept the "notifications" permission.
      chrome.permissions.onAdded.addListener showUpgradeMessage
# Ensure that tabInfoMap is populated when Vimium is installed.
chrome.windows.getAll { populate: true }, (windows) ->
  for window in windows
    for tab in window.tabs
      updateOpenTabs(tab)
      # Bind `tab` with `do`: the loop variable is shared across iterations, so a plain closure
      # would see only the *last* tab by the time this asynchronous response arrives.
      do (tab) ->
        chrome.tabs.sendMessage tab.id, { name: "getScrollPosition" }, (response) ->
          updateScrollPosition(tab, response.scrollX, response.scrollY) if response?
showUpgradeMessage()
# Export for use by other parts of the extension.
root.TabOperations = TabOperations
# Export onto `exports` when available (tests), otherwise onto the window.
root = exports ? window
# The browser may have tabs already open. We inject the content scripts immediately so that they work straight
# away.
chrome.runtime.onInstalled.addListener ({ reason }) ->
  # See https://developer.chrome.com/extensions/runtime#event-onInstalled
  return if reason in [ "chrome_update", "shared_module_update" ]
  manifest = chrome.runtime.getManifest()
  # Content scripts loaded on every page should be in the same group. We assume it is the first.
  contentScripts = manifest.content_scripts[0]
  jobs = [ [ chrome.tabs.executeScript, contentScripts.js ], [ chrome.tabs.insertCSS, contentScripts.css ] ]
  # Chrome complains if we don't evaluate chrome.runtime.lastError on errors (and we get errors for tabs on
  # which Vimium cannot run).
  checkLastRuntimeError = -> chrome.runtime.lastError
  chrome.tabs.query { status: "complete" }, (tabs) ->
    for tab in tabs
      for [ func, files ] in jobs
        for file in files
          func tab.id, { file: file, allFrames: contentScripts.all_frames }, checkLastRuntimeError
currentVersion = Utils.getCurrentVersion()
tabQueue = {} # windowId -> Array
tabInfoMap = {} # tabId -> object with various tab properties
keyQueue = "" # Queue of keys typed
validFirstKeys = {} # first keystrokes of multi-key bindings -> true
singleKeyCommands = [] # keystrokes which are complete commands by themselves
focusedFrame = null
frameIdsForTab = {} # tabId -> list of registered frame ids
root.urlForTab = {} # tabId -> URL of the tab's focused frame
# Keys are either literal characters, or "named" - for example <a-b> (alt+b), <left> (left arrow) or <f12>
# This regular expression captures two groups: the first is a named key, the second is the remainder of
# the string.
namedKeyRegex = /^(<(?:[amc]-.|(?:[amc]-)?[a-z0-9]{2,5})>)(.*)$/
# Event handlers
selectionChangedHandlers = []
# Note. tabLoadedHandlers handlers is exported for use also by "marks.coffee".
root.tabLoadedHandlers = {} # tabId -> function()
# A secret, available only within the current instantiation of Vimium. The secret is big, likely unguessable
# in practice, but less than 2^31.
chrome.storage.local.set
  vimiumSecret: Math.floor Math.random() * 2000000000
# Completion sources for the Vomnibar, and the multi-source completers built from them.
completionSources =
  bookmarks: new BookmarkCompleter
  history: new HistoryCompleter
  domains: new DomainCompleter
  tabs: new TabCompleter
  searchEngines: new SearchEngineCompleter
completers =
  omni: new MultiCompleter [
    completionSources.bookmarks
    completionSources.history
    completionSources.domains
    completionSources.searchEngines
  ]
  bookmarks: new MultiCompleter [completionSources.bookmarks]
  tabs: new MultiCompleter [completionSources.tabs]
# Dispatch table for messages arriving on a "completions" port.
completionHandlers =
  filter: (completer, request, port) ->
    completer.filter request, (response) ->
      # We use try here because this may fail if the sender has already navigated away from the original page.
      # This can happen, for example, when posting completion suggestions from the SearchEngineCompleter
      # (which is done asynchronously).
      try
        port.postMessage extend request, extend response, handler: "completions"
  refresh: (completer, _, port) -> completer.refresh port
  cancel: (completer, _, port) -> completer.cancel port
# Route a "completions" port message to the requested handler and completer.
handleCompletions = (request, port) ->
  completionHandlers[request.handler] completers[request.name], request, port
# Wire up long-lived ports (keyDown, completions, domReady) as tabs connect.
chrome.runtime.onConnect.addListener (port, name) ->
  senderTabId = if port.sender.tab then port.sender.tab.id else null
  # If this is a tab we've been waiting to open, execute any "tab loaded" handlers, e.g. to restore
  # the tab's scroll position. Wait until domReady before doing this; otherwise operations like restoring
  # the scroll position will not be possible.
  if (port.name == "domReady" && senderTabId != null)
    if (tabLoadedHandlers[senderTabId])
      toCall = tabLoadedHandlers[senderTabId]
      # Delete first to be sure there's no circular events.
      delete tabLoadedHandlers[senderTabId]
      toCall.call()
  if (portHandlers[port.name])
    port.onMessage.addListener(portHandlers[port.name])
# Dispatch one-shot messages to sendRequestHandlers; the handler's return value is the response.
chrome.runtime.onMessage.addListener((request, sender, sendResponse) ->
  if (sendRequestHandlers[request.handler])
    sendResponse(sendRequestHandlers[request.handler](request, sender))
  # Ensure the sendResponse callback is freed.
  return false)
#
# Used by the content scripts to get their full URL. This is needed for URLs like "view-source:http:# .."
# because window.location doesn't know anything about the Chrome-specific "view-source:".
#
getCurrentTabUrl = (request, sender) -> sender.tab.url
#
# Checks the user's preferences in local storage to determine if Vimium is enabled for the given URL, and
# whether any keys should be passed through to the underlying page.
# The source frame also informs us whether or not it has the focus, which allows us to track the URL of the
# active frame.
#
root.isEnabledForUrl = isEnabledForUrl = (request, sender) ->
  urlForTab[sender.tab.id] = request.url if request.frameIsFocused
  rule = Exclusions.getRule(request.url)
  {
    # Enabled when no exclusion rule matches, or when the matching rule only passes keys through.
    isEnabledForUrl: not rule or rule.passKeys
    passKeys: rule?.passKeys or ""
  }
# Ask a tab's frames to re-check their enabled state after an in-page navigation.
onURLChange = (details) ->
  chrome.tabs.sendMessage details.tabId, name: "checkEnabledAfterURLChange"
# Re-check whether Vimium is enabled for a frame when the url changes without a reload.
chrome.webNavigation.onHistoryStateUpdated.addListener onURLChange # history.pushState.
chrome.webNavigation.onReferenceFragmentUpdated.addListener onURLChange # Hash changed.
# Retrieves the help dialog HTML template from a file, and populates it with the latest keybindings.
# This is called by options.coffee.
root.helpDialogHtml = (showUnboundCommands, showCommandNames, customTitle) ->
  # Build a reverse map: command name -> all keys bound to it.
  commandsToKey = {}
  for key of Commands.keyToCommandRegistry
    command = Commands.keyToCommandRegistry[key].command
    commandsToKey[command] = (commandsToKey[command] || []).concat(key)
  dialogHtml = fetchFileContents("pages/help_dialog.html")
  # Replace each {{groupName}} placeholder with that group's rendered rows.
  for group of Commands.commandGroups
    dialogHtml = dialogHtml.replace("{{#{group}}}",
      helpDialogHtmlForCommandGroup(group, commandsToKey, Commands.availableCommands,
        showUnboundCommands, showCommandNames))
  dialogHtml = dialogHtml.replace("{{version}}", currentVersion)
  dialogHtml = dialogHtml.replace("{{title}}", customTitle || "Help")
  dialogHtml
#
# Generates HTML for a given set of commands. commandGroups are defined in commands.js
#
helpDialogHtmlForCommandGroup = (group, commandsToKey, availableCommands,
  showUnboundCommands, showCommandNames) ->
  html = []
  for command in Commands.commandGroups[group]
    bindings = (commandsToKey[command] || [""]).join(", ")
    if (showUnboundCommands || commandsToKey[command])
      isAdvanced = Commands.advancedCommands.indexOf(command) >= 0
      description = availableCommands[command].description
      if bindings.length < 12
        helpDialogHtmlForCommand html, isAdvanced, bindings, description, showCommandNames, command
      else
        # If the length of the bindings is too long, then we display the bindings on a separate row from the
        # description. This prevents the column alignment from becoming out of whack.
        helpDialogHtmlForCommand html, isAdvanced, bindings, "", false, ""
        helpDialogHtmlForCommand html, isAdvanced, "", description, showCommandNames, command
  html.join("\n")
# Append one help-dialog table row for a single command to `html` (mutated in place).
helpDialogHtmlForCommand = (html, isAdvanced, bindings, description, showCommandNames, command) ->
  html.push "<tr class='vimiumReset #{"advanced" if isAdvanced}'>"
  if description
    html.push "<td class='vimiumReset'>", Utils.escapeHtml(bindings), "</td>"
    html.push "<td class='vimiumReset'>#{if description and bindings then ':' else ''}</td><td class='vimiumReset'>", description
    html.push("<span class='vimiumReset commandName'>(#{command})</span>") if showCommandNames
  else
    html.push "<td class='vimiumReset' colspan='3' style='text-align: left;'>", Utils.escapeHtml(bindings)
  html.push("</td></tr>")
#
# Fetches the contents of a file bundled with this extension.
#
fetchFileContents = (extensionFileName) ->
  req = new XMLHttpRequest()
  # A synchronous request is acceptable here: the file is local to the extension package.
  req.open("GET", chrome.runtime.getURL(extensionFileName), false) # false => synchronous
  req.send()
  req.responseText
#
# Returns the keys that can complete a valid command given the current key queue.
#
getCompletionKeysRequest = (request, keysToCheck = "") ->
  name: "refreshCompletionKeys"
  completionKeys: generateCompletionKeys(keysToCheck)
  validFirstKeys: validFirstKeys
# Tab-opening primitives, shared by the background commands and the message handlers.
TabOperations =
  # Opens the url in the current tab.
  openUrlInCurrentTab: (request, callback = (->)) ->
    chrome.tabs.getSelected null, (tab) ->
      # Fall back to a no-op when the caller did not supply a function.
      callback = (->) unless typeof callback == "function"
      chrome.tabs.update tab.id, { url: Utils.convertToUrl(request.url) }, callback
  # Opens request.url in new tab and switches to it if request.selected is true.
  openUrlInNewTab: (request, callback = (->)) ->
    chrome.tabs.getSelected null, (tab) ->
      tabConfig =
        url: Utils.convertToUrl request.url
        index: tab.index + 1
        selected: true
        windowId: tab.windowId
        openerTabId: tab.id
      callback = (->) unless typeof callback == "function"
      chrome.tabs.create tabConfig, callback
  # Opens request.url in a new incognito window.
  openUrlInIncognito: (request, callback = (->)) ->
    callback = (->) unless typeof callback == "function"
    chrome.windows.create {url: Utils.convertToUrl(request.url), incognito: true}, callback
#
# Copies or pastes some data (request.data) to/from the clipboard.
# We return null to avoid the return value from the copy operations being passed to sendResponse.
#
copyToClipboard = (request) -> Clipboard.copy(request.data); null
pasteFromClipboard = (request) -> Clipboard.paste()
#
# Selects the tab with the ID specified in request.id
#
selectSpecificTab = (request) ->
  chrome.tabs.get(request.id, (tab) ->
    # Focus the tab's window first, then select the tab itself.
    chrome.windows.update(tab.windowId, { focused: true })
    chrome.tabs.update(request.id, { selected: true }))
# Run the most recently registered pending handler (see e.g. removeTab, duplicateTab).
chrome.tabs.onSelectionChanged.addListener (tabId, selectionInfo) ->
  if (selectionChangedHandlers.length > 0)
    selectionChangedHandlers.pop().call()
# Invoke func repeatedly, (totalCount - currentCount) times in all. Each invocation receives a
# continuation which triggers the next repetition, plus the frameId.
repeatFunction = (func, totalCount, currentCount, frameId) ->
  return unless currentCount < totalCount
  next = -> repeatFunction func, totalCount, currentCount + 1, frameId
  func next, frameId
# Move the current tab by `count` positions, clamped so pinned tabs stay within the pinned region
# and unpinned tabs stay within the unpinned region.
moveTab = (count) ->
  chrome.tabs.getAllInWindow null, (tabs) ->
    pinnedCount = (tabs.filter (tab) -> tab.pinned).length
    chrome.tabs.getSelected null, (tab) ->
      minIndex = if tab.pinned then 0 else pinnedCount
      maxIndex = (if tab.pinned then pinnedCount else tabs.length) - 1
      chrome.tabs.move tab.id,
        index: Math.max minIndex, Math.min maxIndex, tab.index + count
# Start action functions
# These are commands which are bound to keystroke which must be handled by the background page. They are
# mapped in commands.coffee.
BackgroundCommands =
  # Open a new tab next to the current one, honouring the "newTabUrl" setting.
  createTab: (callback) ->
    chrome.tabs.query { active: true, currentWindow: true }, (tabs) ->
      tab = tabs[0]
      url = Settings.get "newTabUrl"
      if url == "pages/blank.html"
        # "pages/blank.html" does not work in incognito mode, so fall back to "chrome://newtab" instead.
        url = if tab.incognito then "chrome://newtab" else chrome.runtime.getURL url
      TabOperations.openUrlInNewTab { url }, callback
  # Duplicate the current tab; callback runs once the selection has changed.
  duplicateTab: (callback) ->
    chrome.tabs.getSelected(null, (tab) ->
      chrome.tabs.duplicate(tab.id)
      selectionChangedHandlers.push(callback))
  # Detach the current tab into a new window, preserving its incognito state.
  moveTabToNewWindow: (callback) ->
    chrome.tabs.query {active: true, currentWindow: true}, (tabs) ->
      tab = tabs[0]
      chrome.windows.create {tabId: tab.id, incognito: tab.incognito}
  # Relative and absolute tab selection; see selectTab.
  nextTab: (count) -> selectTab "next", count
  previousTab: (count) -> selectTab "previous", count
  firstTab: (count) -> selectTab "first", count
  lastTab: (count) -> selectTab "last", count
  # Close the current tab; callback runs once the selection has changed.
  removeTab: (callback) ->
    chrome.tabs.getSelected(null, (tab) ->
      chrome.tabs.remove(tab.id)
      selectionChangedHandlers.push(callback))
  # Reopen the most recently closed tab, restoring its position and scroll offset.
  restoreTab: (callback) ->
    # TODO: remove if-else -block when adopted into stable
    if chrome.sessions
      chrome.sessions.restore(null, (restoredSession) ->
        callback() unless chrome.runtime.lastError)
    else
      # TODO: Should this be getLastFocused instead?
      chrome.windows.getCurrent((window) ->
        return unless (tabQueue[window.id] && tabQueue[window.id].length > 0)
        tabQueueEntry = tabQueue[window.id].pop()
        # Clean out the tabQueue so we don't have unused windows laying about.
        delete tabQueue[window.id] if (tabQueue[window.id].length == 0)
        # We have to chain a few callbacks to set the appropriate scroll position. We can't just wait until the
        # tab is created because the content script is not available during the "loading" state. We need to
        # wait until that's over before we can call setScrollPosition.
        chrome.tabs.create({ url: tabQueueEntry.url, index: tabQueueEntry.positionIndex }, (tab) ->
          tabLoadedHandlers[tab.id] = ->
            chrome.tabs.sendRequest(tab.id,
              name: "setScrollPosition",
              scrollX: tabQueueEntry.scrollX,
              scrollY: tabQueueEntry.scrollY)
          callback()))
  # Open the clipboard's URL in the current tab / a new tab.
  openCopiedUrlInCurrentTab: (request) -> TabOperations.openUrlInCurrentTab({ url: Clipboard.paste() })
  openCopiedUrlInNewTab: (request) -> TabOperations.openUrlInNewTab({ url: Clipboard.paste() })
  # Toggle the pinned state of the current tab.
  togglePinTab: (request) ->
    chrome.tabs.getSelected(null, (tab) ->
      chrome.tabs.update(tab.id, { pinned: !tab.pinned }))
  # Show (or hide) the help dialog in the given frame of the current tab.
  showHelp: (callback, frameId) ->
    chrome.tabs.getSelected(null, (tab) ->
      chrome.tabs.sendMessage(tab.id,
        { name: "toggleHelpDialog", dialogHtml: helpDialogHtml(), frameId:frameId }))
  # Move the current tab left/right; see moveTab for pinned-region clamping.
  moveTabLeft: (count) -> moveTab -count
  moveTabRight: (count) -> moveTab count
  # Rotate focus to the frame `count` places after the currently focused frame.
  nextFrame: (count,frameId) ->
    chrome.tabs.getSelected null, (tab) ->
      frameIdsForTab[tab.id] = cycleToFrame frameIdsForTab[tab.id], frameId, count
      chrome.tabs.sendMessage tab.id, name: "focusFrame", frameId: frameIdsForTab[tab.id][0], highlight: true
  # Focus the tab's main (top-level) frame.
  mainFrame: ->
    chrome.tabs.getSelected null, (tab) ->
      # The front end interprets a frameId of 0 to mean the main/top frame.
      chrome.tabs.sendMessage tab.id, name: "focusFrame", frameId: 0, highlight: true
  # Close unpinned tabs to the left of / right of / other than the active tab.
  closeTabsOnLeft: -> removeTabsRelative "before"
  closeTabsOnRight: -> removeTabsRelative "after"
  closeOtherTabs: -> removeTabsRelative "both"
# Remove tabs before, after, or on both sides of the currently active tab.
# direction: "before", "after" or "both". Pinned tabs are never removed.
removeTabsRelative = (direction) ->
  chrome.tabs.query {currentWindow: true}, (tabs) ->
    # Locate the active tab within this same snapshot, instead of issuing a second asynchronous
    # query (the active tab could change between the two callbacks).
    activeTabIndex = (tabs.filter (tab) -> tab.active)[0].index
    shouldDelete = switch direction
      when "before"
        (index) -> index < activeTabIndex
      when "after"
        (index) -> index > activeTabIndex
      when "both"
        (index) -> index != activeTabIndex
    toRemove = (tab.id for tab in tabs when not tab.pinned and shouldDelete tab.index)
    chrome.tabs.remove toRemove
# Selects a tab before or after the currently selected tab.
# - direction: "next", "previous", "first" or "last".
# - count: how far to move ("next"/"previous") or which tab from the start/end ("first"/"last").
selectTab = (direction, count = 1) ->
  chrome.tabs.getAllInWindow null, (tabs) ->
    return unless tabs.length > 1
    chrome.tabs.getSelected null, (currentTab) ->
      toSelect =
        switch direction
          when "next"
            (currentTab.index + count) % tabs.length
          when "previous"
            # Add count * tabs.length so the dividend is non-negative before taking the modulus.
            (currentTab.index - count + count * tabs.length) % tabs.length
          when "first"
            Math.min tabs.length - 1, count - 1
          when "last"
            Math.max 0, tabs.length - count
      chrome.tabs.update tabs[toSelect].id, selected: true
# Cache a tab's state (URL, position, window) in tabInfoMap, resetting its scroll position.
# deleteFrames: drop the tab's registered frames too (used when the page is reloading/closing).
updateOpenTabs = (tab, deleteFrames = false) ->
  # Chrome might reuse the tab ID of a recently removed tab; cancel that entry's pending expiry.
  if tabInfoMap[tab.id]?.deletor
    clearTimeout tabInfoMap[tab.id].deletor
  tabInfoMap[tab.id] =
    url: tab.url
    positionIndex: tab.index
    windowId: tab.windowId
    scrollX: null
    scrollY: null
    deletor: null
  # Frames are recreated on refresh
  delete frameIdsForTab[tab.id] if deleteFrames
# Here's how we set the page icon. The default is "disabled", so if we do nothing else, then we get the
# grey-out disabled icon. Thereafter, we only set tab-specific icons, so there's no need to update the icon
# when we visit a tab on which Vimium isn't running.
#
# For active tabs, when a frame starts, it requests its active state via isEnabledForUrl. We also check the
# state every time a frame gets the focus. In both cases, the frame then updates the tab's icon accordingly.
#
# Exclusion rule changes (from either the options page or the page popup) propagate via the subsequent focus
# change. In particular, whenever a frame next gets the focus, it requests its new state and sets the icon
# accordingly.
#
# Set the browser-action icon of the sender's tab to reflect Vimium's state there.
setIcon = (request, sender) ->
  # request.icon is one of the three known states; any other value leaves path undefined.
  path = switch request.icon
    when "enabled" then "icons/browser_action_enabled.png"
    when "partial" then "icons/browser_action_partial.png"
    when "disabled" then "icons/browser_action_disabled.png"
  chrome.browserAction.setIcon tabId: sender.tab.id, path: path
# Record a frame's reported scroll position against its tab.
handleUpdateScrollPosition = (request, sender) ->
  # See note regarding sender.tab at unregisterFrame.
  updateScrollPosition sender.tab, request.scrollX, request.scrollY if sender.tab?
# Record the most recent scroll position for a tab, so it can be restored if the tab is reopened.
updateScrollPosition = (tab, scrollX, scrollY) ->
  # A closed tab's entry is kept alive only briefly (see the onRemoved deletor), so a late-arriving
  # scroll message may refer to a tab we no longer track; don't crash on it.
  info = tabInfoMap[tab.id]
  return unless info?
  info.scrollX = scrollX
  info.scrollY = scrollY
chrome.tabs.onUpdated.addListener (tabId, changeInfo, tab) ->
  return unless changeInfo.status == "loading" # only do this once per URL change
  # Inject the user's custom link-hint CSS into all of the tab's frames as early as possible.
  cssConf =
    allFrames: true
    code: Settings.get("userDefinedLinkHintCss")
    runAt: "document_start"
  # Evaluate lastError so Chrome doesn't log an error for tabs we cannot inject into.
  chrome.tabs.insertCSS tabId, cssConf, -> chrome.runtime.lastError
  updateOpenTabs(tab) if changeInfo.url?
chrome.tabs.onAttached.addListener (tabId, attachedInfo) ->
  # We should update all the tabs in the old window and the new window.
  if tabInfoMap[tabId]
    updatePositionsAndWindowsForAllTabsInWindow(tabInfoMap[tabId].windowId)
  updatePositionsAndWindowsForAllTabsInWindow(attachedInfo.newWindowId)
chrome.tabs.onMoved.addListener (tabId, moveInfo) ->
  updatePositionsAndWindowsForAllTabsInWindow(moveInfo.windowId)
# Queue a removed tab for possible restoration, and tidy its per-tab caches.
chrome.tabs.onRemoved.addListener (tabId) ->
  openTabInfo = tabInfoMap[tabId]
  # The tab may never have been tracked; nothing to do in that case.
  return unless openTabInfo?
  updatePositionsAndWindowsForAllTabsInWindow(openTabInfo.windowId)
  # If we restore pages that content scripts can't run on, they'll ignore Vimium keystrokes when they
  # reappear. Pretend they never existed and adjust tab indices accordingly. Could possibly expand this into
  # a blacklist in the future.
  unless chrome.sessions
    if (/^(chrome|view-source:)[^:]*:\/\/.*/.test(openTabInfo.url))
      for i of tabQueue[openTabInfo.windowId]
        if (tabQueue[openTabInfo.windowId][i].positionIndex > openTabInfo.positionIndex)
          tabQueue[openTabInfo.windowId][i].positionIndex--
      return
    if (tabQueue[openTabInfo.windowId])
      tabQueue[openTabInfo.windowId].push(openTabInfo)
    else
      tabQueue[openTabInfo.windowId] = [openTabInfo]
  # keep the reference around for a while to wait for the last messages from the closed tab (e.g. for updating
  # scroll position). Store the *timer ID* on the tab's own entry -- not a function on the shared map --
  # so updateOpenTabs can actually cancel it with clearTimeout if Chrome reuses this tab ID.
  openTabInfo.deletor = setTimeout (-> delete tabInfoMap[tabId]), 1000
  delete frameIdsForTab[tabId]
  delete urlForTab[tabId]
unless chrome.sessions
  # Without the sessions API we keep our own tabQueue; drop a window's queue when the window closes.
  chrome.windows.onRemoved.addListener (windowId) -> delete tabQueue[windowId]
# End action functions
# Refresh the cached positionIndex/windowId of every tracked tab in a window.
updatePositionsAndWindowsForAllTabsInWindow = (windowId) ->
  chrome.tabs.getAllInWindow(windowId, (tabs) ->
    for tab in tabs
      openTabInfo = tabInfoMap[tab.id]
      if (openTabInfo)
        openTabInfo.positionIndex = tab.index
        openTabInfo.windowId = tab.windowId)
# Split a key sequence into its first keystroke and the remainder; a leading
# named key like "<c-a>" counts as one keystroke.
# Uses exec rather than the deprecated RegExp.$1/$2 statics, which are global
# mutable state and can be clobbered by any intervening regex match.
splitKeyIntoFirstAndSecond = (key) ->
  match = namedKeyRegex.exec key
  if match
    { first: match[1], second: match[2] }
  else
    { first: key[0], second: key.slice(1) }
# Number of keystrokes in `key`, counting a named key such as "<c-a>" as one.
# Uses exec instead of the deprecated RegExp.$2 static, which is fragile global
# state that any intervening regex match can overwrite.
getActualKeyStrokeLength = (key) ->
  match = namedKeyRegex.exec key
  if match
    1 + getActualKeyStrokeLength(match[2])
  else
    key.length
# Record the first keystroke of every two-keystroke mapping, so partial input
# can be recognized.
populateValidFirstKeys = ->
  for key of Commands.keyToCommandRegistry when getActualKeyStrokeLength(key) == 2
    validFirstKeys[splitKeyIntoFirstAndSecond(key).first] = true
# Collect every single-keystroke mapping.
populateSingleKeyCommands = ->
  for key of Commands.keyToCommandRegistry when getActualKeyStrokeLength(key) == 1
    singleKeyCommands.push(key)
# Invoked by options.coffee.
root.refreshCompletionKeysAfterMappingSave = ->
  # Rebuild the key lookup tables from the freshly saved registry, then push
  # the new completion keys to every tab.
  validFirstKeys = {}
  singleKeyCommands = []
  populateValidFirstKeys()
  populateSingleKeyCommands()
  sendRequestToAllTabs(getCompletionKeysRequest())
# Generates a list of keys that can complete a valid command given the current
# key queue or the one passed in.
generateCompletionKeys = (keysToCheck) ->
  {command, count} = splitKeyQueue(keysToCheck || keyQueue)
  # Every single-key command is always a candidate.
  completionKeys = singleKeyCommands.slice(0)
  if getActualKeyStrokeLength(command) == 1
    # One keystroke entered: add the second keys of all mappings starting with it.
    for key of Commands.keyToCommandRegistry
      splitKey = splitKeyIntoFirstAndSecond key
      completionKeys.push splitKey.second if splitKey.first == command
  completionKeys
# Split a queue like "12gg" into its count prefix (12) and command ("gg").
# The count is NaN when the queue has no numeric prefix.
splitKeyQueue = (queue) ->
  [wholeMatch, countString, commandString] = /([1-9][0-9]*)?(.*)/.exec(queue)
  count: parseInt(countString, 10)
  command: commandString
# Handle a single keystroke forwarded from a content script's keyDown port.
handleKeyDown = (request, port) ->
  key = request.keyChar
  if (key == "<ESC>")
    console.log("clearing keyQueue")
    keyQueue = ""
  else
    console.log("checking keyQueue: [", keyQueue + key, "]")
    keyQueue = checkKeyQueue(keyQueue + key, port.sender.tab.id, request.frameId)
    console.log("new KeyQueue: " + keyQueue)
  # Tell the content script whether there are keys in the queue.
  # FIXME: There is a race condition here. The behaviour in the content script depends upon whether this message gets
  # back there before or after the next keystroke.
  # That being said, I suspect there are other similar race conditions here, for example in checkKeyQueue().
  # (23 Aug, 14).
  chrome.tabs.sendMessage(port.sender.tab.id,
    name: "currentKeyQueue",
    keyQueue: keyQueue)
# Consume keysToCheck against the command registry: run any completed command
# (in the page or in the background) and return the new, possibly empty, key
# queue. Recurses when the second keystroke of a pair is itself a command.
checkKeyQueue = (keysToCheck, tabId, frameId) ->
  refreshedCompletionKeys = false
  splitHash = splitKeyQueue(keysToCheck)
  command = splitHash.command
  count = splitHash.count
  return keysToCheck if command.length == 0
  count = 1 if isNaN(count)
  if (Commands.keyToCommandRegistry[command])
    registryEntry = Commands.keyToCommandRegistry[command]
    runCommand = true
    if registryEntry.noRepeat
      count = 1
    else if registryEntry.repeatLimit and count > registryEntry.repeatLimit
      # Guard against accidental huge counts (e.g. "1000dd") by confirming.
      runCommand = confirm """
        You have asked Vimium to perform #{count} repeats of the command:
        #{Commands.availableCommands[registryEntry.command].description}
        Are you sure you want to continue?
      """
    if runCommand
      if not registryEntry.isBackgroundCommand
        chrome.tabs.sendMessage tabId,
          name: "executePageCommand"
          command: registryEntry.command
          frameId: frameId
          count: count
          completionKeys: generateCompletionKeys ""
          registryEntry: registryEntry
        refreshedCompletionKeys = true
      else
        if registryEntry.passCountToFunction
          BackgroundCommands[registryEntry.command](count, frameId)
        else if registryEntry.noRepeat
          BackgroundCommands[registryEntry.command](frameId)
        else
          repeatFunction(BackgroundCommands[registryEntry.command], count, 0, frameId)
    newKeyQueue = ""
  else if (getActualKeyStrokeLength(command) > 1)
    splitKey = splitKeyIntoFirstAndSecond(command)
    # The second key might be a valid command by itself.
    if (Commands.keyToCommandRegistry[splitKey.second])
      newKeyQueue = checkKeyQueue(splitKey.second, tabId, frameId)
    else
      newKeyQueue = (if validFirstKeys[splitKey.second] then splitKey.second else "")
  else
    newKeyQueue = (if validFirstKeys[command] then count.toString() + command else "")
  # If we haven't sent the completion keys piggybacked on executePageCommand,
  # send them by themselves.
  unless refreshedCompletionKeys
    chrome.tabs.sendMessage(tabId, getCompletionKeysRequest(null, newKeyQueue), null)
  newKeyQueue
#
# Message all tabs. Args should be the arguments hash used by the Chrome sendRequest API.
#
# Broadcasts to every tab in every window; delivery failures are ignored.
sendRequestToAllTabs = (args) ->
  chrome.windows.getAll({ populate: true }, (windows) ->
    for window in windows
      for tab in window.tabs
        chrome.tabs.sendMessage(tab.id, args, null))
# Open the options page in a tab immediately to the right of the selected tab.
openOptionsPageInNewTab = ->
  chrome.tabs.getSelected(null, (tab) ->
    chrome.tabs.create({ url: chrome.runtime.getURL("pages/options.html"), index: tab.index + 1 }))
# Record a newly created frame's id against its tab.
registerFrame = (request, sender) ->
  (frameIdsForTab[sender.tab.id] ?= []).push request.frameId
# Remove a frame's id from its tab's list (or drop the whole tab on close).
unregisterFrame = (request, sender) ->
  # When a tab is closing, Chrome sometimes passes messages without sender.tab. Therefore, we guard against
  # this.
  tabId = sender.tab?.id
  return unless tabId?
  if frameIdsForTab[tabId]?
    if request.tab_is_closing
      updateOpenTabs sender.tab, true
    else
      frameIdsForTab[tabId] = frameIdsForTab[tabId].filter (id) -> id != request.frameId
# A frame reports that it has gained the focus: rotate the tab's frame list so
# it starts at that frame, and broadcast the change to all frames in the tab.
handleFrameFocused = (request, sender) ->
  tabId = sender.tab.id
  # Cycle frameIdsForTab to the focused frame. However, also ensure that we don't inadvertently register a
  # frame which wasn't previously registered (such as a frameset).
  if frameIdsForTab[tabId]?
    frameIdsForTab[tabId] = cycleToFrame frameIdsForTab[tabId], request.frameId
  # Inform all frames that a frame has received the focus.
  chrome.tabs.sendMessage sender.tab.id,
    name: "frameFocused"
    focusFrameId: request.frameId
# Rotate through frames to the frame count places after frameId.
cycleToFrame = (frames, frameId, count = 0) ->
  frames ||= []
  # We can't always track which frame chrome has focussed, but here we learn that it's frameId; so add an
  # additional offset such that we do indeed start from frameId.
  offset = (count + Math.max 0, frames.indexOf frameId) % frames.length
  # Same rotation as the splat-based original, expressed with slice/concat.
  frames[offset..].concat frames[0...offset]
# Send a message to all frames in the current tab.
sendMessageToFrames = (request, sender) ->
  chrome.tabs.sendMessage sender.tab.id, request.message
# For debugging only. This allows content scripts to log messages to the background page's console.
bgLog = (request, sender) ->
  # The "tabId/frameId" prefix identifies the sending frame.
  console.log "#{sender.tab.id}/#{request.frameId}", request.message
# Port handler mapping
portHandlers =
  keyDown: handleKeyDown,
  completions: handleCompletions
# Handlers for one-shot chrome.runtime.sendMessage requests, keyed by request.handler.
sendRequestHandlers =
  getCompletionKeys: getCompletionKeysRequest
  getCurrentTabUrl: getCurrentTabUrl
  openUrlInNewTab: TabOperations.openUrlInNewTab
  openUrlInIncognito: TabOperations.openUrlInIncognito
  openUrlInCurrentTab: TabOperations.openUrlInCurrentTab
  openOptionsPageInNewTab: openOptionsPageInNewTab
  registerFrame: registerFrame
  unregisterFrame: unregisterFrame
  frameFocused: handleFrameFocused
  nextFrame: (request) -> BackgroundCommands.nextFrame 1, request.frameId
  updateScrollPosition: handleUpdateScrollPosition
  copyToClipboard: copyToClipboard
  pasteFromClipboard: pasteFromClipboard
  isEnabledForUrl: isEnabledForUrl
  selectSpecificTab: selectSpecificTab
  createMark: Marks.create.bind(Marks)
  gotoMark: Marks.goto.bind(Marks)
  setIcon: setIcon
  sendMessageToFrames: sendMessageToFrames
  log: bgLog
  fetchFileContents: (request, sender) -> fetchFileContents request.fileName
# We always remove chrome.storage.local/findModeRawQueryListIncognito on startup.
chrome.storage.local.remove "findModeRawQueryListIncognito"
# Remove chrome.storage.local/findModeRawQueryListIncognito if there are no remaining incognito-mode windows.
# Since the common case is that there are none to begin with, we first check whether the key is set at all.
chrome.tabs.onRemoved.addListener (tabId) ->
  chrome.storage.local.get "findModeRawQueryListIncognito", (items) ->
    if items.findModeRawQueryListIncognito
      chrome.windows.getAll null, (windows) ->
        for window in windows
          return if window.incognito
        # There are no remaining incognito-mode tabs, and findModeRawQueryListIncognito is set.
        chrome.storage.local.remove "findModeRawQueryListIncognito"
# Tidy up tab caches when tabs are removed. We cannot rely on unregisterFrame because Chrome does not always
# provide sender.tab there.
# NOTE(smblott) (2015-05-05) This may break restoreTab on legacy Chrome versions, but we'll be moving to
# chrome.sessions support only soon anyway.
chrome.tabs.onRemoved.addListener (tabId) ->
  delete cache[tabId] for cache in [ frameIdsForTab, urlForTab, tabInfoMap ]
# Convenience function for development use.
window.runTests = -> open(chrome.runtime.getURL('tests/dom_tests/dom_tests.html'))
#
# Begin initialization.
#
# Load the default key mappings, overlay any user-defined mappings, then build
# the derived key lookup tables.
Commands.clearKeyMappingsAndSetDefaults()
if Settings.has("keyMappings")
  Commands.parseCustomKeyMappings(Settings.get("keyMappings"))
populateValidFirstKeys()
populateSingleKeyCommands()
# Show notification on upgrade.
showUpgradeMessage = ->
  # Avoid showing the upgrade notification when previousVersion is undefined, which is the case for new
  # installs.
  Settings.set "previousVersion", currentVersion unless Settings.get "previousVersion"
  if Utils.compareVersions(currentVersion, Settings.get "previousVersion" ) == 1
    notificationId = "VimiumUpgradeNotification"
    notification =
      type: "basic"
      iconUrl: chrome.runtime.getURL "icons/vimium.png"
      title: "Vimium Upgrade"
      message: "Vimium has been upgraded to version #{currentVersion}. Click here for more information."
      isClickable: true
    if chrome.notifications?.create?
      chrome.notifications.create notificationId, notification, ->
        unless chrome.runtime.lastError
          # Only record the new version once the notification was shown.
          Settings.set "previousVersion", currentVersion
          chrome.notifications.onClicked.addListener (id) ->
            if id == notificationId
              TabOperations.openUrlInNewTab url: "https://github.com/philc/vimium#release-notes"
    else
      # We need to wait for the user to accept the "notifications" permission.
      chrome.permissions.onAdded.addListener showUpgradeMessage
# Ensure that tabInfoMap is populated when Vimium is installed.
chrome.windows.getAll { populate: true }, (windows) ->
  for window in windows
    for tab in window.tabs
      # Fix: capture each `tab` with `do`. The getScrollPosition callback runs
      # asynchronously, and without this every callback would close over the
      # loop's final value of `tab`, recording all scroll positions against
      # the last tab.
      do (tab) ->
        updateOpenTabs(tab)
        createScrollPositionHandler = ->
          (response) -> updateScrollPosition(tab, response.scrollX, response.scrollY) if response?
        chrome.tabs.sendMessage(tab.id, { name: "getScrollPosition" }, createScrollPositionHandler())
showUpgradeMessage()
root.TabOperations = TabOperations
# Export onto `exports` when available (tests/CommonJS), else onto `window`.
# Fix: removed the stray "| true | " table artifact that broke this line.
root = exports ? window
# The browser may have tabs already open. We inject the content scripts immediately so that they work straight
# away.
chrome.runtime.onInstalled.addListener ({ reason }) ->
  # See https://developer.chrome.com/extensions/runtime#event-onInstalled
  return if reason in [ "chrome_update", "shared_module_update" ]
  manifest = chrome.runtime.getManifest()
  # Content scripts loaded on every page should be in the same group. We assume it is the first.
  contentScripts = manifest.content_scripts[0]
  # Pair each injection API with the file list it should inject.
  jobs = [ [ chrome.tabs.executeScript, contentScripts.js ], [ chrome.tabs.insertCSS, contentScripts.css ] ]
  # Chrome complains if we don't evaluate chrome.runtime.lastError on errors (and we get errors for tabs on
  # which Vimium cannot run).
  checkLastRuntimeError = -> chrome.runtime.lastError
  chrome.tabs.query { status: "complete" }, (tabs) ->
    for tab in tabs
      for [ func, files ] in jobs
        for file in files
          func tab.id, { file: file, allFrames: contentScripts.all_frames }, checkLastRuntimeError
currentVersion = Utils.getCurrentVersion()
tabQueue = {} # windowId -> Array
tabInfoMap = {} # tabId -> object with various tab properties
keyQueue = "" # Queue of keys typed
validFirstKeys = {} # first keystroke of each two-keystroke mapping -> true
singleKeyCommands = [] # all single-keystroke mappings
focusedFrame = null
frameIdsForTab = {} # tabId -> list of registered frame ids
root.urlForTab = {} # tabId -> URL of the tab's focused frame
# Keys are either literal characters, or "named" - for example <a-b> (alt+b), <left> (left arrow) or <f12>
# This regular expression captures two groups: the first is a named key, the second is the remainder of
# the string.
namedKeyRegex = /^(<(?:[amc]-.|(?:[amc]-)?[a-z0-9]{2,5})>)(.*)$/
# Event handlers
selectionChangedHandlers = []
# Note. tabLoadedHandlers is exported for use also by "marks.coffee".
root.tabLoadedHandlers = {} # tabId -> function()
# A secret, available only within the current instantiation of Vimium. The secret is big, likely unguessable
# in practice, but less than 2^31.
chrome.storage.local.set
  vimiumSecret: Math.floor Math.random() * 2000000000
# Individual suggestion sources used by the vomnibar.
completionSources =
  bookmarks: new BookmarkCompleter
  history: new HistoryCompleter
  domains: new DomainCompleter
  tabs: new TabCompleter
  searchEngines: new SearchEngineCompleter
# Completers exposed to the front end, each aggregating one or more sources.
completers =
  omni: new MultiCompleter [
    completionSources.bookmarks
    completionSources.history
    completionSources.domains
    completionSources.searchEngines
  ]
  bookmarks: new MultiCompleter [completionSources.bookmarks]
  tabs: new MultiCompleter [completionSources.tabs]
# Actions a completion port may request, keyed by request.handler.
completionHandlers =
  filter: (completer, request, port) ->
    completer.filter request, (response) ->
      # We use try here because this may fail if the sender has already navigated away from the original page.
      # This can happen, for example, when posting completion suggestions from the SearchEngineCompleter
      # (which is done asynchronously).
      try
        port.postMessage extend request, extend response, handler: "completions"
  refresh: (completer, _, port) -> completer.refresh port
  cancel: (completer, _, port) -> completer.cancel port
# Dispatch a completion-port message to the matching handler and completer.
handleCompletions = (request, port) ->
  completionHandlers[request.handler] completers[request.name], request, port
# Wire up long-lived ports from content scripts, and run any deferred
# "tab loaded" handlers once a tab's DOM is ready.
chrome.runtime.onConnect.addListener (port, name) ->
  senderTabId = if port.sender.tab then port.sender.tab.id else null
  # If this is a tab we've been waiting to open, execute any "tab loaded" handlers, e.g. to restore
  # the tab's scroll position. Wait until domReady before doing this; otherwise operations like restoring
  # the scroll position will not be possible.
  if (port.name == "domReady" && senderTabId != null)
    if (tabLoadedHandlers[senderTabId])
      toCall = tabLoadedHandlers[senderTabId]
      # Delete first to be sure there's no circular events.
      delete tabLoadedHandlers[senderTabId]
      toCall.call()
  if (portHandlers[port.name])
    port.onMessage.addListener(portHandlers[port.name])
# One-shot messages: look up the handler by request.handler and reply synchronously.
chrome.runtime.onMessage.addListener((request, sender, sendResponse) ->
  if (sendRequestHandlers[request.handler])
    sendResponse(sendRequestHandlers[request.handler](request, sender))
  # Ensure the sendResponse callback is freed.
  return false)
#
# Used by the content scripts to get their full URL. This is needed for URLs like "view-source:http://.."
# because window.location doesn't know anything about the Chrome-specific "view-source:".
#
getCurrentTabUrl = (request, sender) -> sender.tab.url
#
# Checks the user's preferences in local storage to determine if Vimium is enabled for the given URL, and
# whether any keys should be passed through to the underlying page.
# The source frame also informs us whether or not it has the focus, which allows us to track the URL of the
# active frame.
#
root.isEnabledForUrl = isEnabledForUrl = (request, sender) ->
  urlForTab[sender.tab.id] = request.url if request.frameIsFocused
  rule = Exclusions.getRule(request.url)
  # Enabled when there is no exclusion rule, or when the rule only passes keys through.
  {
    isEnabledForUrl: not rule or rule.passKeys
    passKeys: rule?.passKeys or ""
  }
# Ask the frame to re-check its enabled state after an in-page URL change.
onURLChange = (details) ->
  chrome.tabs.sendMessage details.tabId, name: "checkEnabledAfterURLChange"
# Re-check whether Vimium is enabled for a frame when the url changes without a reload.
chrome.webNavigation.onHistoryStateUpdated.addListener onURLChange # history.pushState.
chrome.webNavigation.onReferenceFragmentUpdated.addListener onURLChange # Hash changed.
# Retrieves the help dialog HTML template from a file, and populates it with the latest keybindings.
# This is called by options.coffee.
root.helpDialogHtml = (showUnboundCommands, showCommandNames, customTitle) ->
  # Invert the registry: command name -> list of keys bound to it.
  commandsToKey = {}
  for key of Commands.keyToCommandRegistry
    command = Commands.keyToCommandRegistry[key].command
    commandsToKey[command] = (commandsToKey[command] || []).concat(key)
  dialogHtml = fetchFileContents("pages/help_dialog.html")
  for group of Commands.commandGroups
    dialogHtml = dialogHtml.replace("{{#{group}}}",
      helpDialogHtmlForCommandGroup(group, commandsToKey, Commands.availableCommands,
                                    showUnboundCommands, showCommandNames))
  dialogHtml = dialogHtml.replace("{{version}}", currentVersion)
  dialogHtml = dialogHtml.replace("{{title}}", customTitle || "Help")
  dialogHtml
#
# Generates HTML for a given set of commands. commandGroups are defined in commands.js
#
helpDialogHtmlForCommandGroup = (group, commandsToKey, availableCommands,
                                 showUnboundCommands, showCommandNames) ->
  html = []
  for command in Commands.commandGroups[group]
    bindings = (commandsToKey[command] || [""]).join(", ")
    if (showUnboundCommands || commandsToKey[command])
      isAdvanced = Commands.advancedCommands.indexOf(command) >= 0
      description = availableCommands[command].description
      if bindings.length < 12
        helpDialogHtmlForCommand html, isAdvanced, bindings, description, showCommandNames, command
      else
        # If the length of the bindings is too long, then we display the bindings on a separate row from the
        # description. This prevents the column alignment from becoming out of whack.
        helpDialogHtmlForCommand html, isAdvanced, bindings, "", false, ""
        helpDialogHtmlForCommand html, isAdvanced, "", description, showCommandNames, command
  html.join("\n")
# Append one table row for a single command to the html accumulator (mutates html).
helpDialogHtmlForCommand = (html, isAdvanced, bindings, description, showCommandNames, command) ->
  html.push "<tr class='vimiumReset #{"advanced" if isAdvanced}'>"
  if description
    html.push "<td class='vimiumReset'>", Utils.escapeHtml(bindings), "</td>"
    html.push "<td class='vimiumReset'>#{if description and bindings then ':' else ''}</td><td class='vimiumReset'>", description
    html.push("<span class='vimiumReset commandName'>(#{command})</span>") if showCommandNames
  else
    html.push "<td class='vimiumReset' colspan='3' style='text-align: left;'>", Utils.escapeHtml(bindings)
  html.push("</td></tr>")
#
# Fetches the contents of a file bundled with this extension.
#
# Note: this XHR is synchronous, so callers get the text immediately.
fetchFileContents = (extensionFileName) ->
  req = new XMLHttpRequest()
  req.open("GET", chrome.runtime.getURL(extensionFileName), false) # false => synchronous
  req.send()
  req.responseText
#
# Returns the keys that can complete a valid command given the current key queue.
#
# Returns a "refreshCompletionKeys" message object (implicit object literal).
getCompletionKeysRequest = (request, keysToCheck = "") ->
  name: "refreshCompletionKeys"
  completionKeys: generateCompletionKeys(keysToCheck)
  validFirstKeys: validFirstKeys
TabOperations =
  # Opens the url in the current tab.
  openUrlInCurrentTab: (request, callback = (->)) ->
    chrome.tabs.getSelected null, (tab) ->
      # NOTE(review): callback may arrive as a non-function (e.g. a message-handler
      # argument); normalize it — confirm against callers.
      callback = (->) unless typeof callback == "function"
      chrome.tabs.update tab.id, { url: Utils.convertToUrl(request.url) }, callback
  # Opens request.url in new tab and switches to it if request.selected is true.
  openUrlInNewTab: (request, callback = (->)) ->
    chrome.tabs.getSelected null, (tab) ->
      tabConfig =
        url: Utils.convertToUrl request.url
        index: tab.index + 1
        selected: true
        windowId: tab.windowId
        openerTabId: tab.id
      callback = (->) unless typeof callback == "function"
      chrome.tabs.create tabConfig, callback
  # Opens request.url in a new incognito window.
  openUrlInIncognito: (request, callback = (->)) ->
    callback = (->) unless typeof callback == "function"
    chrome.windows.create {url: Utils.convertToUrl(request.url), incognito: true}, callback
#
# Copies or pastes some data (request.data) to/from the clipboard.
# We return null to avoid the return value from the copy operations being passed to sendResponse.
#
copyToClipboard = (request) -> Clipboard.copy(request.data); null
pasteFromClipboard = (request) -> Clipboard.paste()
#
# Selects the tab with the ID specified in request.id
#
selectSpecificTab = (request) ->
  chrome.tabs.get(request.id, (tab) ->
    chrome.windows.update(tab.windowId, { focused: true })
    chrome.tabs.update(request.id, { selected: true }))
# Run a pending selection-changed handler (registered by tab commands awaiting completion).
chrome.tabs.onSelectionChanged.addListener (tabId, selectionInfo) ->
  if (selectionChangedHandlers.length > 0)
    selectionChangedHandlers.pop().call()
# Invoke `func` a total of `totalCount - currentCount` times. `func` receives a
# continuation which triggers the next repetition, plus the frameId.
repeatFunction = (func, totalCount, currentCount, frameId) ->
  return unless currentCount < totalCount
  invokeNext = -> repeatFunction(func, totalCount, currentCount + 1, frameId)
  func invokeNext, frameId
# Move the current tab by `count` positions, clamped so pinned and unpinned
# tabs never mix.
moveTab = (count) ->
  chrome.tabs.getAllInWindow null, (tabs) ->
    pinnedCount = (tabs.filter (tab) -> tab.pinned).length
    chrome.tabs.getSelected null, (tab) ->
      minIndex = if tab.pinned then 0 else pinnedCount
      maxIndex = (if tab.pinned then pinnedCount else tabs.length) - 1
      chrome.tabs.move tab.id,
        index: Math.max minIndex, Math.min maxIndex, tab.index + count
# Start action functions
# These are commands which are bound to keystroke which must be handled by the background page. They are
# mapped in commands.coffee.
BackgroundCommands =
  createTab: (callback) ->
    chrome.tabs.query { active: true, currentWindow: true }, (tabs) ->
      tab = tabs[0]
      url = Settings.get "newTabUrl"
      if url == "pages/blank.html"
        # "pages/blank.html" does not work in incognito mode, so fall back to "chrome://newtab" instead.
        url = if tab.incognito then "chrome://newtab" else chrome.runtime.getURL url
      TabOperations.openUrlInNewTab { url }, callback
  duplicateTab: (callback) ->
    chrome.tabs.getSelected(null, (tab) ->
      chrome.tabs.duplicate(tab.id)
      # The callback fires later, via the onSelectionChanged listener.
      selectionChangedHandlers.push(callback))
  moveTabToNewWindow: (callback) ->
    chrome.tabs.query {active: true, currentWindow: true}, (tabs) ->
      tab = tabs[0]
      chrome.windows.create {tabId: tab.id, incognito: tab.incognito}
  nextTab: (count) -> selectTab "next", count
  previousTab: (count) -> selectTab "previous", count
  firstTab: (count) -> selectTab "first", count
  lastTab: (count) -> selectTab "last", count
  removeTab: (callback) ->
    chrome.tabs.getSelected(null, (tab) ->
      chrome.tabs.remove(tab.id)
      selectionChangedHandlers.push(callback))
  restoreTab: (callback) ->
    # TODO: remove if-else -block when adopted into stable
    if chrome.sessions
      chrome.sessions.restore(null, (restoredSession) ->
        callback() unless chrome.runtime.lastError)
    else
      # TODO: Should this be getLastFocused instead?
      chrome.windows.getCurrent((window) ->
        return unless (tabQueue[window.id] && tabQueue[window.id].length > 0)
        tabQueueEntry = tabQueue[window.id].pop()
        # Clean out the tabQueue so we don't have unused windows laying about.
        delete tabQueue[window.id] if (tabQueue[window.id].length == 0)
        # We have to chain a few callbacks to set the appropriate scroll position. We can't just wait until the
        # tab is created because the content script is not available during the "loading" state. We need to
        # wait until that's over before we can call setScrollPosition.
        chrome.tabs.create({ url: tabQueueEntry.url, index: tabQueueEntry.positionIndex }, (tab) ->
          tabLoadedHandlers[tab.id] = ->
            chrome.tabs.sendRequest(tab.id,
              name: "setScrollPosition",
              scrollX: tabQueueEntry.scrollX,
              scrollY: tabQueueEntry.scrollY)
          callback()))
  openCopiedUrlInCurrentTab: (request) -> TabOperations.openUrlInCurrentTab({ url: Clipboard.paste() })
  openCopiedUrlInNewTab: (request) -> TabOperations.openUrlInNewTab({ url: Clipboard.paste() })
  togglePinTab: (request) ->
    chrome.tabs.getSelected(null, (tab) ->
      chrome.tabs.update(tab.id, { pinned: !tab.pinned }))
  showHelp: (callback, frameId) ->
    chrome.tabs.getSelected(null, (tab) ->
      chrome.tabs.sendMessage(tab.id,
        { name: "toggleHelpDialog", dialogHtml: helpDialogHtml(), frameId:frameId }))
  moveTabLeft: (count) -> moveTab -count
  moveTabRight: (count) -> moveTab count
  nextFrame: (count,frameId) ->
    chrome.tabs.getSelected null, (tab) ->
      frameIdsForTab[tab.id] = cycleToFrame frameIdsForTab[tab.id], frameId, count
      chrome.tabs.sendMessage tab.id, name: "focusFrame", frameId: frameIdsForTab[tab.id][0], highlight: true
  mainFrame: ->
    chrome.tabs.getSelected null, (tab) ->
      # The front end interprets a frameId of 0 to mean the main/top frame.
      chrome.tabs.sendMessage tab.id, name: "focusFrame", frameId: 0, highlight: true
  closeTabsOnLeft: -> removeTabsRelative "before"
  closeTabsOnRight: -> removeTabsRelative "after"
  closeOtherTabs: -> removeTabsRelative "both"
# Remove tabs before, after, or either side of the currently active tab
removeTabsRelative = (direction) ->
  chrome.tabs.query {currentWindow: true}, (tabs) ->
    chrome.tabs.query {currentWindow: true, active: true}, (activeTabs) ->
      activeTabIndex = activeTabs[0].index
      # Predicate deciding, by index, which tabs fall on the doomed side(s).
      shouldDelete =
        switch direction
          when "before" then (index) -> index < activeTabIndex
          when "after" then (index) -> index > activeTabIndex
          when "both" then (index) -> index != activeTabIndex
      # Pinned tabs are always spared.
      toRemove = (tab.id for tab in tabs when not tab.pinned and shouldDelete tab.index)
      chrome.tabs.remove toRemove
# Selects a tab before or after the currently selected tab.
# - direction: "next", "previous", "first" or "last".
selectTab = (direction, count = 1) ->
  chrome.tabs.getAllInWindow null, (tabs) ->
    return unless tabs.length > 1
    chrome.tabs.getSelected null, (currentTab) ->
      toSelect =
        switch direction
          when "next"
            (currentTab.index + count) % tabs.length
          when "previous"
            # Adding count * tabs.length keeps the modulo operand non-negative.
            (currentTab.index - count + count * tabs.length) % tabs.length
          when "first"
            Math.min tabs.length - 1, count - 1
          when "last"
            Math.max 0, tabs.length - count
      chrome.tabs.update tabs[toSelect].id, selected: true
# Refresh (or create) the cached info entry for a tab; optionally drop its
# registered frame list (used when the tab is reloading or closing).
updateOpenTabs = (tab, deleteFrames = false) ->
  # Chrome might reuse the tab ID of a recently removed tab.
  if tabInfoMap[tab.id]?.deletor
    clearTimeout tabInfoMap[tab.id].deletor
  tabInfoMap[tab.id] =
    url: tab.url
    positionIndex: tab.index
    windowId: tab.windowId
    scrollX: null
    scrollY: null
    deletor: null
  # Frames are recreated on refresh
  delete frameIdsForTab[tab.id] if deleteFrames
# Here's how we set the page icon. The default is "disabled", so if we do nothing else, then we get the
# grey-out disabled icon. Thereafter, we only set tab-specific icons, so there's no need to update the icon
# when we visit a tab on which Vimium isn't running.
#
# For active tabs, when a frame starts, it requests its active state via isEnabledForUrl. We also check the
# state every time a frame gets the focus. In both cases, the frame then updates the tab's icon accordingly.
#
# Exclusion rule changes (from either the options page or the page popup) propagate via the subsequent focus
# change. In particular, whenever a frame next gets the focus, it requests its new state and sets the icon
# accordingly.
#
setIcon = (request, sender) ->
  path = switch request.icon
    when "enabled" then "icons/browser_action_enabled.png"
    when "partial" then "icons/browser_action_partial.png"
    when "disabled" then "icons/browser_action_disabled.png"
  chrome.browserAction.setIcon tabId: sender.tab.id, path: path
# Record the scroll offsets reported by a frame against its tab.
handleUpdateScrollPosition = (request, sender) ->
  # See note regarding sender.tab at unregisterFrame.
  updateScrollPosition sender.tab, request.scrollX, request.scrollY if sender.tab?
# Cache the tab's latest scroll offsets (used when restoring closed tabs).
updateScrollPosition = (tab, scrollX, scrollY) ->
  tabInfoMap[tab.id].scrollX = scrollX
  tabInfoMap[tab.id].scrollY = scrollY
# Re-inject the user's custom link-hint CSS each time a tab starts loading a URL.
chrome.tabs.onUpdated.addListener (tabId, changeInfo, tab) ->
  return unless changeInfo.status == "loading" # only do this once per URL change
  cssConf =
    allFrames: true
    code: Settings.get("userDefinedLinkHintCss")
    runAt: "document_start"
  # Evaluating lastError stops Chrome logging errors for tabs we can't touch.
  chrome.tabs.insertCSS tabId, cssConf, -> chrome.runtime.lastError
  updateOpenTabs(tab) if changeInfo.url?
# Keep cached tab positions/window ids fresh when tabs move between windows.
chrome.tabs.onAttached.addListener (tabId, attachedInfo) ->
  # We should update all the tabs in the old window and the new window.
  if tabInfoMap[tabId]
    updatePositionsAndWindowsForAllTabsInWindow(tabInfoMap[tabId].windowId)
  updatePositionsAndWindowsForAllTabsInWindow(attachedInfo.newWindowId)
# A tab was reordered within its window; refresh that window's cached indices.
chrome.tabs.onMoved.addListener (tabId, moveInfo) ->
  updatePositionsAndWindowsForAllTabsInWindow(moveInfo.windowId)
# On tab removal: maintain the restore queue (when the sessions API is absent)
# and schedule this tab's cached info for deletion.
chrome.tabs.onRemoved.addListener (tabId) ->
  openTabInfo = tabInfoMap[tabId]
  updatePositionsAndWindowsForAllTabsInWindow(openTabInfo.windowId)
  # If we restore pages that content scripts can't run on, they'll ignore Vimium keystrokes when they
  # reappear. Pretend they never existed and adjust tab indices accordingly. Could possibly expand this into
  # a blacklist in the future.
  unless chrome.sessions
    if (/^(chrome|view-source:)[^:]*:\/\/.*/.test(openTabInfo.url))
      for i of tabQueue[openTabInfo.windowId]
        if (tabQueue[openTabInfo.windowId][i].positionIndex > openTabInfo.positionIndex)
          tabQueue[openTabInfo.windowId][i].positionIndex--
      return
    if (tabQueue[openTabInfo.windowId])
      tabQueue[openTabInfo.windowId].push(openTabInfo)
    else
      tabQueue[openTabInfo.windowId] = [openTabInfo]
  # keep the reference around for a while to wait for the last messages from the closed tab (e.g. for updating
  # scroll position)
  # Fix: attach the deletor to this tab's own entry, not to the shared map.
  # updateOpenTabs reads it as tabInfoMap[tab.id].deletor, and a map-level
  # property would be clobbered whenever several tabs close within a second.
  openTabInfo.deletor = -> delete tabInfoMap[tabId]
  setTimeout openTabInfo.deletor, 1000
  delete frameIdsForTab[tabId]
  delete urlForTab[tabId]
unless chrome.sessions
  # Without the sessions API we keep our own tabQueue; drop a window's queue when the window closes.
  chrome.windows.onRemoved.addListener (windowId) -> delete tabQueue[windowId]
# End action functions
# Refresh the cached positionIndex/windowId of every tracked tab in a window.
updatePositionsAndWindowsForAllTabsInWindow = (windowId) ->
  chrome.tabs.getAllInWindow(windowId, (tabs) ->
    for tab in tabs
      openTabInfo = tabInfoMap[tab.id]
      if (openTabInfo)
        openTabInfo.positionIndex = tab.index
        openTabInfo.windowId = tab.windowId)
# Split a key sequence into its first keystroke and the remainder; a leading
# named key such as "<c-a>" counts as one keystroke.
splitKeyIntoFirstAndSecond = (key) ->
  if (key.search(namedKeyRegex) == 0)
    { first: RegExp.$1, second: RegExp.$2 }
  else
    { first: key[0], second: key.slice(1) }
# Number of keystrokes in `key`, counting a named key as one.
getActualKeyStrokeLength = (key) ->
  if (key.search(namedKeyRegex) == 0)
    1 + getActualKeyStrokeLength(RegExp.$2)
  else
    key.length
# Record the first keystroke of every two-keystroke mapping.
populateValidFirstKeys = ->
  for key of Commands.keyToCommandRegistry
    if (getActualKeyStrokeLength(key) == 2)
      validFirstKeys[splitKeyIntoFirstAndSecond(key).first] = true
# Collect every single-keystroke mapping.
populateSingleKeyCommands = ->
  for key of Commands.keyToCommandRegistry
    if (getActualKeyStrokeLength(key) == 1)
      singleKeyCommands.push(key)
# Invoked by options.coffee.
root.refreshCompletionKeysAfterMappingSave = ->
  # Rebuild key lookup tables, then push new completion keys to every tab.
  validFirstKeys = {}
  singleKeyCommands = []
  populateValidFirstKeys()
  populateSingleKeyCommands()
  sendRequestToAllTabs(getCompletionKeysRequest())
# Generates a list of keys that can complete a valid command given the current key queue or the one passed in
generateCompletionKeys = (keysToCheck) ->
  splitHash = splitKeyQueue(keysToCheck || keyQueue)
  command = splitHash.command
  count = splitHash.count
  completionKeys = singleKeyCommands.slice(0)
  if (getActualKeyStrokeLength(command) == 1)
    for key of Commands.keyToCommandRegistry
      splitKey = splitKeyIntoFirstAndSecond(key)
      if (splitKey.first == command)
        completionKeys.push(splitKey.second)
  completionKeys
# Split a queue like "12gg" into { count: 12, command: "gg" } (count is NaN without a prefix).
splitKeyQueue = (queue) ->
  match = /([1-9][0-9]*)?(.*)/.exec(queue)
  count = parseInt(match[1], 10)
  command = match[2]
  { count: count, command: command }
# Handle a single keystroke forwarded from a content script's keyDown port.
handleKeyDown = (request, port) ->
  key = request.keyChar
  if (key == "<ESC>")
    console.log("clearing keyQueue")
    keyQueue = ""
  else
    console.log("checking keyQueue: [", keyQueue + key, "]")
    keyQueue = checkKeyQueue(keyQueue + key, port.sender.tab.id, request.frameId)
    console.log("new KeyQueue: " + keyQueue)
  # Tell the content script whether there are keys in the queue.
  # FIXME: There is a race condition here. The behaviour in the content script depends upon whether this message gets
  # back there before or after the next keystroke.
  # That being said, I suspect there are other similar race conditions here, for example in checkKeyQueue().
  # (23 Aug, 14).
  chrome.tabs.sendMessage(port.sender.tab.id,
    name: "currentKeyQueue",
    keyQueue: keyQueue)
# Consume keysToCheck against the command registry: run any completed command
# (in the page or in the background) and return the new, possibly empty, key
# queue. Recurses when the second keystroke of a pair is itself a command.
checkKeyQueue = (keysToCheck, tabId, frameId) ->
  refreshedCompletionKeys = false
  splitHash = splitKeyQueue(keysToCheck)
  command = splitHash.command
  count = splitHash.count
  return keysToCheck if command.length == 0
  count = 1 if isNaN(count)
  if (Commands.keyToCommandRegistry[command])
    registryEntry = Commands.keyToCommandRegistry[command]
    runCommand = true
    if registryEntry.noRepeat
      count = 1
    else if registryEntry.repeatLimit and count > registryEntry.repeatLimit
      # Guard against accidental huge counts (e.g. "1000dd") by confirming.
      runCommand = confirm """
        You have asked Vimium to perform #{count} repeats of the command:
        #{Commands.availableCommands[registryEntry.command].description}
        Are you sure you want to continue?
      """
    if runCommand
      if not registryEntry.isBackgroundCommand
        chrome.tabs.sendMessage tabId,
          name: "executePageCommand"
          command: registryEntry.command
          frameId: frameId
          count: count
          completionKeys: generateCompletionKeys ""
          registryEntry: registryEntry
        refreshedCompletionKeys = true
      else
        if registryEntry.passCountToFunction
          BackgroundCommands[registryEntry.command](count, frameId)
        else if registryEntry.noRepeat
          BackgroundCommands[registryEntry.command](frameId)
        else
          repeatFunction(BackgroundCommands[registryEntry.command], count, 0, frameId)
    newKeyQueue = ""
  else if (getActualKeyStrokeLength(command) > 1)
    splitKey = splitKeyIntoFirstAndSecond(command)
    # The second key might be a valid command by itself.
    if (Commands.keyToCommandRegistry[splitKey.second])
      newKeyQueue = checkKeyQueue(splitKey.second, tabId, frameId)
    else
      newKeyQueue = (if validFirstKeys[splitKey.second] then splitKey.second else "")
  else
    newKeyQueue = (if validFirstKeys[command] then count.toString() + command else "")
  # If we haven't sent the completion keys piggybacked on executePageCommand,
  # send them by themselves.
  unless refreshedCompletionKeys
    chrome.tabs.sendMessage(tabId, getCompletionKeysRequest(null, newKeyQueue), null)
  newKeyQueue
#
# Message all tabs. Args should be the arguments hash used by the Chrome sendRequest API.
#
sendRequestToAllTabs = (args) ->
  chrome.windows.getAll { populate: true }, (windows) ->
    for window in windows
      for tab in window.tabs
        chrome.tabs.sendMessage tab.id, args, null
# Open the Vimium options page in a new tab, placed just after the currently
# active tab. chrome.tabs.getSelected has long been deprecated (and is removed
# in modern Chrome); chrome.tabs.query is the supported replacement.
openOptionsPageInNewTab = ->
  chrome.tabs.query { active: true, currentWindow: true }, (tabs) ->
    chrome.tabs.create({ url: chrome.runtime.getURL("pages/options.html"), index: tabs[0].index + 1 })
# Record that the given frame exists within the sender's tab.
registerFrame = (request, sender) ->
  tabId = sender.tab.id
  frameIdsForTab[tabId] ?= []
  frameIdsForTab[tabId].push request.frameId
# Forget a frame that has gone away. When the whole tab is closing, refresh
# the open-tab bookkeeping instead of removing a single frame id.
unregisterFrame = (request, sender) ->
  # When a tab is closing, Chrome sometimes passes messages without sender.tab;
  # guard against that here.
  tabId = sender.tab?.id
  return unless tabId? and frameIdsForTab[tabId]?
  if request.tab_is_closing
    updateOpenTabs sender.tab, true
  else
    frameIdsForTab[tabId] = (id for id in frameIdsForTab[tabId] when id isnt request.frameId)
# A frame reports that it has gained the focus.
handleFrameFocused = (request, sender) ->
  tabId = sender.tab.id
  # Rotate frameIdsForTab so the focused frame comes first -- but only if the
  # tab already has registered frames, so we never inadvertently register a
  # frame that wasn't previously registered (such as a frameset).
  if frameIdsForTab[tabId]?
    frameIdsForTab[tabId] = cycleToFrame frameIdsForTab[tabId], request.frameId
  # Let every frame in the tab know which frame now has the focus.
  chrome.tabs.sendMessage tabId,
    name: "frameFocused"
    focusFrameId: request.frameId
# Rotate through frames to the frame count places after frameId.
# Returns a new array; the input is not mutated.
cycleToFrame = (frames, frameId, count = 0) ->
  frames ||= []
  # We can't always track which frame chrome has focussed, but here we learn
  # that it's frameId; add its position as an extra offset so rotation does
  # indeed start from frameId (unknown ids contribute an offset of zero).
  start = frames.indexOf frameId
  start = 0 if start < 0
  pivot = (count + start) % frames.length
  frames.slice(pivot).concat frames.slice(0, pivot)
# Send a message to all frames in the current tab.
sendMessageToFrames = (request, sender) ->
  {message} = request
  chrome.tabs.sendMessage sender.tab.id, message
# For debugging only. This allows content scripts to log messages to the background page's console.
bgLog = (request, sender) ->
  prefix = "#{sender.tab.id}/#{request.frameId}"
  console.log prefix, request.message
# Port handler mapping
# Dispatch table for long-lived port connections from content scripts.
portHandlers =
  keyDown: handleKeyDown,
  completions: handleCompletions
# Dispatch table for one-shot messages; keys are the request names sent by
# content scripts and extension pages.
sendRequestHandlers =
  getCompletionKeys: getCompletionKeysRequest
  getCurrentTabUrl: getCurrentTabUrl
  openUrlInNewTab: TabOperations.openUrlInNewTab
  openUrlInIncognito: TabOperations.openUrlInIncognito
  openUrlInCurrentTab: TabOperations.openUrlInCurrentTab
  openOptionsPageInNewTab: openOptionsPageInNewTab
  registerFrame: registerFrame
  unregisterFrame: unregisterFrame
  frameFocused: handleFrameFocused
  nextFrame: (request) -> BackgroundCommands.nextFrame 1, request.frameId
  updateScrollPosition: handleUpdateScrollPosition
  copyToClipboard: copyToClipboard
  pasteFromClipboard: pasteFromClipboard
  isEnabledForUrl: isEnabledForUrl
  selectSpecificTab: selectSpecificTab
  createMark: Marks.create.bind(Marks)
  gotoMark: Marks.goto.bind(Marks)
  setIcon: setIcon
  sendMessageToFrames: sendMessageToFrames
  log: bgLog
  fetchFileContents: (request, sender) -> fetchFileContents request.fileName
# We always remove chrome.storage.local/findModeRawQueryListIncognito on startup.
chrome.storage.local.remove "findModeRawQueryListIncognito"
# Remove chrome.storage.local/findModeRawQueryListIncognito if there are no remaining incognito-mode windows.
# Since the common case is that there are none to begin with, we first check whether the key is set at all.
chrome.tabs.onRemoved.addListener (tabId) ->
  chrome.storage.local.get "findModeRawQueryListIncognito", (items) ->
    if items.findModeRawQueryListIncognito
      chrome.windows.getAll null, (windows) ->
        # Bail out as soon as any incognito window is still open.
        for window in windows
          return if window.incognito
        # There are no remaining incognito-mode tabs, and findModeRawQueryListIncognito is set.
        chrome.storage.local.remove "findModeRawQueryListIncognito"
# Tidy up tab caches when tabs are removed. We cannot rely on unregisterFrame because Chrome does not always
# provide sender.tab there.
# NOTE(smblott) (2015-05-05) This may break restoreTab on legacy Chrome versions, but we'll be moving to
# chrome.sessions support only soon anyway.
chrome.tabs.onRemoved.addListener (tabId) ->
  delete cache[tabId] for cache in [ frameIdsForTab, urlForTab, tabInfoMap ]
# Convenience function for development use.
window.runTests = -> open(chrome.runtime.getURL('tests/dom_tests/dom_tests.html'))
#
# Begin initialization.
#
# Install the default key mappings, layer any user-defined mappings on top,
# then build the completion-key caches from the resulting registry.
Commands.clearKeyMappingsAndSetDefaults()
Commands.parseCustomKeyMappings Settings.get "keyMappings" if Settings.has "keyMappings"
populateValidFirstKeys()
populateSingleKeyCommands()
# Show notification on upgrade.
showUpgradeMessage = ->
  # Avoid showing the upgrade notification when previousVersion is undefined, which is the case for new
  # installs.
  Settings.set "previousVersion", currentVersion unless Settings.get "previousVersion"
  if Utils.compareVersions(currentVersion, Settings.get "previousVersion" ) == 1
    notificationId = "VimiumUpgradeNotification"
    notification =
      type: "basic"
      iconUrl: chrome.runtime.getURL "icons/vimium.png"
      title: "Vimium Upgrade"
      message: "Vimium has been upgraded to version #{currentVersion}. Click here for more information."
      isClickable: true
    if chrome.notifications?.create?
      chrome.notifications.create notificationId, notification, ->
        unless chrome.runtime.lastError
          # Only record the new version once the notification was shown, so the
          # user is not left un-notified by a transient failure.
          Settings.set "previousVersion", currentVersion
          chrome.notifications.onClicked.addListener (id) ->
            if id == notificationId
              TabOperations.openUrlInNewTab url: "https://github.com/philc/vimium#release-notes"
    else
      # We need to wait for the user to accept the "notifications" permission.
      chrome.permissions.onAdded.addListener showUpgradeMessage
# Ensure that tabInfoMap is populated when Vimium is installed.
chrome.windows.getAll { populate: true }, (windows) ->
  # Bind `tab` as a parameter rather than closing over the loop variable: the
  # loop variable is shared across iterations, so the original closure made
  # every asynchronous getScrollPosition response update the scroll position
  # of whichever tab the loop visited last.
  createScrollPositionHandler = (tab) ->
    (response) -> updateScrollPosition(tab, response.scrollX, response.scrollY) if response?
  for window in windows
    for tab in window.tabs
      updateOpenTabs(tab)
      chrome.tabs.sendMessage(tab.id, { name: "getScrollPosition" }, createScrollPositionHandler(tab))
# Show the upgrade notification, if the extension version has changed.
showUpgradeMessage()
# Expose TabOperations on the extension root for use by other extension pages.
root.TabOperations = TabOperations
|
[
{
"context": " CleverStyle Music\n * @category app\n * @author Nazar Mokrynskyi <nazar@mokrynskyi.com>\n * @copyright Copyright (c",
"end": 85,
"score": 0.9998916387557983,
"start": 69,
"tag": "NAME",
"value": "Nazar Mokrynskyi"
},
{
"context": " * @category app\n * @author N... | js/ac.music_library.coffee | mariot/Klif-Mozika | 0 | ###*
* @package CleverStyle Music
* @category app
* @author Nazar Mokrynskyi <nazar@mokrynskyi.com>
* @copyright Copyright (c) 2014-2015, Nazar Mokrynskyi
* @license MIT License, see license.txt
###
if !window.indexedDB
alert "Indexed DB is not supported O_o"
return
db = null
on_db_ready = []
music_storage = navigator.getDeviceStorage('music')
request = indexedDB.open('music_db', 1)
request.onsuccess = ->
db = request.result
while callback = on_db_ready.shift()
callback()
return
request.onerror = (e) ->
console.error(e)
return
request.onupgradeneeded = ->
db = request.result
if db.objectStoreNames.contains('music')
db.deleteObjectStore('music')
music_store = db.createObjectStore(
'music'
keyPath : 'id'
autoIncrement : true
)
music_store.createIndex(
'name'
'name'
unique : true
)
meta_store = db.createObjectStore(
'meta'
keyPath : 'id'
)
meta_store.createIndex('title', 'title')
meta_store.createIndex('artist', 'artist')
meta_store.createIndex('album', 'album')
meta_store.createIndex('genre', 'genre')
meta_store.createIndex('year', 'year')
db.transaction.oncomplete = ->
while callback = on_db_ready.shift()
callback()
return
library_size = -1
cs.music_library =
add : (name, callback) ->
callback = (callback || ->).bind(@)
@onready ->
put_transaction = db
.transaction(['music'], 'readwrite')
.objectStore('music')
.put(
name : name
)
put_transaction.onsuccess = callback
put_transaction.onerror = callback
parse_metadata : (name, callback) ->
callback = (callback || ->).bind(@)
db
.transaction(['music'])
.objectStore('music')
.index('name')
.get(name).onsuccess = ->
if @result
data = @result
music_storage.get(data.name).onsuccess = ->
if @result
store = (metadata) ->
store_object = db
.transaction(['meta'], 'readwrite')
.objectStore('meta')
.put(
id : data.id
title : metadata.title || ''
artist : metadata.artist || ''
album : metadata.album || ''
genre : metadata.genre || ''
year : metadata.year || metadata.recordingTime || ''
rated : metadata.rated || 0
)
store_object.onsuccess = ->
callback()
store_object.onerror = ->
callback()
parseAudioMetadata(
@result
(metadata) ->
store(metadata)
=>
# If unable to get metadata with previous parser - try another one
url = URL.createObjectURL(@result)
asset = AV.Asset.fromURL(url)
asset.get('metadata', (metadata) ->
URL.revokeObjectURL(url)
if !metadata
callback()
return
store(metadata)
)
asset.on('error', ->
# Get filename
metadata = data.name.split('/').pop()
# remove extension
metadata = metadata.split('.')
metadata.pop()
metadata = metadata.join('.')
# Try to split filename on artist and title
metadata = metadata.split('–', 2)
if metadata.length == 2
store(
artist : $.trim(metadata[0])
title : $.trim(metadata[1])
)
return
# Second trial
metadata = metadata[0].split(' - ', 2)
if metadata.length == 2
store(
artist : $.trim(metadata[0])
title : $.trim(metadata[1])
)
return
# Assume that filename is title
store(
title : $.trim(metadata[0])
)
)
)
get : (id, callback) ->
callback = (callback || ->).bind(@)
@onready ->
db
.transaction(['music'])
.objectStore('music')
.get(id).onsuccess = ->
result = @result
if result
callback(result)
get_meta : (id, callback) ->
callback = (callback || ->).bind(@)
@onready ->
db
.transaction(['meta'])
.objectStore('meta')
.get(id).onsuccess = ->
result = @result
if result
callback(result)
else
callback(
id : id
)
get_all : (callback, filter) ->
callback = (callback || ->).bind(@)
filter = filter || -> true
@onready ->
all = []
db
.transaction(['music'])
.objectStore('music')
.openCursor().onsuccess = ->
result = @result
if result
if filter(result.value)
all.push(result.value)
result.continue()
else
callback(all)
del : (id, callback) ->
@onready ->
db
.transaction(['music'], 'readwrite')
.objectStore('music')
.delete(id)
.onsuccess = ->
db
.transaction(['meta'], 'readwrite')
.objectStore('meta')
.delete(id)
.onsuccess = ->
callback()
size : (callback, filter) ->
callback = (callback || ->).bind(@)
filter = filter || -> true
@onready ->
if library_size >= 0 && !filter
callback(library_size)
calculated_size = 0
db
.transaction(['music'])
.objectStore('music')
.openCursor().onsuccess = ->
result = @result
if result
if !filter || filter(result.value)
++calculated_size
result.continue()
else
if !filter
library_size = calculated_size
callback(calculated_size)
rescan : (done_callback) ->
known_extensions = [
'mp3',
'wave',
'm4a',
'm4b',
'm4p',
'm4r',
'3gp',
'mp4',
'aac',
'ogg',
'oga',
'opus',
'flac',
'alac'
]
done_callback = (done_callback || ->).bind(@)
found_files = 0
@onready ->
new_files = []
remove_old_files = =>
@get_all (all) =>
id_to_remove = []
all.forEach (file) =>
if file.name not in new_files
id_to_remove.push(file.id)
return
remove = (index) =>
if id_to_remove[index]
@del(id_to_remove[index], ->
remove(index + 1)
)
else
done_callback()
remove(0)
do =>
cursor = music_storage.enumerate()
cursor.onsuccess = =>
if cursor.result
file = cursor.result
if known_extensions.indexOf(file.name.split('.').pop()) != -1
db
.transaction(['music'])
.objectStore('music')
.index('name')
.get(file.name).onsuccess = (e) =>
if !e.target.result
@add(file.name, ->
@parse_metadata(file.name, ->
new_files.push(file.name)
++found_files
cs.bus.trigger('library/rescan/found', found_files)
cursor.continue()
)
)
else
new_files.push(file.name)
++found_files
cs.bus.trigger('library/rescan/found', found_files)
cursor.continue()
else
cursor.continue()
else
if !new_files.length
alert _('no_files_found')
else
remove_old_files()
cursor.onerror = ->
console.error(@error.name)
return
onready : (callback) ->
callback = (callback || ->).bind(@)
if db
callback()
else
on_db_ready.push(callback)
return
| 166599 | ###*
* @package CleverStyle Music
* @category app
* @author <NAME> <<EMAIL>>
* @copyright Copyright (c) 2014-2015, <NAME>
* @license MIT License, see license.txt
###
if !window.indexedDB
alert "Indexed DB is not supported O_o"
return
db = null
on_db_ready = []
music_storage = navigator.getDeviceStorage('music')
request = indexedDB.open('music_db', 1)
request.onsuccess = ->
db = request.result
while callback = on_db_ready.shift()
callback()
return
request.onerror = (e) ->
console.error(e)
return
request.onupgradeneeded = ->
db = request.result
if db.objectStoreNames.contains('music')
db.deleteObjectStore('music')
music_store = db.createObjectStore(
'music'
keyPath : 'id'
autoIncrement : true
)
music_store.createIndex(
'name'
'name'
unique : true
)
meta_store = db.createObjectStore(
'meta'
keyPath : 'id'
)
meta_store.createIndex('title', 'title')
meta_store.createIndex('artist', 'artist')
meta_store.createIndex('album', 'album')
meta_store.createIndex('genre', 'genre')
meta_store.createIndex('year', 'year')
db.transaction.oncomplete = ->
while callback = on_db_ready.shift()
callback()
return
library_size = -1
cs.music_library =
add : (name, callback) ->
callback = (callback || ->).bind(@)
@onready ->
put_transaction = db
.transaction(['music'], 'readwrite')
.objectStore('music')
.put(
name : name
)
put_transaction.onsuccess = callback
put_transaction.onerror = callback
parse_metadata : (name, callback) ->
callback = (callback || ->).bind(@)
db
.transaction(['music'])
.objectStore('music')
.index('name')
.get(name).onsuccess = ->
if @result
data = @result
music_storage.get(data.name).onsuccess = ->
if @result
store = (metadata) ->
store_object = db
.transaction(['meta'], 'readwrite')
.objectStore('meta')
.put(
id : data.id
title : metadata.title || ''
artist : metadata.artist || ''
album : metadata.album || ''
genre : metadata.genre || ''
year : metadata.year || metadata.recordingTime || ''
rated : metadata.rated || 0
)
store_object.onsuccess = ->
callback()
store_object.onerror = ->
callback()
parseAudioMetadata(
@result
(metadata) ->
store(metadata)
=>
# If unable to get metadata with previous parser - try another one
url = URL.createObjectURL(@result)
asset = AV.Asset.fromURL(url)
asset.get('metadata', (metadata) ->
URL.revokeObjectURL(url)
if !metadata
callback()
return
store(metadata)
)
asset.on('error', ->
# Get filename
metadata = data.name.split('/').pop()
# remove extension
metadata = metadata.split('.')
metadata.pop()
metadata = metadata.join('.')
# Try to split filename on artist and title
metadata = metadata.split('–', 2)
if metadata.length == 2
store(
artist : $.trim(metadata[0])
title : $.trim(metadata[1])
)
return
# Second trial
metadata = metadata[0].split(' - ', 2)
if metadata.length == 2
store(
artist : $.trim(metadata[0])
title : $.trim(metadata[1])
)
return
# Assume that filename is title
store(
title : $.trim(metadata[0])
)
)
)
get : (id, callback) ->
callback = (callback || ->).bind(@)
@onready ->
db
.transaction(['music'])
.objectStore('music')
.get(id).onsuccess = ->
result = @result
if result
callback(result)
get_meta : (id, callback) ->
callback = (callback || ->).bind(@)
@onready ->
db
.transaction(['meta'])
.objectStore('meta')
.get(id).onsuccess = ->
result = @result
if result
callback(result)
else
callback(
id : id
)
get_all : (callback, filter) ->
callback = (callback || ->).bind(@)
filter = filter || -> true
@onready ->
all = []
db
.transaction(['music'])
.objectStore('music')
.openCursor().onsuccess = ->
result = @result
if result
if filter(result.value)
all.push(result.value)
result.continue()
else
callback(all)
del : (id, callback) ->
@onready ->
db
.transaction(['music'], 'readwrite')
.objectStore('music')
.delete(id)
.onsuccess = ->
db
.transaction(['meta'], 'readwrite')
.objectStore('meta')
.delete(id)
.onsuccess = ->
callback()
size : (callback, filter) ->
callback = (callback || ->).bind(@)
filter = filter || -> true
@onready ->
if library_size >= 0 && !filter
callback(library_size)
calculated_size = 0
db
.transaction(['music'])
.objectStore('music')
.openCursor().onsuccess = ->
result = @result
if result
if !filter || filter(result.value)
++calculated_size
result.continue()
else
if !filter
library_size = calculated_size
callback(calculated_size)
rescan : (done_callback) ->
known_extensions = [
'mp3',
'wave',
'm4a',
'm4b',
'm4p',
'm4r',
'3gp',
'mp4',
'aac',
'ogg',
'oga',
'opus',
'flac',
'alac'
]
done_callback = (done_callback || ->).bind(@)
found_files = 0
@onready ->
new_files = []
remove_old_files = =>
@get_all (all) =>
id_to_remove = []
all.forEach (file) =>
if file.name not in new_files
id_to_remove.push(file.id)
return
remove = (index) =>
if id_to_remove[index]
@del(id_to_remove[index], ->
remove(index + 1)
)
else
done_callback()
remove(0)
do =>
cursor = music_storage.enumerate()
cursor.onsuccess = =>
if cursor.result
file = cursor.result
if known_extensions.indexOf(file.name.split('.').pop()) != -1
db
.transaction(['music'])
.objectStore('music')
.index('name')
.get(file.name).onsuccess = (e) =>
if !e.target.result
@add(file.name, ->
@parse_metadata(file.name, ->
new_files.push(file.name)
++found_files
cs.bus.trigger('library/rescan/found', found_files)
cursor.continue()
)
)
else
new_files.push(file.name)
++found_files
cs.bus.trigger('library/rescan/found', found_files)
cursor.continue()
else
cursor.continue()
else
if !new_files.length
alert _('no_files_found')
else
remove_old_files()
cursor.onerror = ->
console.error(@error.name)
return
onready : (callback) ->
callback = (callback || ->).bind(@)
if db
callback()
else
on_db_ready.push(callback)
return
| true | ###*
* @package CleverStyle Music
* @category app
* @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
* @copyright Copyright (c) 2014-2015, PI:NAME:<NAME>END_PI
* @license MIT License, see license.txt
###
if !window.indexedDB
alert "Indexed DB is not supported O_o"
return
db = null
on_db_ready = []
music_storage = navigator.getDeviceStorage('music')
request = indexedDB.open('music_db', 1)
request.onsuccess = ->
db = request.result
while callback = on_db_ready.shift()
callback()
return
request.onerror = (e) ->
console.error(e)
return
request.onupgradeneeded = ->
db = request.result
if db.objectStoreNames.contains('music')
db.deleteObjectStore('music')
music_store = db.createObjectStore(
'music'
keyPath : 'id'
autoIncrement : true
)
music_store.createIndex(
'name'
'name'
unique : true
)
meta_store = db.createObjectStore(
'meta'
keyPath : 'id'
)
meta_store.createIndex('title', 'title')
meta_store.createIndex('artist', 'artist')
meta_store.createIndex('album', 'album')
meta_store.createIndex('genre', 'genre')
meta_store.createIndex('year', 'year')
db.transaction.oncomplete = ->
while callback = on_db_ready.shift()
callback()
return
library_size = -1
cs.music_library =
add : (name, callback) ->
callback = (callback || ->).bind(@)
@onready ->
put_transaction = db
.transaction(['music'], 'readwrite')
.objectStore('music')
.put(
name : name
)
put_transaction.onsuccess = callback
put_transaction.onerror = callback
parse_metadata : (name, callback) ->
callback = (callback || ->).bind(@)
db
.transaction(['music'])
.objectStore('music')
.index('name')
.get(name).onsuccess = ->
if @result
data = @result
music_storage.get(data.name).onsuccess = ->
if @result
store = (metadata) ->
store_object = db
.transaction(['meta'], 'readwrite')
.objectStore('meta')
.put(
id : data.id
title : metadata.title || ''
artist : metadata.artist || ''
album : metadata.album || ''
genre : metadata.genre || ''
year : metadata.year || metadata.recordingTime || ''
rated : metadata.rated || 0
)
store_object.onsuccess = ->
callback()
store_object.onerror = ->
callback()
parseAudioMetadata(
@result
(metadata) ->
store(metadata)
=>
# If unable to get metadata with previous parser - try another one
url = URL.createObjectURL(@result)
asset = AV.Asset.fromURL(url)
asset.get('metadata', (metadata) ->
URL.revokeObjectURL(url)
if !metadata
callback()
return
store(metadata)
)
asset.on('error', ->
# Get filename
metadata = data.name.split('/').pop()
# remove extension
metadata = metadata.split('.')
metadata.pop()
metadata = metadata.join('.')
# Try to split filename on artist and title
metadata = metadata.split('–', 2)
if metadata.length == 2
store(
artist : $.trim(metadata[0])
title : $.trim(metadata[1])
)
return
# Second trial
metadata = metadata[0].split(' - ', 2)
if metadata.length == 2
store(
artist : $.trim(metadata[0])
title : $.trim(metadata[1])
)
return
# Assume that filename is title
store(
title : $.trim(metadata[0])
)
)
)
get : (id, callback) ->
callback = (callback || ->).bind(@)
@onready ->
db
.transaction(['music'])
.objectStore('music')
.get(id).onsuccess = ->
result = @result
if result
callback(result)
get_meta : (id, callback) ->
callback = (callback || ->).bind(@)
@onready ->
db
.transaction(['meta'])
.objectStore('meta')
.get(id).onsuccess = ->
result = @result
if result
callback(result)
else
callback(
id : id
)
get_all : (callback, filter) ->
callback = (callback || ->).bind(@)
filter = filter || -> true
@onready ->
all = []
db
.transaction(['music'])
.objectStore('music')
.openCursor().onsuccess = ->
result = @result
if result
if filter(result.value)
all.push(result.value)
result.continue()
else
callback(all)
del : (id, callback) ->
@onready ->
db
.transaction(['music'], 'readwrite')
.objectStore('music')
.delete(id)
.onsuccess = ->
db
.transaction(['meta'], 'readwrite')
.objectStore('meta')
.delete(id)
.onsuccess = ->
callback()
size : (callback, filter) ->
callback = (callback || ->).bind(@)
filter = filter || -> true
@onready ->
if library_size >= 0 && !filter
callback(library_size)
calculated_size = 0
db
.transaction(['music'])
.objectStore('music')
.openCursor().onsuccess = ->
result = @result
if result
if !filter || filter(result.value)
++calculated_size
result.continue()
else
if !filter
library_size = calculated_size
callback(calculated_size)
rescan : (done_callback) ->
known_extensions = [
'mp3',
'wave',
'm4a',
'm4b',
'm4p',
'm4r',
'3gp',
'mp4',
'aac',
'ogg',
'oga',
'opus',
'flac',
'alac'
]
done_callback = (done_callback || ->).bind(@)
found_files = 0
@onready ->
new_files = []
remove_old_files = =>
@get_all (all) =>
id_to_remove = []
all.forEach (file) =>
if file.name not in new_files
id_to_remove.push(file.id)
return
remove = (index) =>
if id_to_remove[index]
@del(id_to_remove[index], ->
remove(index + 1)
)
else
done_callback()
remove(0)
do =>
cursor = music_storage.enumerate()
cursor.onsuccess = =>
if cursor.result
file = cursor.result
if known_extensions.indexOf(file.name.split('.').pop()) != -1
db
.transaction(['music'])
.objectStore('music')
.index('name')
.get(file.name).onsuccess = (e) =>
if !e.target.result
@add(file.name, ->
@parse_metadata(file.name, ->
new_files.push(file.name)
++found_files
cs.bus.trigger('library/rescan/found', found_files)
cursor.continue()
)
)
else
new_files.push(file.name)
++found_files
cs.bus.trigger('library/rescan/found', found_files)
cursor.continue()
else
cursor.continue()
else
if !new_files.length
alert _('no_files_found')
else
remove_old_files()
cursor.onerror = ->
console.error(@error.name)
return
onready : (callback) ->
callback = (callback || ->).bind(@)
if db
callback()
else
on_db_ready.push(callback)
return
|
[
{
"context": "binddn: 'cn=admin,dc=example,dc=org'\n passwd: 'admin'\n config:\n binddn: 'cn=admin,cn=config'\n ",
"end": 218,
"score": 0.9980325698852539,
"start": 213,
"tag": "PASSWORD",
"value": "admin"
},
{
"context": " binddn: 'cn=admin,cn=config'\n passwd: ... | packages/ldap/test.sample.coffee | wdavidw/node-mecano | 0 |
module.exports =
tags:
api: true
ldap: false
ldap_acl: false
ldap_index: false
ldap_user: false
ldap:
uri: 'ldap://localhost:389'
binddn: 'cn=admin,dc=example,dc=org'
passwd: 'admin'
config:
binddn: 'cn=admin,cn=config'
passwd: 'config'
suffix_dn: 'dc=example,dc=org'
config: [
label: 'local'
,
label: 'remote'
ssh:
host: '127.0.0.1', username: process.env.USER,
private_key_path: '~/.ssh/id_ed25519'
# Exemple with vagrant:
# ssh:
# host: '127.0.0.1', port: 2222, username: 'vagrant'
# private_key_path: "#{require('os').homedir()}/.vagrant.d/insecure_private_key"
]
| 202306 |
module.exports =
tags:
api: true
ldap: false
ldap_acl: false
ldap_index: false
ldap_user: false
ldap:
uri: 'ldap://localhost:389'
binddn: 'cn=admin,dc=example,dc=org'
passwd: '<PASSWORD>'
config:
binddn: 'cn=admin,cn=config'
passwd: '<PASSWORD>'
suffix_dn: 'dc=example,dc=org'
config: [
label: 'local'
,
label: 'remote'
ssh:
host: '127.0.0.1', username: process.env.USER,
private_key_path: '~/.ssh/id_ed25519'
# Exemple with vagrant:
# ssh:
# host: '127.0.0.1', port: 2222, username: 'vagrant'
# private_key_path: "#{require('os').homedir()}/.vagrant.d/insecure_private_key"
]
| true |
module.exports =
tags:
api: true
ldap: false
ldap_acl: false
ldap_index: false
ldap_user: false
ldap:
uri: 'ldap://localhost:389'
binddn: 'cn=admin,dc=example,dc=org'
passwd: 'PI:PASSWORD:<PASSWORD>END_PI'
config:
binddn: 'cn=admin,cn=config'
passwd: 'PI:PASSWORD:<PASSWORD>END_PI'
suffix_dn: 'dc=example,dc=org'
config: [
label: 'local'
,
label: 'remote'
ssh:
host: '127.0.0.1', username: process.env.USER,
private_key_path: '~/.ssh/id_ed25519'
# Exemple with vagrant:
# ssh:
# host: '127.0.0.1', port: 2222, username: 'vagrant'
# private_key_path: "#{require('os').homedir()}/.vagrant.d/insecure_private_key"
]
|
[
{
"context": "t display the images from mstr.in, inspired by\n# aozora0000/hubot-meshi\n#\n# Configuration:\n# None\n#\n# Comma",
"end": 97,
"score": 0.9947305917739868,
"start": 87,
"tag": "USERNAME",
"value": "aozora0000"
},
{
"context": "] - display the MESHI-TERRO images\n#\n... | src/scripts/mstr.coffee | bouzuya/hubot-mstr | 3 | # Description
# A Hubot script that display the images from mstr.in, inspired by
# aozora0000/hubot-meshi
#
# Configuration:
# None
#
# Commands:
# hubot mstr [bomb [<N>]] - display the MESHI-TERRO images
#
# Author:
# bouzuya <m@bouzuya.net>
#
module.exports = (robot) ->
require('hubot-arm') robot
loading = false
cache = []
merge = (a1, a2) ->
a = a1.slice()
a.push(i2) for i2 in a2 when !a.some((i) -> i.id is i2.id)
a
fetch = (photos, page) ->
robot.arm('request')
method: 'GET'
url: 'https://mstr.in/api/photos.json'
qs:
page: page
json: true
.then (res) ->
photos = merge photos, res.body.photos
robot.arm('timer')(1000).then ->
if res.body.next_page then fetch(photos, page + 1) else photos
load = ->
loading = true
fetch([], 1).then (photos) ->
cache = photos
loading = false
photoUrl = (photo) ->
"https://pic.mstr.in/images/#{photo.uid}.jpg"
shuffle = (a) ->
s = a.slice()
for i in [(s.length - 1)..0]
j = Math.floor(Math.random() * i)
w = s[j]
s[j] = s[i]
s[i] = w
s
robot.respond /mstr(\s+bomb(?:\s+(\d+))?)?$/i, (res) ->
return res.send('Now loading...') if loading
count = parseInt (if res.match[1] then res.match[2] ? '5' else '1'), 10
photos = shuffle(cache)[0...count]
for photo in photos
res.send photoUrl photo
load()
| 121946 | # Description
# A Hubot script that display the images from mstr.in, inspired by
# aozora0000/hubot-meshi
#
# Configuration:
# None
#
# Commands:
# hubot mstr [bomb [<N>]] - display the MESHI-TERRO images
#
# Author:
# bouzuya <<EMAIL>>
#
module.exports = (robot) ->
require('hubot-arm') robot
loading = false
cache = []
merge = (a1, a2) ->
a = a1.slice()
a.push(i2) for i2 in a2 when !a.some((i) -> i.id is i2.id)
a
fetch = (photos, page) ->
robot.arm('request')
method: 'GET'
url: 'https://mstr.in/api/photos.json'
qs:
page: page
json: true
.then (res) ->
photos = merge photos, res.body.photos
robot.arm('timer')(1000).then ->
if res.body.next_page then fetch(photos, page + 1) else photos
load = ->
loading = true
fetch([], 1).then (photos) ->
cache = photos
loading = false
photoUrl = (photo) ->
"https://pic.mstr.in/images/#{photo.uid}.jpg"
shuffle = (a) ->
s = a.slice()
for i in [(s.length - 1)..0]
j = Math.floor(Math.random() * i)
w = s[j]
s[j] = s[i]
s[i] = w
s
robot.respond /mstr(\s+bomb(?:\s+(\d+))?)?$/i, (res) ->
return res.send('Now loading...') if loading
count = parseInt (if res.match[1] then res.match[2] ? '5' else '1'), 10
photos = shuffle(cache)[0...count]
for photo in photos
res.send photoUrl photo
load()
| true | # Description
# A Hubot script that display the images from mstr.in, inspired by
# aozora0000/hubot-meshi
#
# Configuration:
# None
#
# Commands:
# hubot mstr [bomb [<N>]] - display the MESHI-TERRO images
#
# Author:
# bouzuya <PI:EMAIL:<EMAIL>END_PI>
#
module.exports = (robot) ->
require('hubot-arm') robot
loading = false
cache = []
merge = (a1, a2) ->
a = a1.slice()
a.push(i2) for i2 in a2 when !a.some((i) -> i.id is i2.id)
a
fetch = (photos, page) ->
robot.arm('request')
method: 'GET'
url: 'https://mstr.in/api/photos.json'
qs:
page: page
json: true
.then (res) ->
photos = merge photos, res.body.photos
robot.arm('timer')(1000).then ->
if res.body.next_page then fetch(photos, page + 1) else photos
load = ->
loading = true
fetch([], 1).then (photos) ->
cache = photos
loading = false
photoUrl = (photo) ->
"https://pic.mstr.in/images/#{photo.uid}.jpg"
shuffle = (a) ->
s = a.slice()
for i in [(s.length - 1)..0]
j = Math.floor(Math.random() * i)
w = s[j]
s[j] = s[i]
s[i] = w
s
robot.respond /mstr(\s+bomb(?:\s+(\d+))?)?$/i, (res) ->
return res.send('Now loading...') if loading
count = parseInt (if res.match[1] then res.match[2] ? '5' else '1'), 10
photos = shuffle(cache)[0...count]
for photo in photos
res.send photoUrl photo
load()
|
[
{
"context": "ript.\n\tSo make your changes in the .coffee file.\n\tThatcher Peskens\n\t\t\t\t _\n\t\t\t,_(')<\n\t\t\t\\___)\n\n\tForked and Modified b",
"end": 136,
"score": 0.999866783618927,
"start": 120,
"tag": "NAME",
"value": "Thatcher Peskens"
},
{
"context": "com/openwhisk fo... | wsk_tutorial/static/js/terminal.coffee | ZmG/wsk-tutorial | 1 | ###
Please note the javascript is being fully generated from coffeescript.
So make your changes in the .coffee file.
Thatcher Peskens
_
,_(')<
\___)
Forked and Modified by IBM jStart
###
do @myTerminal = ->
# Which terminal emulator version are we
EMULATOR_VERSION = "0.1.5"
@basesettings = {
prompt: '[[b;#fff;]you@tutorial:~$] ',
greetings: """
Imitation is the sincerest form of flattery
We loved Docker's try it approach - so we forked it
Welcome to the IBM OpenWhisk tutorial
Courtesy of IBM jStart (http://ibm.com/jstart)
____ _ ____ _ __
/ __ \____ ___ ____| | / / /_ (_)____/ /__
/ / / / __ \/ _ \/ __ \ | /| / / __ \/ / ___/ //_/
/ /_/ / /_/ / __/ / / / |/ |/ / / / / (__ ) ,<
\____/ .___/\___/_/ /_/|__/|__/_/ /_/_/____/_/|_|
/_/
"""
}
###
Callback definitions. These can be overridden by functions anywhere else
###
@preventDefaultCallback = false
@immediateCallback = (command) ->
console.debug("immediate callback from #{command}")
return
@finishedCallback = (command) ->
console.debug("finished callback from #{command}")
return
@intermediateResults = (string) ->
console.debug("sent #{string}")
return
@currentDockerPs = ""
@currentVolumes = [""]
@currentIceGroups = """
Group Id Name Status Created Updated Port
"""
@currentIcePs = """
Container Id Name Group Image Created State Private IP Public IP Ports
"""
@currentLocalImages = """
Target is local host. Invoking docker with the given arguments...
REPOSITORY TAG IMAGE ID CREATED VIRTUAL SIZE
ubuntu latest 8dbd9e392a96 4 months ago 131.5 MB (virtual 131.5 MB)
"""
@currentCloudImages = """
Image Id Created Image Name
d0feae99-b91d-4ce3-bcb4-6128886f6968 Mar 23 10:44:59 2015 registry-ice.ng.bluemix.net/ibmliberty:latest
74831680-1c9c-424e-b8ea-ceede4aa0e40 Mar 23 10:41:24 2015 registry-ice.ng.bluemix.net/ibmnode:latest
"""
###
Base interpreter
###
@interpreter = (input, term) ->
input = input.trim()
inputs = input.split(" ")
command = inputs[0]
if term.loginSequence in [1, 2]
login(term, inputs)
else if command is 'hi'
term.echo 'hi there! What is your name??'
term.push (command, term) ->
term.echo command + ' is a pretty name'
else if command is 'shell'
term.push (command, term) ->
if command is 'cd'
bash(term, inputs)
, {prompt: '> $ '}
else if command is 'r'
location.reload('forceGet')
else if command is '#'
term.echo 'which question?'
else if command is 'cd'
bash(term, inputs)
else if command is "wsk"
wsk(term, inputs)
else if command is "cat"
cat(term, inputs)
else if command is "ls"
term.echo "hello.js"
else if command is "cd" or command is "pwd"
term.echo "This is an emulator, not a shell. Try following the instructions."
else if command is "pull"
term.echo '[[b;#fff;]some text]'
wait(term, 5000, true)
alert term.get_output()
return
## finally
else if command
term.echo "#{inputs[0]}: command not found"
immediateCallback(inputs)
### =======================================
Common utils
======================================= ###
String.prototype.beginsWith = (string) ->
###
Check if 'this' string starts with the inputstring.
###
return(this.indexOf(string) is 0)
Array.prototype.containsAllOfThese = (inputArr) ->
###
This function compares all of the elements in the inputArr
and checks them one by one if they exist in 'this'. When it
finds an element to not exist, it returns false.
###
me = this
valid = false
if inputArr
valid = inputArr.every( (value) ->
if me.indexOf(value) == -1
return false
else
return true
)
return valid
Array.prototype.containsAllOfTheseParts = (inputArr) ->
###
This function is like containsAllofThese, but also matches partial strings.
###
me = this
if inputArr
valid = inputArr.every( (value) ->
for item in me
# we use string matching, but we want to be sensitive to dashes, because '--login' != 'login'
itemDashes = (item.match(/--/g) || []).length
valueDashes = (value.match(/--/g) || []).length
if item.match(value) and itemDashes >= valueDashes
return true
return false
)
return valid
parseInput = (inputs) ->
command = inputs[1]
switches = []
switchArg = false
switchArgs = []
imagename = ""
commands = []
j = 0
# parse args
for input in inputs
if input.startsWith('-') and imagename == ""
switches.push(input)
if switches.length > 0
if not ['-i', '-t', '-d'].containsAllOfThese([input])
switchArg = true
else if switchArg == true
# reset switchArg
switchArg = false
switchArgs.push(input)
else if j > 1 and imagename == "" and input != 'create'
# match wrong names
imagename = input
else if input is 'create'
commands.push (input)
else if imagename != ""
commands.push (input)
else
# nothing?
j++
parsed_input = {
'switches': switches.sortBy(),
'switchArgs': switchArgs,
'imageName': imagename,
'commands': commands,
}
return parsed_input
util_slow_lines = (term, paragraph, keyword, finishedCallback) ->
if keyword
lines = paragraph(keyword).split("\n")
else
lines = paragraph.split("\n")
term.pause()
i = 0
# function calls itself after timeout is done, untill
# all lines are finished
foo = (lines) ->
self.setTimeout ( ->
if lines[i]
term.echo (lines[i])
i++
foo(lines)
else
term.resume()
finishedCallback(term)
), 1000
foo(lines)
wait = (term, time, dots) ->
term.echo "starting to wait"
interval_id = self.setInterval ( -> dots ? term.insert '.'), 500
self.setTimeout ( ->
self.clearInterval(interval_id)
output = term.get_command()
term.echo output
term.echo "done "
), time
###
Bash program
###
bash = (term, inputs) ->
echo = term.echo
insert = term.insert
if not inputs[1]
console.log("none")
else
argument = inputs[1]
if argument.beginsWith('..')
echo "-bash: cd: #{argument}: Permission denied"
else
echo "-bash: cd: #{argument}: No such file or directory"
#---------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------
# WSK I N T E R P R E T E R --------------------------------------------------------
#---------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------
cat = (term, inputs) ->
echo = term.echo
if inputs[1] is "hello.js"
echo wsk_cat_helloWorld
wsk = (term, inputs) ->
echo = term.echo
insert = term.insert
callback = () -> @finishedCallback(inputs)
command = inputs[1]
# no command
if not inputs[1]
console.debug "no args"
echo wsk_no_args
else if inputs[1] is "--help" or inputs[1] is "-h"
echo wsk_help
else if inputs[1] is "action"
if inputs[2] is "create"
if inputs[3] is "hello"
if inputs[4] is "hello.js"
echo wsk_create_action_hello
if inputs[3] is "myAction"
if inputs[4] is "--sequence"
if inputs[5] is "/whisk.system/util/cat,/whisk.system/util/sort"
echo wsk_create_action_sequence
else
echo wsk_unrecognized_arguments
else if inputs[2] is "list"
echo wsk_list_action_hello
else if inputs[2] is "invoke"
if inputs[3] is "hello"
echo wsk_action_invoke_hello
else if inputs[3] is "--blocking"
if inputs[4] is "hello"
echo wsk_action_invoke_blocking_hello
else
echo wsk_no_args
else if inputs[1] is "package"
if inputs[2] is "get"
if inputs[3] is "--summary"
if inputs[4] is "/whisk.system/util"
echo wsk_package_get
else if inputs[1] is "activation"
if inputs[2] is "result"
if inputs[3] is "6bf1f670ee614a7eb5af3c9fde813043"
echo wsk_activation_result
else if inputs[2] is "list"
echo wsk_activation_list
else if inputs[1] is "-v"
if inputs[2] is "action"
if inputs[3] is "create"
if inputs[4] is "hello"
if inputs[5] is "hello.js"
echo wsk_create_action_hello_v
else if inputs[1] is "images"
echo currentCloudImages
#---------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------
# WSK C O N T E N T ---------------------------------------------------------------
#---------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------
wsk_help = \
"""
usage: wsk [-h] [-v] [--apihost hostname] [--apiversion version]
{action,activation,namespace,package,rule,trigger,sdk,property,list}
...
OpenWhisk is a distributed compute service to add event-driven logic to your
apps.
optional arguments:
-h, --help show this help message and exit
-v, --verbose verbose output
--apihost hostname whisk API host
--apiversion version whisk API version
available commands:
{action,activation,namespace,package,rule,trigger,sdk,property,list}
action work with actions
activation work with activations
namespace work with namespaces
package work with packages
rule work with rules
trigger work with triggers
sdk work with the SDK
property work with whisk properties
list list all triggers, actions, and rules in the registry
Learn more at https://developer.ibm.com/openwhisk fork on GitHub
https://github.com/openwhisk. All trademarks are the property of their
respective owners.
"""
wsk_invalid_choice = \
"""
usage: wsk [-h] [-v] [--apihost hostname] [--apiversion version]
{action,activation,namespace,package,rule,trigger,sdk,property,list}
...
wsk: error: argument cmd: invalid choice: (choose from 'action', 'activation', 'namespace', 'package', 'rule', 'trigger', 'sdk', 'property', 'list')
"""
wsk_cat_helloWorld = \
"""
function main(params) {
return {payload: 'Hello world'};
}
"""
wsk_create_action_hello = \
"""
ok: created action hello
"""
wsk_create_action_hello_v = \
"""
{'apihost': 'openwhisk.ng.bluemix.net', 'namespace': 'jstart', 'clibuild': '2016-03-03T09:55:47-06:00', 'apiversion': 'v1'}
========
REQUEST:
PUT https://openwhisk.ng.bluemix.net/api/v1/namespaces/jstart/actions/hello
Headers sent:
{
"Authorization": "Basic
UyLWJJkYu65JKhu7YjM0ZDcwODhlNzBiOmlFS3RWMHl0UWdIT1SxUGNrMUFJRHUzSF2VlFSV53hDUnZlVXhyMGJpbTBGeH827=",
"Content-Type": "application/json"
}
Body sent:
{"exec": {"kind": "nodejs", "code": "function main(params) {\n return {payload: 'Hello, ' + params.name + ' from ' + params.place};\n}\n\n"}}
--------
RESPONSE:
Got response with code 200
Body received:
{
"name": "hello",
"publish": false,
"annotations": [],
"version": "0.0.1",
"exec": {
"kind": "nodejs",
"code": "function main(params) {\n return {payload: 'Hello, ' + params.name + ' from ' + params.place};\n}\n\n"
},
"parameters": [],
"limits": {
"timeout": 60000,
"memory": 256
},
"namespace": "jstart"
}
========
ok: created action hello
"""
wsk_list_action_hello = \
"""
actions
hello private
"""
wsk_action_invoke_hello = \
"""
ok: invoked hello with id 6bf1f670ee614a7eb5af3c9fde813043
"""
wsk_action_invoke_blocking_hello = \
"""
ok: invoked hello with id 44794bd6aab74415b4e42a308d880e5b
response:
{
"result": {
"payload": "Hello world"
},
"status": "success",
"success": true
}
"""
wsk_activation_result = \
"""
{
"payload" : "Hello world"
}
"""
wsk_activation_list = \
"""
activations
44794bd6aab74415b4e42a308d880e5b hello
6bf1f670ee614a7eb5af3c9fde813043 hello
"""
wsk_no_args = \
"""
usage: wsk [-h] [-v] [--apihost hostname] [--apiversion version]
{action,activation,namespace,package,rule,trigger,sdk,property,list}
...
wsk: error: too few arguments
"""
wsk_create_action_sequence = \
"""
ok: created action sequenceOfActions
"""
wsk_unrecognized_arguments = \
"""
usage: wsk [-h] [-v] [--apihost hostname] [--apiversion version]
{action,activation,namespace,package,rule,trigger,sdk,property,list}
...
wsk: error: unrecognized arguments
"""
wsk_package_get = \
"""
package /whisk.system/util
action /whisk.system/util/cat: Concatenate array of strings, and split lines into an array
action /whisk.system/util/head: Filter first K array elements and discard rest
action /whisk.system/util/date: Get current date and time
action /whisk.system/util/sort: Sort array
"""
return this | 23091 | ###
Please note the javascript is being fully generated from coffeescript.
So make your changes in the .coffee file.
<NAME>
_
,_(')<
\___)
Forked and Modified by IBM jStart
###
do @myTerminal = ->
# Which terminal emulator version are we
EMULATOR_VERSION = "0.1.5"
@basesettings = {
prompt: '[[b;#fff;]you@tutorial:~$] ',
greetings: """
Imitation is the sincerest form of flattery
We loved Docker's try it approach - so we forked it
Welcome to the IBM OpenWhisk tutorial
Courtesy of IBM jStart (http://ibm.com/jstart)
____ _ ____ _ __
/ __ \____ ___ ____| | / / /_ (_)____/ /__
/ / / / __ \/ _ \/ __ \ | /| / / __ \/ / ___/ //_/
/ /_/ / /_/ / __/ / / / |/ |/ / / / / (__ ) ,<
\____/ .___/\___/_/ /_/|__/|__/_/ /_/_/____/_/|_|
/_/
"""
}
###
Callback definitions. These can be overridden by functions anywhere else
###
@preventDefaultCallback = false
@immediateCallback = (command) ->
console.debug("immediate callback from #{command}")
return
@finishedCallback = (command) ->
console.debug("finished callback from #{command}")
return
@intermediateResults = (string) ->
console.debug("sent #{string}")
return
@currentDockerPs = ""
@currentVolumes = [""]
@currentIceGroups = """
Group Id Name Status Created Updated Port
"""
@currentIcePs = """
Container Id Name Group Image Created State Private IP Public IP Ports
"""
@currentLocalImages = """
Target is local host. Invoking docker with the given arguments...
REPOSITORY TAG IMAGE ID CREATED VIRTUAL SIZE
ubuntu latest 8dbd9e392a96 4 months ago 131.5 MB (virtual 131.5 MB)
"""
@currentCloudImages = """
Image Id Created Image Name
d0feae99-b91d-4ce3-bcb4-6128886f6968 Mar 23 10:44:59 2015 registry-ice.ng.bluemix.net/ibmliberty:latest
74831680-1c9c-424e-b8ea-ceede4aa0e40 Mar 23 10:41:24 2015 registry-ice.ng.bluemix.net/ibmnode:latest
"""
###
Base interpreter
###
@interpreter = (input, term) ->
input = input.trim()
inputs = input.split(" ")
command = inputs[0]
if term.loginSequence in [1, 2]
login(term, inputs)
else if command is 'hi'
term.echo 'hi there! What is your name??'
term.push (command, term) ->
term.echo command + ' is a pretty name'
else if command is 'shell'
term.push (command, term) ->
if command is 'cd'
bash(term, inputs)
, {prompt: '> $ '}
else if command is 'r'
location.reload('forceGet')
else if command is '#'
term.echo 'which question?'
else if command is 'cd'
bash(term, inputs)
else if command is "wsk"
wsk(term, inputs)
else if command is "cat"
cat(term, inputs)
else if command is "ls"
term.echo "hello.js"
else if command is "cd" or command is "pwd"
term.echo "This is an emulator, not a shell. Try following the instructions."
else if command is "pull"
term.echo '[[b;#fff;]some text]'
wait(term, 5000, true)
alert term.get_output()
return
## finally
else if command
term.echo "#{inputs[0]}: command not found"
immediateCallback(inputs)
### =======================================
Common utils
======================================= ###
String.prototype.beginsWith = (string) ->
###
Check if 'this' string starts with the inputstring.
###
return(this.indexOf(string) is 0)
Array.prototype.containsAllOfThese = (inputArr) ->
###
This function compares all of the elements in the inputArr
and checks them one by one if they exist in 'this'. When it
finds an element to not exist, it returns false.
###
me = this
valid = false
if inputArr
valid = inputArr.every( (value) ->
if me.indexOf(value) == -1
return false
else
return true
)
return valid
Array.prototype.containsAllOfTheseParts = (inputArr) ->
###
This function is like containsAllofThese, but also matches partial strings.
###
me = this
if inputArr
valid = inputArr.every( (value) ->
for item in me
# we use string matching, but we want to be sensitive to dashes, because '--login' != 'login'
itemDashes = (item.match(/--/g) || []).length
valueDashes = (value.match(/--/g) || []).length
if item.match(value) and itemDashes >= valueDashes
return true
return false
)
return valid
parseInput = (inputs) ->
command = inputs[1]
switches = []
switchArg = false
switchArgs = []
imagename = ""
commands = []
j = 0
# parse args
for input in inputs
if input.startsWith('-') and imagename == ""
switches.push(input)
if switches.length > 0
if not ['-i', '-t', '-d'].containsAllOfThese([input])
switchArg = true
else if switchArg == true
# reset switchArg
switchArg = false
switchArgs.push(input)
else if j > 1 and imagename == "" and input != 'create'
# match wrong names
imagename = input
else if input is 'create'
commands.push (input)
else if imagename != ""
commands.push (input)
else
# nothing?
j++
parsed_input = {
'switches': switches.sortBy(),
'switchArgs': switchArgs,
'imageName': imagename,
'commands': commands,
}
return parsed_input
util_slow_lines = (term, paragraph, keyword, finishedCallback) ->
if keyword
lines = paragraph(keyword).split("\n")
else
lines = paragraph.split("\n")
term.pause()
i = 0
# function calls itself after timeout is done, untill
# all lines are finished
foo = (lines) ->
self.setTimeout ( ->
if lines[i]
term.echo (lines[i])
i++
foo(lines)
else
term.resume()
finishedCallback(term)
), 1000
foo(lines)
wait = (term, time, dots) ->
term.echo "starting to wait"
interval_id = self.setInterval ( -> dots ? term.insert '.'), 500
self.setTimeout ( ->
self.clearInterval(interval_id)
output = term.get_command()
term.echo output
term.echo "done "
), time
###
Bash program
###
bash = (term, inputs) ->
echo = term.echo
insert = term.insert
if not inputs[1]
console.log("none")
else
argument = inputs[1]
if argument.beginsWith('..')
echo "-bash: cd: #{argument}: Permission denied"
else
echo "-bash: cd: #{argument}: No such file or directory"
#---------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------
# WSK I N T E R P R E T E R --------------------------------------------------------
#---------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------
cat = (term, inputs) ->
echo = term.echo
if inputs[1] is "hello.js"
echo wsk_cat_helloWorld
wsk = (term, inputs) ->
echo = term.echo
insert = term.insert
callback = () -> @finishedCallback(inputs)
command = inputs[1]
# no command
if not inputs[1]
console.debug "no args"
echo wsk_no_args
else if inputs[1] is "--help" or inputs[1] is "-h"
echo wsk_help
else if inputs[1] is "action"
if inputs[2] is "create"
if inputs[3] is "hello"
if inputs[4] is "hello.js"
echo wsk_create_action_hello
if inputs[3] is "myAction"
if inputs[4] is "--sequence"
if inputs[5] is "/whisk.system/util/cat,/whisk.system/util/sort"
echo wsk_create_action_sequence
else
echo wsk_unrecognized_arguments
else if inputs[2] is "list"
echo wsk_list_action_hello
else if inputs[2] is "invoke"
if inputs[3] is "hello"
echo wsk_action_invoke_hello
else if inputs[3] is "--blocking"
if inputs[4] is "hello"
echo wsk_action_invoke_blocking_hello
else
echo wsk_no_args
else if inputs[1] is "package"
if inputs[2] is "get"
if inputs[3] is "--summary"
if inputs[4] is "/whisk.system/util"
echo wsk_package_get
else if inputs[1] is "activation"
if inputs[2] is "result"
if inputs[3] is "6bf1f670ee614a7eb5af3c9fde813043"
echo wsk_activation_result
else if inputs[2] is "list"
echo wsk_activation_list
else if inputs[1] is "-v"
if inputs[2] is "action"
if inputs[3] is "create"
if inputs[4] is "hello"
if inputs[5] is "hello.js"
echo wsk_create_action_hello_v
else if inputs[1] is "images"
echo currentCloudImages
#---------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------
# WSK C O N T E N T ---------------------------------------------------------------
#---------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------
wsk_help = \
"""
usage: wsk [-h] [-v] [--apihost hostname] [--apiversion version]
{action,activation,namespace,package,rule,trigger,sdk,property,list}
...
OpenWhisk is a distributed compute service to add event-driven logic to your
apps.
optional arguments:
-h, --help show this help message and exit
-v, --verbose verbose output
--apihost hostname whisk API host
--apiversion version whisk API version
available commands:
{action,activation,namespace,package,rule,trigger,sdk,property,list}
action work with actions
activation work with activations
namespace work with namespaces
package work with packages
rule work with rules
trigger work with triggers
sdk work with the SDK
property work with whisk properties
list list all triggers, actions, and rules in the registry
Learn more at https://developer.ibm.com/openwhisk fork on GitHub
https://github.com/openwhisk. All trademarks are the property of their
respective owners.
"""
wsk_invalid_choice = \
"""
usage: wsk [-h] [-v] [--apihost hostname] [--apiversion version]
{action,activation,namespace,package,rule,trigger,sdk,property,list}
...
wsk: error: argument cmd: invalid choice: (choose from 'action', 'activation', 'namespace', 'package', 'rule', 'trigger', 'sdk', 'property', 'list')
"""
wsk_cat_helloWorld = \
"""
function main(params) {
return {payload: 'Hello world'};
}
"""
wsk_create_action_hello = \
"""
ok: created action hello
"""
wsk_create_action_hello_v = \
"""
{'apihost': 'openwhisk.ng.bluemix.net', 'namespace': 'jstart', 'clibuild': '2016-03-03T09:55:47-06:00', 'apiversion': 'v1'}
========
REQUEST:
PUT https://openwhisk.ng.bluemix.net/api/v1/namespaces/jstart/actions/hello
Headers sent:
{
"Authorization": "Basic
U<KEY>
"Content-Type": "application/json"
}
Body sent:
{"exec": {"kind": "nodejs", "code": "function main(params) {\n return {payload: 'Hello, ' + params.name + ' from ' + params.place};\n}\n\n"}}
--------
RESPONSE:
Got response with code 200
Body received:
{
"name": "hello",
"publish": false,
"annotations": [],
"version": "0.0.1",
"exec": {
"kind": "nodejs",
"code": "function main(params) {\n return {payload: 'Hello, ' + params.name + ' from ' + params.place};\n}\n\n"
},
"parameters": [],
"limits": {
"timeout": 60000,
"memory": 256
},
"namespace": "jstart"
}
========
ok: created action hello
"""
wsk_list_action_hello = \
"""
actions
hello private
"""
wsk_action_invoke_hello = \
"""
ok: invoked hello with id 6bf1f670ee614a7eb5af3c9fde813043
"""
wsk_action_invoke_blocking_hello = \
"""
ok: invoked hello with id 44794bd6aab74415b4e42a308d880e5b
response:
{
"result": {
"payload": "Hello world"
},
"status": "success",
"success": true
}
"""
wsk_activation_result = \
"""
{
"payload" : "Hello world"
}
"""
wsk_activation_list = \
"""
activations
44794bd6aab74415b4e42a308d880e5b hello
6bf1f670ee614a7eb5af3c9fde813043 hello
"""
wsk_no_args = \
"""
usage: wsk [-h] [-v] [--apihost hostname] [--apiversion version]
{action,activation,namespace,package,rule,trigger,sdk,property,list}
...
wsk: error: too few arguments
"""
wsk_create_action_sequence = \
"""
ok: created action sequenceOfActions
"""
wsk_unrecognized_arguments = \
"""
usage: wsk [-h] [-v] [--apihost hostname] [--apiversion version]
{action,activation,namespace,package,rule,trigger,sdk,property,list}
...
wsk: error: unrecognized arguments
"""
wsk_package_get = \
"""
package /whisk.system/util
action /whisk.system/util/cat: Concatenate array of strings, and split lines into an array
action /whisk.system/util/head: Filter first K array elements and discard rest
action /whisk.system/util/date: Get current date and time
action /whisk.system/util/sort: Sort array
"""
return this | true | ###
Please note the javascript is being fully generated from coffeescript.
So make your changes in the .coffee file.
PI:NAME:<NAME>END_PI
_
,_(')<
\___)
Forked and Modified by IBM jStart
###
do @myTerminal = ->
# Which terminal emulator version are we
EMULATOR_VERSION = "0.1.5"
@basesettings = {
prompt: '[[b;#fff;]you@tutorial:~$] ',
greetings: """
Imitation is the sincerest form of flattery
We loved Docker's try it approach - so we forked it
Welcome to the IBM OpenWhisk tutorial
Courtesy of IBM jStart (http://ibm.com/jstart)
____ _ ____ _ __
/ __ \____ ___ ____| | / / /_ (_)____/ /__
/ / / / __ \/ _ \/ __ \ | /| / / __ \/ / ___/ //_/
/ /_/ / /_/ / __/ / / / |/ |/ / / / / (__ ) ,<
\____/ .___/\___/_/ /_/|__/|__/_/ /_/_/____/_/|_|
/_/
"""
}
###
Callback definitions. These can be overridden by functions anywhere else
###
@preventDefaultCallback = false
@immediateCallback = (command) ->
console.debug("immediate callback from #{command}")
return
@finishedCallback = (command) ->
console.debug("finished callback from #{command}")
return
@intermediateResults = (string) ->
console.debug("sent #{string}")
return
@currentDockerPs = ""
@currentVolumes = [""]
@currentIceGroups = """
Group Id Name Status Created Updated Port
"""
@currentIcePs = """
Container Id Name Group Image Created State Private IP Public IP Ports
"""
@currentLocalImages = """
Target is local host. Invoking docker with the given arguments...
REPOSITORY TAG IMAGE ID CREATED VIRTUAL SIZE
ubuntu latest 8dbd9e392a96 4 months ago 131.5 MB (virtual 131.5 MB)
"""
@currentCloudImages = """
Image Id Created Image Name
d0feae99-b91d-4ce3-bcb4-6128886f6968 Mar 23 10:44:59 2015 registry-ice.ng.bluemix.net/ibmliberty:latest
74831680-1c9c-424e-b8ea-ceede4aa0e40 Mar 23 10:41:24 2015 registry-ice.ng.bluemix.net/ibmnode:latest
"""
###
Base interpreter
###
@interpreter = (input, term) ->
input = input.trim()
inputs = input.split(" ")
command = inputs[0]
if term.loginSequence in [1, 2]
login(term, inputs)
else if command is 'hi'
term.echo 'hi there! What is your name??'
term.push (command, term) ->
term.echo command + ' is a pretty name'
else if command is 'shell'
term.push (command, term) ->
if command is 'cd'
bash(term, inputs)
, {prompt: '> $ '}
else if command is 'r'
location.reload('forceGet')
else if command is '#'
term.echo 'which question?'
else if command is 'cd'
bash(term, inputs)
else if command is "wsk"
wsk(term, inputs)
else if command is "cat"
cat(term, inputs)
else if command is "ls"
term.echo "hello.js"
else if command is "cd" or command is "pwd"
term.echo "This is an emulator, not a shell. Try following the instructions."
else if command is "pull"
term.echo '[[b;#fff;]some text]'
wait(term, 5000, true)
alert term.get_output()
return
## finally
else if command
term.echo "#{inputs[0]}: command not found"
immediateCallback(inputs)
### =======================================
Common utils
======================================= ###
String.prototype.beginsWith = (string) ->
###
Check if 'this' string starts with the inputstring.
###
return(this.indexOf(string) is 0)
Array.prototype.containsAllOfThese = (inputArr) ->
###
This function compares all of the elements in the inputArr
and checks them one by one if they exist in 'this'. When it
finds an element to not exist, it returns false.
###
me = this
valid = false
if inputArr
valid = inputArr.every( (value) ->
if me.indexOf(value) == -1
return false
else
return true
)
return valid
Array.prototype.containsAllOfTheseParts = (inputArr) ->
###
This function is like containsAllofThese, but also matches partial strings.
###
me = this
if inputArr
valid = inputArr.every( (value) ->
for item in me
# we use string matching, but we want to be sensitive to dashes, because '--login' != 'login'
itemDashes = (item.match(/--/g) || []).length
valueDashes = (value.match(/--/g) || []).length
if item.match(value) and itemDashes >= valueDashes
return true
return false
)
return valid
parseInput = (inputs) ->
command = inputs[1]
switches = []
switchArg = false
switchArgs = []
imagename = ""
commands = []
j = 0
# parse args
for input in inputs
if input.startsWith('-') and imagename == ""
switches.push(input)
if switches.length > 0
if not ['-i', '-t', '-d'].containsAllOfThese([input])
switchArg = true
else if switchArg == true
# reset switchArg
switchArg = false
switchArgs.push(input)
else if j > 1 and imagename == "" and input != 'create'
# match wrong names
imagename = input
else if input is 'create'
commands.push (input)
else if imagename != ""
commands.push (input)
else
# nothing?
j++
parsed_input = {
'switches': switches.sortBy(),
'switchArgs': switchArgs,
'imageName': imagename,
'commands': commands,
}
return parsed_input
util_slow_lines = (term, paragraph, keyword, finishedCallback) ->
if keyword
lines = paragraph(keyword).split("\n")
else
lines = paragraph.split("\n")
term.pause()
i = 0
# function calls itself after timeout is done, untill
# all lines are finished
foo = (lines) ->
self.setTimeout ( ->
if lines[i]
term.echo (lines[i])
i++
foo(lines)
else
term.resume()
finishedCallback(term)
), 1000
foo(lines)
wait = (term, time, dots) ->
term.echo "starting to wait"
interval_id = self.setInterval ( -> dots ? term.insert '.'), 500
self.setTimeout ( ->
self.clearInterval(interval_id)
output = term.get_command()
term.echo output
term.echo "done "
), time
###
Bash program
###
bash = (term, inputs) ->
echo = term.echo
insert = term.insert
if not inputs[1]
console.log("none")
else
argument = inputs[1]
if argument.beginsWith('..')
echo "-bash: cd: #{argument}: Permission denied"
else
echo "-bash: cd: #{argument}: No such file or directory"
#---------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------
# WSK I N T E R P R E T E R --------------------------------------------------------
#---------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------
cat = (term, inputs) ->
echo = term.echo
if inputs[1] is "hello.js"
echo wsk_cat_helloWorld
wsk = (term, inputs) ->
echo = term.echo
insert = term.insert
callback = () -> @finishedCallback(inputs)
command = inputs[1]
# no command
if not inputs[1]
console.debug "no args"
echo wsk_no_args
else if inputs[1] is "--help" or inputs[1] is "-h"
echo wsk_help
else if inputs[1] is "action"
if inputs[2] is "create"
if inputs[3] is "hello"
if inputs[4] is "hello.js"
echo wsk_create_action_hello
if inputs[3] is "myAction"
if inputs[4] is "--sequence"
if inputs[5] is "/whisk.system/util/cat,/whisk.system/util/sort"
echo wsk_create_action_sequence
else
echo wsk_unrecognized_arguments
else if inputs[2] is "list"
echo wsk_list_action_hello
else if inputs[2] is "invoke"
if inputs[3] is "hello"
echo wsk_action_invoke_hello
else if inputs[3] is "--blocking"
if inputs[4] is "hello"
echo wsk_action_invoke_blocking_hello
else
echo wsk_no_args
else if inputs[1] is "package"
if inputs[2] is "get"
if inputs[3] is "--summary"
if inputs[4] is "/whisk.system/util"
echo wsk_package_get
else if inputs[1] is "activation"
if inputs[2] is "result"
if inputs[3] is "6bf1f670ee614a7eb5af3c9fde813043"
echo wsk_activation_result
else if inputs[2] is "list"
echo wsk_activation_list
else if inputs[1] is "-v"
if inputs[2] is "action"
if inputs[3] is "create"
if inputs[4] is "hello"
if inputs[5] is "hello.js"
echo wsk_create_action_hello_v
else if inputs[1] is "images"
echo currentCloudImages
#---------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------
# WSK C O N T E N T ---------------------------------------------------------------
#---------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------
# ---- Canned `wsk` CLI outputs returned by the simulator dispatch above ----

# `wsk -h` / `wsk --help`
wsk_help = \
"""
usage: wsk [-h] [-v] [--apihost hostname] [--apiversion version]
{action,activation,namespace,package,rule,trigger,sdk,property,list}
...
OpenWhisk is a distributed compute service to add event-driven logic to your
apps.
optional arguments:
-h, --help show this help message and exit
-v, --verbose verbose output
--apihost hostname whisk API host
--apiversion version whisk API version
available commands:
{action,activation,namespace,package,rule,trigger,sdk,property,list}
action work with actions
activation work with activations
namespace work with namespaces
package work with packages
rule work with rules
trigger work with triggers
sdk work with the SDK
property work with whisk properties
list list all triggers, actions, and rules in the registry
Learn more at https://developer.ibm.com/openwhisk fork on GitHub
https://github.com/openwhisk. All trademarks are the property of their
respective owners.
"""
# Error shown for an unknown sub-command.
wsk_invalid_choice = \
"""
usage: wsk [-h] [-v] [--apihost hostname] [--apiversion version]
{action,activation,namespace,package,rule,trigger,sdk,property,list}
...
wsk: error: argument cmd: invalid choice: (choose from 'action', 'activation', 'namespace', 'package', 'rule', 'trigger', 'sdk', 'property', 'list')
"""
# `cat hello.js` -- the sample action source file.
wsk_cat_helloWorld = \
"""
function main(params) {
return {payload: 'Hello world'};
}
"""
# `wsk action create hello hello.js`
wsk_create_action_hello = \
"""
ok: created action hello
"""
# `wsk -v action create hello hello.js` (verbose request/response trace).
# NOTE(review): the "UPI:KEY:<KEY>END_PI" line is a redaction placeholder
# standing in for the original Authorization credential -- do not "fix" it.
wsk_create_action_hello_v = \
"""
{'apihost': 'openwhisk.ng.bluemix.net', 'namespace': 'jstart', 'clibuild': '2016-03-03T09:55:47-06:00', 'apiversion': 'v1'}
========
REQUEST:
PUT https://openwhisk.ng.bluemix.net/api/v1/namespaces/jstart/actions/hello
Headers sent:
{
"Authorization": "Basic
UPI:KEY:<KEY>END_PI
"Content-Type": "application/json"
}
Body sent:
{"exec": {"kind": "nodejs", "code": "function main(params) {\n return {payload: 'Hello, ' + params.name + ' from ' + params.place};\n}\n\n"}}
--------
RESPONSE:
Got response with code 200
Body received:
{
"name": "hello",
"publish": false,
"annotations": [],
"version": "0.0.1",
"exec": {
"kind": "nodejs",
"code": "function main(params) {\n return {payload: 'Hello, ' + params.name + ' from ' + params.place};\n}\n\n"
},
"parameters": [],
"limits": {
"timeout": 60000,
"memory": 256
},
"namespace": "jstart"
}
========
ok: created action hello
"""
# `wsk action list`
wsk_list_action_hello = \
"""
actions
hello private
"""
# `wsk action invoke hello` (non-blocking).
wsk_action_invoke_hello = \
"""
ok: invoked hello with id 6bf1f670ee614a7eb5af3c9fde813043
"""
# `wsk action invoke --blocking hello`
wsk_action_invoke_blocking_hello = \
"""
ok: invoked hello with id 44794bd6aab74415b4e42a308d880e5b
response:
{
"result": {
"payload": "Hello world"
},
"status": "success",
"success": true
}
"""
# `wsk activation result <id>`
wsk_activation_result = \
"""
{
"payload" : "Hello world"
}
"""
# `wsk activation list`
wsk_activation_list = \
"""
activations
44794bd6aab74415b4e42a308d880e5b hello
6bf1f670ee614a7eb5af3c9fde813043 hello
"""
# Error when `wsk` is invoked with no arguments at all.
wsk_no_args = \
"""
usage: wsk [-h] [-v] [--apihost hostname] [--apiversion version]
{action,activation,namespace,package,rule,trigger,sdk,property,list}
...
wsk: error: too few arguments
"""
# `wsk action create sequenceOfActions ...`
wsk_create_action_sequence = \
"""
ok: created action sequenceOfActions
"""
# Error for extra, unparsed trailing arguments.
wsk_unrecognized_arguments = \
"""
usage: wsk [-h] [-v] [--apihost hostname] [--apiversion version]
{action,activation,namespace,package,rule,trigger,sdk,property,list}
...
wsk: error: unrecognized arguments
"""
# `wsk package get /whisk.system/util`
wsk_package_get = \
"""
package /whisk.system/util
action /whisk.system/util/cat: Concatenate array of strings, and split lines into an array
action /whisk.system/util/head: Filter first K array elements and discard rest
action /whisk.system/util/date: Get current date and time
action /whisk.system/util/sort: Sort array
"""
return this |
[
{
"context": " of file\n nfile[k] = v\n\n __key = \"__#{key}\"\n if nfile[__key] is undefined\n ",
"end": 1075,
"score": 0.8879501819610596,
"start": 1070,
"tag": "KEY",
"value": "\"__#{"
}
] | src/pipe.coffee | durko/bungle | 0 | crypto = require "crypto"
minimatch = require "minimatch"
RSVP = require "rsvp"
# Deep-copy a value: primitives and null/undefined pass straight through;
# objects are rebuilt via their own constructor with every enumerable key
# (own or inherited) cloned recursively.
clone = (obj) ->
  return obj unless obj? and typeof obj is 'object'
  copy = new obj.constructor()
  for key of obj
    copy[key] = clone obj[key]
  copy

# Resolve a default entry: function values are invoked so each consumer
# gets a fresh result; everything else is deep-copied.
cloned = (v) ->
  if typeof v is "function" then v.apply(this) else clone v
# Abstract base class for all pipeline stages.
#
# A pipe receives file events (add / change / unlink) from upstream pipes,
# may transform or filter them, and relays them to every pipe registered in
# @outputs.  Files whose name does not match @config.pattern are either
# relayed untouched (when @config.passthrough) or returned unchanged.
class BasePipe
  # Subclasses override to describe their configuration schema.
  @schema: -> {}

  @configDefaults:
    pattern: "**/*"

  @stateDefaults:
    localFiles: {}

  # @config / @state / @pipeline become instance properties.  Defaults are
  # merged by walking up the class hierarchy so a subclass inherits every
  # ancestor's configDefaults / stateDefaults entries (function values are
  # invoked, plain values deep-cloned -- see `cloned`).
  constructor: (@config, @state, @pipeline) ->
    @log "debug", "Pipe created" if @config.debug
    obj = @constructor
    while obj
      @config[k] ?= cloned v for k, v of obj.configDefaults
      @state[k] ?= cloned v for k, v of obj.stateDefaults
      obj = obj.__super__?.constructor
    @outputs = []

  # Lifecycle hooks; subclasses override as needed.
  init: -> null
  start: (res) -> res
  stop: -> null

  #### Helpers for modifications
  # Clone file and modify property.  The previous value is stashed once
  # under "__<key>" so downstream pipes can still read the original.
  modifyFile: (file, key, value) ->
    nfile = {}
    for k, v of file
      nfile[k] = v
    __key = "__#{key}"
    if nfile[__key] is undefined
      nfile[__key] = file[key]
    nfile[key] = value
    nfile

  #### Pipe local file management
  # Add a new file into the pipeline.  No-op (resolved promise) when the
  # file is already tracked; otherwise relays an add event downstream and
  # records whether any consumer accepted it (res.add).
  fileAdd: (name) ->
    return RSVP.Promise.resolve() if @state.localFiles[name]
    @log "debug", "LA #{name}" if @config.debug
    BasePipe::add.call @,
      name: name
      add: false
    .then (res) =>
      @state.localFiles[name] =
        name: name
        added: res.add
      res
    .catch (err) =>
      @log "error", err
      @log "error", err.stack

  # Message file change.  Content is sha1-hashed so downstream change events
  # fire only when the bytes actually differ; overlapping changes are
  # serialized via the `changing` / `reread` flags.
  fileChange: (name, content) ->
    file = @state.localFiles[name]
    return if not file?.added
    @log "debug", "LC #{name}" if @config.debug
    if file.changing
      @state.localFiles[name].reread = true
      return
    file.changing = true
    if content is undefined
      # getFileContent is supplied by a concrete subclass -- not defined here.
      p = @getFileContent name
    else
      p = RSVP.Promise.resolve content
    p
    .then (content) =>
      hash = crypto.createHash "sha1"
      hash.update content
      digest = hash.digest "hex"
      if digest isnt file.hash
        file.hash = digest
        sendfile = @modifyFile file, "content", content
        BasePipe::change.call @, sendfile
      else
        null
    .then (res) =>
      file.changing = false
      if file.reread
        # A change arrived while we were busy: process it now.
        file.reread = false
        @fileChange name
      res
    .catch (err) =>
      @log "error", err
      @log "error", err.stack

  # Remove a file from the pipeline
  fileUnlink: (name) ->
    file = @state.localFiles[name]
    return if not file?.added
    @log "debug", "LD #{name}" if @config.debug
    BasePipe::unlink.call @, file
    .then (res) =>
      delete @state.localFiles[name]
      res
    .catch (err) =>
      @log "error", err
      @log "error", err.stack

  #### Interpipe communication
  # Process add from previous pipes: dispatch to @add when the name matches
  # @config.pattern (after optional @rename), otherwise pass through or drop.
  _add_in: (file) ->
    if minimatch file.name, @config.pattern
      @log "debug", "A #{file.name}" if @config.debug
      file = @modifyFile file, "name", @rename file.name if @rename
      @add file
    else if @config.passthrough
      BasePipe::add.call @, file
    else
      file

  # Relay add to subsequent pipes; results are folded into one {name, add}
  # object (add is true when any output accepted the file).
  add: (file) ->
    p = RSVP.Promise.resolve file
    if @outputs.length
      RSVP.all @outputs.map (o) ->
        p.then (arg) -> o._add_in arg
      .then (res) ->
        res.reduce (p, file) ->
          name: file.name
          add: p.add || file.add
        , {}
    else
      p

  # Process change from previous pipes (same dispatch rules as _add_in).
  _change_in: (file) ->
    if minimatch file.name, @config.pattern
      @log "debug", "M #{file.name}" if @config.debug
      file = @modifyFile file, "name", @rename file.name if @rename
      @change file
    else if @config.passthrough
      BasePipe::change.call @, file
    else
      file

  # Relay change to subsequent pipes; nested result arrays are flattened
  # into a single duplicate-free list of files.
  change: (file) ->
    p = RSVP.Promise.resolve file
    if @outputs.length
      RSVP.all @outputs.map (o) ->
        p.then (arg) -> o._change_in arg
      .then (res) ->
        res.reduce (p, file) ->
          if Array.isArray file
            file.reduce (p, file) ->
              p.push file if file not in p
              p
            , p
          else
            p.push file if file not in p
            p
        , []
    else
      p

  # Process unlink from previous pipes (same dispatch rules as _add_in).
  _unlink_in: (file) ->
    if minimatch file.name, @config.pattern
      @log "debug", "D #{file.name}" if @config.debug
      file = @modifyFile file, "name", @rename file.name if @rename
      @unlink file
    else if @config.passthrough
      BasePipe::unlink.call @, file
    else
      file

  # Relay unlink to subsequent pipes (same flattening as `change`).
  unlink: (file) ->
    p = RSVP.Promise.resolve file
    if @outputs.length
      RSVP.all @outputs.map (o) ->
        p.then (arg) -> o._unlink_in arg
      .then (res) ->
        res.reduce (p, file) ->
          if Array.isArray file
            file.reduce (p, file) ->
              p.push file if file not in p
              p
            , p
          else
            p.push file if file not in p
            p
        , []
    else
      p

  # Hook for pipeline-wide requests; intentionally a no-op here.
  broadcast: (req) ->

  # Prefix every log line with "type(id)" of this pipe.
  log: (level, args...) ->
    @pipeline.logger.log level, "#{@config.type}(#{@config.id})", args...
# Pipe that compiles one output file from the *list of names* of all files
# that reached it.  The concrete subclass supplies `compile` (not defined
# here); it is re-run on every add/unlink once the pipe has started.
class CompileInputListPipe extends BasePipe
  @stateDefaults:
    files: []

  start: ->
    @started = true
    # Register the compiled target file, then build it a first time.
    super @fileAdd(@config.filename).then => @compile()

  add: (file) ->
    file.add = true
    @state.files.push file.name
    @compile() if @started
    if @config.passthrough
      super file
    else
      file

  unlink: (file) ->
    @state.files.splice (@state.files.indexOf file.name), 1
    @compile() if @started
    if @config.passthrough
      super file
    else
      file
# Pipe that compiles one output file from the *contents* of all files that
# reached it (name -> content map).  The concrete subclass supplies
# `compile` (not defined here).
class CompileInputDataPipe extends BasePipe
  @stateDefaults:
    files: {}

  start: ->
    @started = true
    # Register the compiled target file, then build it a first time.
    super @fileAdd(@config.filename).then => @compile()

  add: (file) ->
    file.add = true
    # Real content arrives later via `change`; start with a placeholder.
    @state.files[file.name] = ""
    if @config.passthrough
      super file
    else
      file

  change: (file) ->
    @state.files[file.name] = file.content
    @compile() if @started
    if @config.passthrough
      super file
    else
      file

  unlink: (file) ->
    delete @state.files[file.name]
    @compile() if @started
    if @config.passthrough
      super file
    else
      file
# Pipe that tracks include/import-style dependencies between files.
#
# State:
#   depsFor[a] -- set of files that `a` depends on
#   depsOn[a]  -- inverse index: set of files that depend on `a`
#   files[a]   -- last known content of `a`
#   backlog    -- names whose (re)compilation is deferred until `start`
# The concrete subclass supplies `dependenciesFor` and `compile`
# (not defined here).
class DependsPipe extends BasePipe
  @stateDefaults:
    depsFor: {}
    depsOn: {}
    files: {}
    backlog: {}

  # Sequentially (re)compile every *root* in `names`, i.e. every file that
  # nothing else depends on; dependent (included) files are skipped.
  _compile: (names) ->
    depsOn = @state.depsOn
    names.reduce (p, i) =>
      if Object.keys(depsOn[i]).length
        p
      else
        p
        .then => @fileAdd i
        .then => @compile i
    , RSVP.Promise.resolve()

  start: ->
    @started = true
    depsOn = @state.depsOn
    names = []
    # Drain the backlog accumulated before start; only roots get compiled.
    for name of @state.backlog
      delete @state.backlog[name]
      names.push name if not Object.keys(depsOn[name]).length
    @_compile names

  add: (file) ->
    file.add = true
    @state.depsFor[file.name] = {}
    @state.depsOn[file.name] = {}
    # NOTE(review): `new Buffer ""` is deprecated in modern Node; prefer
    # Buffer.alloc(0) when this can be revisited.
    @state.files[file.name] = new Buffer ""
    # Rebuild the inverse index: who already depends on the new file?
    depsOn = @state.depsOn[file.name]
    for name, deps of @state.depsFor
      depsOn[name] = true if deps[file.name]
    if @started
      if Object.keys(depsOn).length
        file
      else
        @fileAdd file.name
    else
      @state.backlog[file.name] = true
      file

  change: (file) ->
    @state.files[file.name] = file.content
    deps = @dependenciesFor file.name
    old = @state.depsFor[file.name]
    @state.depsFor[file.name] = {}
    for dep in deps
      @state.depsFor[file.name][dep] = true
    deps = @state.depsFor[file.name]
    add = []
    unlink = []
    for dep of old
      # obsolete dep
      if not deps[dep]
        c = @state.depsOn[dep]
        if c
          delete c[file.name]
        add.push dep
    for dep of deps
      # new dep
      if not old[dep]
        c = @state.depsOn[dep]
        if c
          c[file.name] = true
        unlink.push dep
    # Files that just became dependencies leave the pipeline as standalone
    # outputs; files no longer depended upon may need recompiling as roots.
    promise = unlink.reduce (p, i) =>
      p.then => @fileUnlink i
    , RSVP.Promise.resolve()
    if @started
      promise = promise.then => @_compile add
    else
      for name in add
        @state.backlog[name] = true
    promise.then =>
      if @started
        depsOn = @state.depsOn[file.name]
        if Object.keys(depsOn).length
          # Not a root: recompile everything that depends on this file.
          @_compile Object.keys depsOn
        else
          @_compile [file.name]
      else
        @state.backlog[file.name] = true
      file

  unlink: (file) ->
    deps = @state.depsFor[file.name]
    delete @state.depsFor[file.name]
    delete @state.depsOn[file.name]
    delete @state.files[file.name]
    delete @state.backlog[file.name]
    # Former dependencies may have become roots -- recompile them.
    add = []
    for dep of deps
      depsOn = @state.depsOn[dep]
      delete depsOn[file.name]
      add.push dep
    if @started
      @_compile add
      .then =>
        @fileUnlink file.name
    else
      for name in add
        @state.backlog[name] = true
      @fileUnlink file.name
# Public module surface: the abstract base pipe plus its three
# specializations.  (`exports` is the same object as `module.exports`.)
exports.BasePipe             = BasePipe
exports.CompileInputListPipe = CompileInputListPipe
exports.CompileInputDataPipe = CompileInputDataPipe
exports.DependsPipe          = DependsPipe
| 148240 | crypto = require "crypto"
minimatch = require "minimatch"
RSVP = require "rsvp"
clone = (obj) ->
return obj if not obj? or typeof obj isnt 'object'
newInstance = new obj.constructor()
newInstance[key] = clone obj[key] for key of obj
newInstance
cloned = (v) -> if typeof v is "function" then v.apply @ else clone v
class BasePipe
@schema: -> {}
@configDefaults:
pattern: "**/*"
@stateDefaults:
localFiles: {}
constructor: (@config, @state, @pipeline) ->
@log "debug", "Pipe created" if @config.debug
obj = @constructor
while obj
@config[k] ?= cloned v for k, v of obj.configDefaults
@state[k] ?= cloned v for k, v of obj.stateDefaults
obj = obj.__super__?.constructor
@outputs = []
init: -> null
start: (res) -> res
stop: -> null
#### Helpers for modifications
# Clone file and modify property
modifyFile: (file, key, value) ->
nfile = {}
for k, v of file
nfile[k] = v
__key = <KEY>key}"
if nfile[__key] is undefined
nfile[__key] = file[key]
nfile[key] = value
nfile
#### Pipe local file management
# Add a new file into the pipeline.
fileAdd: (name) ->
return RSVP.Promise.resolve() if @state.localFiles[name]
@log "debug", "LA #{name}" if @config.debug
BasePipe::add.call @,
name:name
add:false
.then (res) =>
@state.localFiles[name] =
name:name
added:res.add
res
.catch (err) =>
@log "error", err
@log "error", err.stack
# Message file change
fileChange: (name, content) ->
file = @state.localFiles[name]
return if not file?.added
@log "debug", "LC #{name}" if @config.debug
if file.changing
@state.localFiles[name].reread = true
return
file.changing = true
if content is undefined
p = @getFileContent name
else
p = RSVP.Promise.resolve content
p
.then (content) =>
hash = crypto.createHash "sha1"
hash.update content
digest = hash.digest "hex"
if digest isnt file.hash
file.hash = digest
sendfile = @modifyFile file, "content", content
BasePipe::change.call @, sendfile
else
null
.then (res) =>
file.changing = false
if file.reread
file.reread = false
@fileChange name
res
.catch (err) =>
@log "error", err
@log "error", err.stack
# Remove a file from the pipeline
fileUnlink: (name) ->
file = @state.localFiles[name]
return if not file?.added
@log "debug", "LD #{name}" if @config.debug
BasePipe::unlink.call @, file
.then (res) =>
delete @state.localFiles[name]
res
.catch (err) =>
@log "error", err
@log "error", err.stack
#### Interpipe communication
# Process add from previous pipes
_add_in: (file) ->
if minimatch file.name, @config.pattern
@log "debug", "A #{file.name}" if @config.debug
file = @modifyFile file, "name", @rename file.name if @rename
@add file
else if @config.passthrough
BasePipe::add.call @, file
else
file
# Relay add to subsequent pipes
add: (file) ->
p = RSVP.Promise.resolve file
if @outputs.length
RSVP.all @outputs.map (o) ->
p.then (arg) -> o._add_in arg
.then (res) ->
res.reduce (p, file) ->
name:file.name
add:p.add||file.add
, {}
else
p
# Process change from previous pipes
_change_in: (file) ->
if minimatch file.name, @config.pattern
@log "debug", "M #{file.name}" if @config.debug
file = @modifyFile file, "name", @rename file.name if @rename
@change file
else if @config.passthrough
BasePipe::change.call @, file
else
file
# Relay change to subsequent pipes
change: (file) ->
p = RSVP.Promise.resolve file
if @outputs.length
RSVP.all @outputs.map (o) ->
p.then (arg) -> o._change_in arg
.then (res) ->
res.reduce (p, file) ->
if Array.isArray file
file.reduce (p, file) ->
p.push file if file not in p
p
, p
else
p.push file if file not in p
p
, []
else
p
# Process unlink from previous pipes
_unlink_in: (file) ->
if minimatch file.name, @config.pattern
@log "debug", "D #{file.name}" if @config.debug
file = @modifyFile file, "name", @rename file.name if @rename
@unlink file
else if @config.passthrough
BasePipe::unlink.call @, file
else
file
# Relay unlink to subsequent pipes
unlink: (file) ->
p = RSVP.Promise.resolve file
if @outputs.length
RSVP.all @outputs.map (o) ->
p.then (arg) -> o._unlink_in arg
.then (res) ->
res.reduce (p, file) ->
if Array.isArray file
file.reduce (p, file) ->
p.push file if file not in p
p
, p
else
p.push file if file not in p
p
, []
else
p
broadcast: (req) ->
log: (level, args...) ->
@pipeline.logger.log level, "#{@config.type}(#{@config.id})", args...
class CompileInputListPipe extends BasePipe
@stateDefaults:
files: []
start: ->
@started = true
super @fileAdd(@config.filename).then => @compile()
add: (file) ->
file.add = true
@state.files.push file.name
@compile() if @started
if @config.passthrough
super file
else
file
unlink: (file) ->
@state.files.splice (@state.files.indexOf file.name), 1
@compile() if @started
if @config.passthrough
super file
else
file
class CompileInputDataPipe extends BasePipe
@stateDefaults:
files: {}
start: ->
@started = true
super @fileAdd(@config.filename).then => @compile()
add: (file) ->
file.add = true
@state.files[file.name] = ""
if @config.passthrough
super file
else
file
change: (file) ->
@state.files[file.name] = file.content
@compile() if @started
if @config.passthrough
super file
else
file
unlink: (file) ->
delete @state.files[file.name]
@compile() if @started
if @config.passthrough
super file
else
file
class DependsPipe extends BasePipe
@stateDefaults:
depsFor: {}
depsOn: {}
files: {}
backlog: {}
_compile: (names) ->
depsOn = @state.depsOn
names.reduce (p, i) =>
if Object.keys(depsOn[i]).length
p
else
p
.then => @fileAdd i
.then => @compile i
, RSVP.Promise.resolve()
start: ->
@started = true
depsOn = @state.depsOn
names = []
for name of @state.backlog
delete @state.backlog[name]
names.push name if not Object.keys(depsOn[name]).length
@_compile names
add: (file) ->
file.add = true
@state.depsFor[file.name] = {}
@state.depsOn[file.name] = {}
@state.files[file.name] = new Buffer ""
depsOn = @state.depsOn[file.name]
for name, deps of @state.depsFor
depsOn[name] = true if deps[file.name]
if @started
if Object.keys(depsOn).length
file
else
@fileAdd file.name
else
@state.backlog[file.name] = true
file
change: (file) ->
@state.files[file.name] = file.content
deps = @dependenciesFor file.name
old = @state.depsFor[file.name]
@state.depsFor[file.name] = {}
for dep in deps
@state.depsFor[file.name][dep] = true
deps = @state.depsFor[file.name]
add = []
unlink = []
for dep of old
# obsolete dep
if not deps[dep]
c = @state.depsOn[dep]
if c
delete c[file.name]
add.push dep
for dep of deps
# new dep
if not old[dep]
c = @state.depsOn[dep]
if c
c[file.name] = true
unlink.push dep
promise = unlink.reduce (p, i) =>
p.then => @fileUnlink i
, RSVP.Promise.resolve()
if @started
promise = promise.then => @_compile add
else
for name in add
@state.backlog[name] = true
promise.then =>
if @started
depsOn = @state.depsOn[file.name]
if Object.keys(depsOn).length
@_compile Object.keys depsOn
else
@_compile [file.name]
else
@state.backlog[file.name] = true
file
unlink: (file) ->
deps = @state.depsFor[file.name]
delete @state.depsFor[file.name]
delete @state.depsOn[file.name]
delete @state.files[file.name]
delete @state.backlog[file.name]
add = []
for dep of deps
depsOn = @state.depsOn[dep]
delete depsOn[file.name]
add.push dep
if @started
@_compile add
.then =>
@fileUnlink file.name
else
for name in add
@state.backlog[name] = true
@fileUnlink file.name
module.exports.BasePipe = BasePipe
module.exports.CompileInputListPipe = CompileInputListPipe
module.exports.CompileInputDataPipe = CompileInputDataPipe
module.exports.DependsPipe = DependsPipe
| true | crypto = require "crypto"
minimatch = require "minimatch"
RSVP = require "rsvp"
clone = (obj) ->
return obj if not obj? or typeof obj isnt 'object'
newInstance = new obj.constructor()
newInstance[key] = clone obj[key] for key of obj
newInstance
cloned = (v) -> if typeof v is "function" then v.apply @ else clone v
class BasePipe
@schema: -> {}
@configDefaults:
pattern: "**/*"
@stateDefaults:
localFiles: {}
constructor: (@config, @state, @pipeline) ->
@log "debug", "Pipe created" if @config.debug
obj = @constructor
while obj
@config[k] ?= cloned v for k, v of obj.configDefaults
@state[k] ?= cloned v for k, v of obj.stateDefaults
obj = obj.__super__?.constructor
@outputs = []
init: -> null
start: (res) -> res
stop: -> null
#### Helpers for modifications
# Clone file and modify property
modifyFile: (file, key, value) ->
nfile = {}
for k, v of file
nfile[k] = v
__key = PI:KEY:<KEY>END_PIkey}"
if nfile[__key] is undefined
nfile[__key] = file[key]
nfile[key] = value
nfile
#### Pipe local file management
# Add a new file into the pipeline.
fileAdd: (name) ->
return RSVP.Promise.resolve() if @state.localFiles[name]
@log "debug", "LA #{name}" if @config.debug
BasePipe::add.call @,
name:name
add:false
.then (res) =>
@state.localFiles[name] =
name:name
added:res.add
res
.catch (err) =>
@log "error", err
@log "error", err.stack
# Message file change
fileChange: (name, content) ->
file = @state.localFiles[name]
return if not file?.added
@log "debug", "LC #{name}" if @config.debug
if file.changing
@state.localFiles[name].reread = true
return
file.changing = true
if content is undefined
p = @getFileContent name
else
p = RSVP.Promise.resolve content
p
.then (content) =>
hash = crypto.createHash "sha1"
hash.update content
digest = hash.digest "hex"
if digest isnt file.hash
file.hash = digest
sendfile = @modifyFile file, "content", content
BasePipe::change.call @, sendfile
else
null
.then (res) =>
file.changing = false
if file.reread
file.reread = false
@fileChange name
res
.catch (err) =>
@log "error", err
@log "error", err.stack
# Remove a file from the pipeline
fileUnlink: (name) ->
file = @state.localFiles[name]
return if not file?.added
@log "debug", "LD #{name}" if @config.debug
BasePipe::unlink.call @, file
.then (res) =>
delete @state.localFiles[name]
res
.catch (err) =>
@log "error", err
@log "error", err.stack
#### Interpipe communication
# Process add from previous pipes
_add_in: (file) ->
if minimatch file.name, @config.pattern
@log "debug", "A #{file.name}" if @config.debug
file = @modifyFile file, "name", @rename file.name if @rename
@add file
else if @config.passthrough
BasePipe::add.call @, file
else
file
# Relay add to subsequent pipes
add: (file) ->
p = RSVP.Promise.resolve file
if @outputs.length
RSVP.all @outputs.map (o) ->
p.then (arg) -> o._add_in arg
.then (res) ->
res.reduce (p, file) ->
name:file.name
add:p.add||file.add
, {}
else
p
# Process change from previous pipes
_change_in: (file) ->
if minimatch file.name, @config.pattern
@log "debug", "M #{file.name}" if @config.debug
file = @modifyFile file, "name", @rename file.name if @rename
@change file
else if @config.passthrough
BasePipe::change.call @, file
else
file
# Relay change to subsequent pipes
change: (file) ->
p = RSVP.Promise.resolve file
if @outputs.length
RSVP.all @outputs.map (o) ->
p.then (arg) -> o._change_in arg
.then (res) ->
res.reduce (p, file) ->
if Array.isArray file
file.reduce (p, file) ->
p.push file if file not in p
p
, p
else
p.push file if file not in p
p
, []
else
p
# Process unlink from previous pipes
_unlink_in: (file) ->
if minimatch file.name, @config.pattern
@log "debug", "D #{file.name}" if @config.debug
file = @modifyFile file, "name", @rename file.name if @rename
@unlink file
else if @config.passthrough
BasePipe::unlink.call @, file
else
file
# Relay unlink to subsequent pipes
unlink: (file) ->
p = RSVP.Promise.resolve file
if @outputs.length
RSVP.all @outputs.map (o) ->
p.then (arg) -> o._unlink_in arg
.then (res) ->
res.reduce (p, file) ->
if Array.isArray file
file.reduce (p, file) ->
p.push file if file not in p
p
, p
else
p.push file if file not in p
p
, []
else
p
broadcast: (req) ->
log: (level, args...) ->
@pipeline.logger.log level, "#{@config.type}(#{@config.id})", args...
class CompileInputListPipe extends BasePipe
@stateDefaults:
files: []
start: ->
@started = true
super @fileAdd(@config.filename).then => @compile()
add: (file) ->
file.add = true
@state.files.push file.name
@compile() if @started
if @config.passthrough
super file
else
file
unlink: (file) ->
@state.files.splice (@state.files.indexOf file.name), 1
@compile() if @started
if @config.passthrough
super file
else
file
class CompileInputDataPipe extends BasePipe
@stateDefaults:
files: {}
start: ->
@started = true
super @fileAdd(@config.filename).then => @compile()
add: (file) ->
file.add = true
@state.files[file.name] = ""
if @config.passthrough
super file
else
file
change: (file) ->
@state.files[file.name] = file.content
@compile() if @started
if @config.passthrough
super file
else
file
unlink: (file) ->
delete @state.files[file.name]
@compile() if @started
if @config.passthrough
super file
else
file
class DependsPipe extends BasePipe
@stateDefaults:
depsFor: {}
depsOn: {}
files: {}
backlog: {}
_compile: (names) ->
depsOn = @state.depsOn
names.reduce (p, i) =>
if Object.keys(depsOn[i]).length
p
else
p
.then => @fileAdd i
.then => @compile i
, RSVP.Promise.resolve()
start: ->
@started = true
depsOn = @state.depsOn
names = []
for name of @state.backlog
delete @state.backlog[name]
names.push name if not Object.keys(depsOn[name]).length
@_compile names
add: (file) ->
file.add = true
@state.depsFor[file.name] = {}
@state.depsOn[file.name] = {}
@state.files[file.name] = new Buffer ""
depsOn = @state.depsOn[file.name]
for name, deps of @state.depsFor
depsOn[name] = true if deps[file.name]
if @started
if Object.keys(depsOn).length
file
else
@fileAdd file.name
else
@state.backlog[file.name] = true
file
change: (file) ->
@state.files[file.name] = file.content
deps = @dependenciesFor file.name
old = @state.depsFor[file.name]
@state.depsFor[file.name] = {}
for dep in deps
@state.depsFor[file.name][dep] = true
deps = @state.depsFor[file.name]
add = []
unlink = []
for dep of old
# obsolete dep
if not deps[dep]
c = @state.depsOn[dep]
if c
delete c[file.name]
add.push dep
for dep of deps
# new dep
if not old[dep]
c = @state.depsOn[dep]
if c
c[file.name] = true
unlink.push dep
promise = unlink.reduce (p, i) =>
p.then => @fileUnlink i
, RSVP.Promise.resolve()
if @started
promise = promise.then => @_compile add
else
for name in add
@state.backlog[name] = true
promise.then =>
if @started
depsOn = @state.depsOn[file.name]
if Object.keys(depsOn).length
@_compile Object.keys depsOn
else
@_compile [file.name]
else
@state.backlog[file.name] = true
file
unlink: (file) ->
deps = @state.depsFor[file.name]
delete @state.depsFor[file.name]
delete @state.depsOn[file.name]
delete @state.files[file.name]
delete @state.backlog[file.name]
add = []
for dep of deps
depsOn = @state.depsOn[dep]
delete depsOn[file.name]
add.push dep
if @started
@_compile add
.then =>
@fileUnlink file.name
else
for name in add
@state.backlog[name] = true
@fileUnlink file.name
module.exports.BasePipe = BasePipe
module.exports.CompileInputListPipe = CompileInputListPipe
module.exports.CompileInputDataPipe = CompileInputDataPipe
module.exports.DependsPipe = DependsPipe
|
[
{
"context": "###\n * https://github.com/jkuetemeier/gulp-tasks-common\n *\n * Copyright (c) 2014 Jörg K",
"end": 37,
"score": 0.9992951154708862,
"start": 26,
"tag": "USERNAME",
"value": "jkuetemeier"
},
{
"context": "temeier/gulp-tasks-common\n *\n * Copyright (c) 2014 Jörg Kütemei... | test/register_tasks.test.coffee | kuetemeier/gulp-tasks-common | 0 | ###
* https://github.com/jkuetemeier/gulp-tasks-common
*
* Copyright (c) 2014 Jörg Kütemeier
* Licensed under the MIT license.
###
# patch error stack to display correct line numbers for CoffeeScript
require 'coffee-errors'

requireUncached = require './tools/requireuncached'

# set up test stack
chai = require 'chai'
# NOTE(review): `sinon` is required but never referenced below (sinon-chai is
# wired in separately); confirm it is still needed before removing.
sinon = require 'sinon'
expect = chai.expect
chai.use require 'sinon-chai'

gulp = undefined

# using compiled JavaScript file here to be sure module works
common = require('..')

describe 'register_tasks', ->
  beforeEach ->
    # Fresh gulp instance per spec so task registrations do not leak between
    # tests; `common` is re-pointed at the test helper wrapper.
    gulp = requireUncached 'gulp'
    common = require './tools/common'

  it 'is defined', ->
    expect(common.register_tasks).to.be.a('function')

  it 'can be called and registers tasks', ->
    common.register_tasks gulp
    expect(gulp.tasks.taskEnabled).to.be.an('object')

  it 'throws an exception if *gulp* variable is undefined', ->
    expect( ->
      common.register_tasks undefined
    ).to.throw()

  # Skipped: left as work-in-progress (note the debugging console.logs).
  it.skip 'should generate default tasks', ->
    common.register_tasks gulp
    expect(gulp.tasks.taskEnabled).to.be.an('object')
    expect(gulp.tasks.taskOwnTask).to.be.an('object')
    console.log gulp.tasks
    console.log gulp.tasks.taskEnabled.fn()
    expect(gulp.tasks.taskEnabled.fn()).to.equal('taskEnabled')

  it 'should not generate disabled tasks', ->
    common.register_tasks gulp
    expect(gulp.tasks.taskDisabled).to.be.undefined
    expect(gulp.tasks.taskOwnTaskDisabled).to.be.undefined
| 97194 | ###
* https://github.com/jkuetemeier/gulp-tasks-common
*
* Copyright (c) 2014 <NAME>
* Licensed under the MIT license.
###
# patch error stack to display correct line numbers for CoffeeScript
require 'coffee-errors'
requireUncached = require './tools/requireuncached'
# set up test stack
chai = require 'chai'
sinon = require 'sinon'
expect = chai.expect
chai.use require 'sinon-chai'
gulp = undefined
# using compiled JavaScript file here to be sure module works
common = require('..')
describe 'register_tasks', ->
beforeEach ->
gulp = requireUncached 'gulp'
common = require './tools/common'
it 'is defined', ->
expect(common.register_tasks).to.be.a('function')
it 'can be called and registers tasks', ->
common.register_tasks gulp
expect(gulp.tasks.taskEnabled).to.be.an('object')
it 'throws an exception if *gulp* variable is undefined', ->
expect( ->
common.register_tasks undefined
).to.throw()
it.skip 'should generate default tasks', ->
common.register_tasks gulp
expect(gulp.tasks.taskEnabled).to.be.an('object')
expect(gulp.tasks.taskOwnTask).to.be.an('object')
console.log gulp.tasks
console.log gulp.tasks.taskEnabled.fn()
expect(gulp.tasks.taskEnabled.fn()).to.equal('taskEnabled')
it 'should not generate disabled tasks', ->
common.register_tasks gulp
expect(gulp.tasks.taskDisabled).to.be.undefined
expect(gulp.tasks.taskOwnTaskDisabled).to.be.undefined
| true | ###
* https://github.com/jkuetemeier/gulp-tasks-common
*
* Copyright (c) 2014 PI:NAME:<NAME>END_PI
* Licensed under the MIT license.
###
# patch error stack to display correct line numbers for CoffeeScript
require 'coffee-errors'
requireUncached = require './tools/requireuncached'
# set up test stack
chai = require 'chai'
sinon = require 'sinon'
expect = chai.expect
chai.use require 'sinon-chai'
gulp = undefined
# using compiled JavaScript file here to be sure module works
common = require('..')
describe 'register_tasks', ->
beforeEach ->
gulp = requireUncached 'gulp'
common = require './tools/common'
it 'is defined', ->
expect(common.register_tasks).to.be.a('function')
it 'can be called and registers tasks', ->
common.register_tasks gulp
expect(gulp.tasks.taskEnabled).to.be.an('object')
it 'throws an exception if *gulp* variable is undefined', ->
expect( ->
common.register_tasks undefined
).to.throw()
it.skip 'should generate default tasks', ->
common.register_tasks gulp
expect(gulp.tasks.taskEnabled).to.be.an('object')
expect(gulp.tasks.taskOwnTask).to.be.an('object')
console.log gulp.tasks
console.log gulp.tasks.taskEnabled.fn()
expect(gulp.tasks.taskEnabled.fn()).to.equal('taskEnabled')
it 'should not generate disabled tasks', ->
common.register_tasks gulp
expect(gulp.tasks.taskDisabled).to.be.undefined
expect(gulp.tasks.taskOwnTaskDisabled).to.be.undefined
|
[
{
"context": "e CoffeeScript-Object-Notation https://github.com/bevry/cson )\n\n#########################################",
"end": 155,
"score": 0.9996737837791443,
"start": 150,
"tag": "USERNAME",
"value": "bevry"
},
{
"context": "\"epub\"\n \"epub3\"\n]\n\nauthor:\n name: \"R... | package/package_base.cson | Treinetic/readium-js-viewer | 21 |
# NPM package file, see https://docs.npmjs.com/files/package.json
# Oh, this is CSON, not JSON ( see CoffeeScript-Object-Notation https://github.com/bevry/cson )
#####################################################################
#####################################################################
# Master source file used to generate package.json
# DEFAULT METHOD (package.json must exist and be valid):
# npm run cson2json
#
# ALTERNATIVE METHOD (in case package.json is corrupted):
#
# node ./readium-js/readium-shared-js/readium-build-tools/concat.js "package/*.cson" package.cson
# &&
# node ./readium-js/readium-shared-js/node_modules/cson/bin/cson2json package.cson > package_.json
# &&
# node ./readium-js/readium-shared-js/node_modules/rimraf/bin.js package.json
# &&
# node ./readium-js/readium-shared-js/readium-build-tools/concat.js package_.json package.json utf8
# &&
# node ./readium-js/readium-shared-js/node_modules/rimraf/bin.js package.cson
# &&
# node readium-js/readium-shared-js/readium-build-tools/optimizePackageJsonScripts.js
#
# Note that on some Windows terminals / command line environment (e.g. GitShell),
# the package.json file generated by the ALTERNATIVE method above
# may be encoded with ucs2 instead of utf8, resulting in a script failure.
# Simply replace the "utf8" parameter in the 4th command line above with "ucs2".
#####################################################################
#####################################################################
'/*': '----------------------------------------------------------------------------'
'..': 'This package.json (JSON) is automatically generated from package/*.cson (CSON)'
'.-': "DO NOT invoke 'npm init' or 'npm install --save' ('--save-dev' / '--save-optional')"
'._': 'as this would overwrite / update the JSON, not the master CSON!'
'*/': '----------------------------------------------------------------------------'
#####################################################################
name: "readium-js-viewer"
version: "0.31.1"
engines:
node: ">=4"
npm: ">=2"
yarn: ">=0.23"
description: "Readium web app - online cloud reader, offline Chrome extension/app"
keywords: [
"readium"
"epub"
"epub3"
]
author:
name: "Readium (Daniel Weck)"
email: "daniel.weck@gmail.com"
url: "http://github.com/readium"
license: "BSD-3-Clause"
licenses: [
type: "BSD-3-Clause"
url: "http://opensource.org/licenses/BSD-3-Clause"
]
homepage: "http://github.com/Treinetic/readium-js-viewer"
bugs:
url: "http://github.com/Treinetic/readium-js-viewer/issues"
email: "daniel.weck@gmail.com"
repository:
type: "git"
url: "git://github.com/Treinetic/readium-js-viewer.git"
#####################################################################
files: [
"build-output/"
"dev/"
"dist/"
"license.txt"
"README.md"
]
# "main" is designed for Node/CommonJS modules, not RequireJS/AMD ones.
# See http://requirejs.org/docs/commonjs.html
# See http://requirejs.org/docs/node.html
#
#main: "build-output/_multiple-bundles/readium-js-viewer.js"
#####################################################################
dependencies:
#"parallelshell": 'latest'
"concurrently": '^3.6.0'
"npm-run-all": '^4.1.3'
"opener": '^1.4.3'
#"http-server": 'latest'
"http-server": 'github:danielweck/http-server#master'
# SEE: https://github.com/indexzero/http-server/issues/154
#####################################################################
# These dependencies can be fetched by using the "--dev" option
# in "npm install --dev readium-js-viewer".
devDependencies:
# see readium-js:
# "crypto-js", "zipjs", "requirejs-text"
# see readium-shared-js:
# "requirejs", "almond", "jquery"
#"keymaster": 'latest'
"keymaster": 'github:danielweck/keymaster#master'
#"jath": 'latest'
"jath": 'github:danielweck/jath#master'
# "libxmljs" node-gyp python < v3 :(
#"jquery-xpath": 'latest'
"mathjax-single-file": 'github:danielweck/MathJax-single-file#master'
"bootstrap": '3.x'
"bootstrap-accessibility-plugin": 'github:paypal/bootstrap-accessibility-plugin#master'
"hammerjs": '^2.0.8'
"jquery-hammerjs": '^2.0.0'
"screenfull": '^3.3.2'
"JSON2": '^0.1.0'
"hogan": '^1.0.2'
"requirejs-hogan-plugin": '^0.3.1'
"remotestoragejs": '^1.0.3'
"spin.js": '2.x'
# Below dependencies are only used in build tasks (above are runtime deps):
# see readium-shared-js:
# "cson", "jshint", "glob", "rimraf", "mkdirp", "watch", "live-reload"
# see readium-build-tools (in readium-shared-js)
# versionsMaker.js
"gift": '^0.10.1'
# Automatically included with the CLI
# "clean-css": 'latest'
"clean-css-cli": '^4.1.10'
"uglify-js": '^3.4.1'
#"nodeunit": 'latest'
"mocha": '^5.2.0'
"wd": '^1.9.0'
"phantomjs-prebuilt": '^2.1.16'
#"grunt": 'latest'
"grunt-selenium-webdriver": 'github:levexis/grunt-selenium-webdriver#master'
#"selenium-webdriver": 'latest'
"crx": '^3.2.1'
# https://github.com/domenic/chai-as-promised/issues/184
"chai": '3.x'
"chai-as-promised": '^7.1.1'
# TODO now @octokit/rest
"github": '12.x'
"hogan.js": '^3.0.2'
"httpreq": '^0.4.24'
"xml2js": '^0.4.19'
"decompress-zip": '^0.3.1'
#"unzip": 'latest'
| 136771 |
# NPM package file, see https://docs.npmjs.com/files/package.json
# Oh, this is CSON, not JSON ( see CoffeeScript-Object-Notation https://github.com/bevry/cson )
#####################################################################
#####################################################################
# Master source file used to generate package.json
# DEFAULT METHOD (package.json must exist and be valid):
# npm run cson2json
#
# ALTERNATIVE METHOD (in case package.json is corrupted):
#
# node ./readium-js/readium-shared-js/readium-build-tools/concat.js "package/*.cson" package.cson
# &&
# node ./readium-js/readium-shared-js/node_modules/cson/bin/cson2json package.cson > package_.json
# &&
# node ./readium-js/readium-shared-js/node_modules/rimraf/bin.js package.json
# &&
# node ./readium-js/readium-shared-js/readium-build-tools/concat.js package_.json package.json utf8
# &&
# node ./readium-js/readium-shared-js/node_modules/rimraf/bin.js package.cson
# &&
# node readium-js/readium-shared-js/readium-build-tools/optimizePackageJsonScripts.js
#
# Note that on some Windows terminals / command line environment (e.g. GitShell),
# the package.json file generated by the ALTERNATIVE method above
# may be encoded with ucs2 instead of utf8, resulting in a script failure.
# Simply replace the "utf8" parameter in the 4th command line above with "ucs2".
#####################################################################
#####################################################################
'/*': '----------------------------------------------------------------------------'
'..': 'This package.json (JSON) is automatically generated from package/*.cson (CSON)'
'.-': "DO NOT invoke 'npm init' or 'npm install --save' ('--save-dev' / '--save-optional')"
'._': 'as this would overwrite / update the JSON, not the master CSON!'
'*/': '----------------------------------------------------------------------------'
#####################################################################
name: "readium-js-viewer"
version: "0.31.1"
engines:
node: ">=4"
npm: ">=2"
yarn: ">=0.23"
description: "Readium web app - online cloud reader, offline Chrome extension/app"
keywords: [
"readium"
"epub"
"epub3"
]
author:
name: "Readium (<NAME>)"
email: "<EMAIL>"
url: "http://github.com/readium"
license: "BSD-3-Clause"
licenses: [
type: "BSD-3-Clause"
url: "http://opensource.org/licenses/BSD-3-Clause"
]
homepage: "http://github.com/Treinetic/readium-js-viewer"
bugs:
url: "http://github.com/Treinetic/readium-js-viewer/issues"
email: "<EMAIL>"
repository:
type: "git"
url: "git://github.com/Treinetic/readium-js-viewer.git"
#####################################################################
files: [
"build-output/"
"dev/"
"dist/"
"license.txt"
"README.md"
]
# "main" is designed for Node/CommonJS modules, not RequireJS/AMD ones.
# See http://requirejs.org/docs/commonjs.html
# See http://requirejs.org/docs/node.html
#
#main: "build-output/_multiple-bundles/readium-js-viewer.js"
#####################################################################
dependencies:
#"parallelshell": 'latest'
"concurrently": '^3.6.0'
"npm-run-all": '^4.1.3'
"opener": '^1.4.3'
#"http-server": 'latest'
"http-server": 'github:danielweck/http-server#master'
# SEE: https://github.com/indexzero/http-server/issues/154
#####################################################################
# These dependencies can be fetched by using the "--dev" option
# in "npm install --dev readium-js-viewer".
devDependencies:
# see readium-js:
# "crypto-js", "zipjs", "requirejs-text"
# see readium-shared-js:
# "requirejs", "almond", "jquery"
#"keymaster": 'latest'
"keymaster": 'github:danielweck/keymaster#master'
#"jath": 'latest'
"jath": 'github:danielweck/jath#master'
# "libxmljs" node-gyp python < v3 :(
#"jquery-xpath": 'latest'
"mathjax-single-file": 'github:danielweck/MathJax-single-file#master'
"bootstrap": '3.x'
"bootstrap-accessibility-plugin": 'github:paypal/bootstrap-accessibility-plugin#master'
"hammerjs": '^2.0.8'
"jquery-hammerjs": '^2.0.0'
"screenfull": '^3.3.2'
"JSON2": '^0.1.0'
"hogan": '^1.0.2'
"requirejs-hogan-plugin": '^0.3.1'
"remotestoragejs": '^1.0.3'
"spin.js": '2.x'
# Below dependencies are only used in build tasks (above are runtime deps):
# see readium-shared-js:
# "cson", "jshint", "glob", "rimraf", "mkdirp", "watch", "live-reload"
# see readium-build-tools (in readium-shared-js)
# versionsMaker.js
"gift": '^0.10.1'
# Automatically included with the CLI
# "clean-css": 'latest'
"clean-css-cli": '^4.1.10'
"uglify-js": '^3.4.1'
#"nodeunit": 'latest'
"mocha": '^5.2.0'
"wd": '^1.9.0'
"phantomjs-prebuilt": '^2.1.16'
#"grunt": 'latest'
"grunt-selenium-webdriver": 'github:levexis/grunt-selenium-webdriver#master'
#"selenium-webdriver": 'latest'
"crx": '^3.2.1'
# https://github.com/domenic/chai-as-promised/issues/184
"chai": '3.x'
"chai-as-promised": '^7.1.1'
# TODO now @octokit/rest
"github": '12.x'
"hogan.js": '^3.0.2'
"httpreq": '^0.4.24'
"xml2js": '^0.4.19'
"decompress-zip": '^0.3.1'
#"unzip": 'latest'
| true |
# NPM package file, see https://docs.npmjs.com/files/package.json
# Oh, this is CSON, not JSON ( see CoffeeScript-Object-Notation https://github.com/bevry/cson )
#####################################################################
#####################################################################
# Master source file used to generate package.json
# DEFAULT METHOD (package.json must exist and be valid):
# npm run cson2json
#
# ALTERNATIVE METHOD (in case package.json is corrupted):
#
# node ./readium-js/readium-shared-js/readium-build-tools/concat.js "package/*.cson" package.cson
# &&
# node ./readium-js/readium-shared-js/node_modules/cson/bin/cson2json package.cson > package_.json
# &&
# node ./readium-js/readium-shared-js/node_modules/rimraf/bin.js package.json
# &&
# node ./readium-js/readium-shared-js/readium-build-tools/concat.js package_.json package.json utf8
# &&
# node ./readium-js/readium-shared-js/node_modules/rimraf/bin.js package.cson
# &&
# node readium-js/readium-shared-js/readium-build-tools/optimizePackageJsonScripts.js
#
# Note that on some Windows terminals / command line environment (e.g. GitShell),
# the package.json file generated by the ALTERNATIVE method above
# may be encoded with ucs2 instead of utf8, resulting in a script failure.
# Simply replace the "utf8" parameter in the 4th command line above with "ucs2".
#####################################################################
#####################################################################
'/*': '----------------------------------------------------------------------------'
'..': 'This package.json (JSON) is automatically generated from package/*.cson (CSON)'
'.-': "DO NOT invoke 'npm init' or 'npm install --save' ('--save-dev' / '--save-optional')"
'._': 'as this would overwrite / update the JSON, not the master CSON!'
'*/': '----------------------------------------------------------------------------'
#####################################################################
name: "readium-js-viewer"
version: "0.31.1"
engines:
node: ">=4"
npm: ">=2"
yarn: ">=0.23"
description: "Readium web app - online cloud reader, offline Chrome extension/app"
keywords: [
"readium"
"epub"
"epub3"
]
author:
name: "Readium (PI:NAME:<NAME>END_PI)"
email: "PI:EMAIL:<EMAIL>END_PI"
url: "http://github.com/readium"
license: "BSD-3-Clause"
licenses: [
type: "BSD-3-Clause"
url: "http://opensource.org/licenses/BSD-3-Clause"
]
homepage: "http://github.com/Treinetic/readium-js-viewer"
bugs:
url: "http://github.com/Treinetic/readium-js-viewer/issues"
email: "PI:EMAIL:<EMAIL>END_PI"
repository:
type: "git"
url: "git://github.com/Treinetic/readium-js-viewer.git"
#####################################################################
files: [
"build-output/"
"dev/"
"dist/"
"license.txt"
"README.md"
]
# "main" is designed for Node/CommonJS modules, not RequireJS/AMD ones.
# See http://requirejs.org/docs/commonjs.html
# See http://requirejs.org/docs/node.html
#
#main: "build-output/_multiple-bundles/readium-js-viewer.js"
#####################################################################
dependencies:
#"parallelshell": 'latest'
"concurrently": '^3.6.0'
"npm-run-all": '^4.1.3'
"opener": '^1.4.3'
#"http-server": 'latest'
"http-server": 'github:danielweck/http-server#master'
# SEE: https://github.com/indexzero/http-server/issues/154
#####################################################################
# These dependencies can be fetched by using the "--dev" option
# in "npm install --dev readium-js-viewer".
devDependencies:
# see readium-js:
# "crypto-js", "zipjs", "requirejs-text"
# see readium-shared-js:
# "requirejs", "almond", "jquery"
#"keymaster": 'latest'
"keymaster": 'github:danielweck/keymaster#master'
#"jath": 'latest'
"jath": 'github:danielweck/jath#master'
# "libxmljs" node-gyp python < v3 :(
#"jquery-xpath": 'latest'
"mathjax-single-file": 'github:danielweck/MathJax-single-file#master'
"bootstrap": '3.x'
"bootstrap-accessibility-plugin": 'github:paypal/bootstrap-accessibility-plugin#master'
"hammerjs": '^2.0.8'
"jquery-hammerjs": '^2.0.0'
"screenfull": '^3.3.2'
"JSON2": '^0.1.0'
"hogan": '^1.0.2'
"requirejs-hogan-plugin": '^0.3.1'
"remotestoragejs": '^1.0.3'
"spin.js": '2.x'
# Below dependencies are only used in build tasks (above are runtime deps):
# see readium-shared-js:
# "cson", "jshint", "glob", "rimraf", "mkdirp", "watch", "live-reload"
# see readium-build-tools (in readium-shared-js)
# versionsMaker.js
"gift": '^0.10.1'
# Automatically included with the CLI
# "clean-css": 'latest'
"clean-css-cli": '^4.1.10'
"uglify-js": '^3.4.1'
#"nodeunit": 'latest'
"mocha": '^5.2.0'
"wd": '^1.9.0'
"phantomjs-prebuilt": '^2.1.16'
#"grunt": 'latest'
"grunt-selenium-webdriver": 'github:levexis/grunt-selenium-webdriver#master'
#"selenium-webdriver": 'latest'
"crx": '^3.2.1'
# https://github.com/domenic/chai-as-promised/issues/184
"chai": '3.x'
"chai-as-promised": '^7.1.1'
# TODO now @octokit/rest
"github": '12.x'
"hogan.js": '^3.0.2'
"httpreq": '^0.4.24'
"xml2js": '^0.4.19'
"decompress-zip": '^0.3.1'
#"unzip": 'latest'
|
[
{
"context": "ething went wrong, please contact <a href='mailto:info@are.na'>info@are.na</a> if the problem persists.\")\n",
"end": 1428,
"score": 0.9997109174728394,
"start": 1417,
"tag": "EMAIL",
"value": "info@are.na"
},
{
"context": "rong, please contact <a href='mailto:info@are.... | apps/tools/components/send_invitation/index.coffee | 1aurabrown/ervell | 0 | Promise = require 'bluebird-q'
{ API_URL } = require('sharify').data
{ track } = require '../../../../lib/analytics.coffee'
Serializer = require '../../../../components/form/serializer.coffee'
module.exports = ($el) ->
$form = $el.find '.js-form'
$submit = $el.find '.js-submit'
$errors = $el.find '.js-errors'
submissionTimeout = null
label = $submit.text()
onSuccess = () ->
$form.trigger 'reset'
$submit
.prop 'disabled', false
.text 'Sent!'
submissionTimeout = setTimeout (-> $submit.text label), 2500
track.submit 'Invitation sent from user'
onFailure = (message) ->
$errors
.show()
.html message
$submit
.prop 'disabled', false
.text 'Error'
submissionTimeout = setTimeout ->
$submit.text label
$errors.empty()
, 5000
track.error 'Invitation not sent, try again.'
$form.on 'submit', (e) ->
e.preventDefault()
clearTimeout(submissionTimeout)
serializer = new Serializer $form
$errors.empty()
$submit
.prop 'disabled', true
.text 'Sending...'
Promise $.ajax
url: "#{API_URL}/invitees/invite"
type: 'POST'
data: serializer.data()
.then onSuccess
.catch ({ responseJSON: { message, description }}) ->
onFailure("#{message}<br/>#{description}")
.catch ->
onFailure("Something went wrong, please contact <a href='mailto:info@are.na'>info@are.na</a> if the problem persists.")
| 156362 | Promise = require 'bluebird-q'
{ API_URL } = require('sharify').data
{ track } = require '../../../../lib/analytics.coffee'
Serializer = require '../../../../components/form/serializer.coffee'
module.exports = ($el) ->
$form = $el.find '.js-form'
$submit = $el.find '.js-submit'
$errors = $el.find '.js-errors'
submissionTimeout = null
label = $submit.text()
onSuccess = () ->
$form.trigger 'reset'
$submit
.prop 'disabled', false
.text 'Sent!'
submissionTimeout = setTimeout (-> $submit.text label), 2500
track.submit 'Invitation sent from user'
onFailure = (message) ->
$errors
.show()
.html message
$submit
.prop 'disabled', false
.text 'Error'
submissionTimeout = setTimeout ->
$submit.text label
$errors.empty()
, 5000
track.error 'Invitation not sent, try again.'
$form.on 'submit', (e) ->
e.preventDefault()
clearTimeout(submissionTimeout)
serializer = new Serializer $form
$errors.empty()
$submit
.prop 'disabled', true
.text 'Sending...'
Promise $.ajax
url: "#{API_URL}/invitees/invite"
type: 'POST'
data: serializer.data()
.then onSuccess
.catch ({ responseJSON: { message, description }}) ->
onFailure("#{message}<br/>#{description}")
.catch ->
onFailure("Something went wrong, please contact <a href='mailto:<EMAIL>'><EMAIL></a> if the problem persists.")
| true | Promise = require 'bluebird-q'
{ API_URL } = require('sharify').data
{ track } = require '../../../../lib/analytics.coffee'
Serializer = require '../../../../components/form/serializer.coffee'
module.exports = ($el) ->
$form = $el.find '.js-form'
$submit = $el.find '.js-submit'
$errors = $el.find '.js-errors'
submissionTimeout = null
label = $submit.text()
onSuccess = () ->
$form.trigger 'reset'
$submit
.prop 'disabled', false
.text 'Sent!'
submissionTimeout = setTimeout (-> $submit.text label), 2500
track.submit 'Invitation sent from user'
onFailure = (message) ->
$errors
.show()
.html message
$submit
.prop 'disabled', false
.text 'Error'
submissionTimeout = setTimeout ->
$submit.text label
$errors.empty()
, 5000
track.error 'Invitation not sent, try again.'
$form.on 'submit', (e) ->
e.preventDefault()
clearTimeout(submissionTimeout)
serializer = new Serializer $form
$errors.empty()
$submit
.prop 'disabled', true
.text 'Sending...'
Promise $.ajax
url: "#{API_URL}/invitees/invite"
type: 'POST'
data: serializer.data()
.then onSuccess
.catch ({ responseJSON: { message, description }}) ->
onFailure("#{message}<br/>#{description}")
.catch ->
onFailure("Something went wrong, please contact <a href='mailto:PI:EMAIL:<EMAIL>END_PI'>PI:EMAIL:<EMAIL>END_PI</a> if the problem persists.")
|
[
{
"context": " _userProperty : 'user'\n _key : 'passport'\n req.__defineGetter__ '_passport', =>\n insta",
"end": 228,
"score": 0.9228522777557373,
"start": 220,
"tag": "KEY",
"value": "passport"
},
{
"context": "rt', =>\n instance: passport\n session : use... | node.js/node_modules/passport-stub/lib.coffee | stanxii/ngb | 3 | done = (user, done) -> done null, user
passportStub = (req, res, next) =>
return next() unless @active
passport =
deserializeUser: done
serializeUser : done
_userProperty : 'user'
_key : 'passport'
req.__defineGetter__ '_passport', =>
instance: passport
session : user: @user
req.__defineGetter__ 'user', => @user
next()
exports.install = (@app) -> @app.stack.unshift
route: ''
handle: passportStub
_id: 'passport.stub'
exports.uninstall = ->
return unless @app?
@app.stack.forEach (middleware, index, stack) ->
stack.splice index, 1 if middleware._id is 'passport.stub'
exports.login = (user) ->
throw new Error 'Passport Stub not installed.
Please run "passportStub.install(app)" first.' unless @app?
@active = yes
@user = user
exports.logout = -> @active = no | 130320 | done = (user, done) -> done null, user
passportStub = (req, res, next) =>
return next() unless @active
passport =
deserializeUser: done
serializeUser : done
_userProperty : 'user'
_key : '<KEY>'
req.__defineGetter__ '_passport', =>
instance: passport
session : user: @user
req.__defineGetter__ 'user', => @user
next()
exports.install = (@app) -> @app.stack.unshift
route: ''
handle: passportStub
_id: 'passport.stub'
exports.uninstall = ->
return unless @app?
@app.stack.forEach (middleware, index, stack) ->
stack.splice index, 1 if middleware._id is 'passport.stub'
exports.login = (user) ->
throw new Error 'Passport Stub not installed.
Please run "passportStub.install(app)" first.' unless @app?
@active = yes
@user = user
exports.logout = -> @active = no | true | done = (user, done) -> done null, user
passportStub = (req, res, next) =>
return next() unless @active
passport =
deserializeUser: done
serializeUser : done
_userProperty : 'user'
_key : 'PI:KEY:<KEY>END_PI'
req.__defineGetter__ '_passport', =>
instance: passport
session : user: @user
req.__defineGetter__ 'user', => @user
next()
exports.install = (@app) -> @app.stack.unshift
route: ''
handle: passportStub
_id: 'passport.stub'
exports.uninstall = ->
return unless @app?
@app.stack.forEach (middleware, index, stack) ->
stack.splice index, 1 if middleware._id is 'passport.stub'
exports.login = (user) ->
throw new Error 'Passport Stub not installed.
Please run "passportStub.install(app)" first.' unless @app?
@active = yes
@user = user
exports.logout = -> @active = no |
[
{
"context": "9\\[\\]/\"'\\*=~\\-\\u2013\\u2014])|$)///gi\n\t,\n\t\tosis: [\"Jer\"]\n\t\tregexp: ///(^|#{bcv_parser::regexps.pre_book}",
"end": 15641,
"score": 0.9159262180328369,
"start": 15638,
"tag": "NAME",
"value": "Jer"
},
{
"context": "exp: ///(^|#{bcv_parser::regexps.pre_book... | lib/bible-tools/lib/Bible-Passage-Reference-Parser/src/sk/regexps.coffee | saiba-mais/bible-lessons | 0 | bcv_parser::regexps.space = "[\\s\\xa0]"
bcv_parser::regexps.escaped_passage = ///
(?:^ | [^\x1f\x1e\dA-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ] ) # Beginning of string or not in the middle of a word or immediately following another book. Only count a book if it's part of a sequence: `Matt5John3` is OK, but not `1Matt5John3`
(
# Start inverted book/chapter (cb)
(?:
(?: ch (?: apters? | a?pts?\.? | a?p?s?\.? )? \s*
\d+ \s* (?: [\u2013\u2014\-] | through | thru | to) \s* \d+ \s*
(?: from | of | in ) (?: \s+ the \s+ book \s+ of )?\s* )
| (?: ch (?: apters? | a?pts?\.? | a?p?s?\.? )? \s*
\d+ \s*
(?: from | of | in ) (?: \s+ the \s+ book \s+ of )?\s* )
| (?: \d+ (?: th | nd | st ) \s*
ch (?: apter | a?pt\.? | a?p?\.? )? \s* #no plurals here since it's a single chapter
(?: from | of | in ) (?: \s+ the \s+ book \s+ of )? \s* )
)? # End inverted book/chapter (cb)
\x1f(\d+)(?:/\d+)?\x1f #book
(?:
/\d+\x1f #special Psalm chapters
| [\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014]
| title (?! [a-z] ) #could be followed by a number
| ver[šs]ov | kapitoly | kapitole | kapitolu | kapitol | hlavy | a[žz] | porov | pozri | alebo | kap | ff | - | a
| [b-e] (?! \w ) #a-e allows 1:1a
| $ #or the end of the string
)+
)
///gi
# These are the only valid ways to end a potential passage match. The closing parenthesis allows for fully capturing parentheses surrounding translations (ESV**)**. The last one, `[\d\x1f]` needs not to be +; otherwise `Gen5ff` becomes `\x1f0\x1f5ff`, and `adjust_regexp_end` matches the `\x1f5` and incorrectly dangles the ff.
bcv_parser::regexps.match_end_split = ///
\d \W* title
| \d \W* ff (?: [\s\xa0*]* \.)?
| \d [\s\xa0*]* [b-e] (?! \w )
| \x1e (?: [\s\xa0*]* [)\]\uff09] )? #ff09 is a full-width closing parenthesis
| [\d\x1f]
///gi
bcv_parser::regexps.control = /[\x1e\x1f]/g
bcv_parser::regexps.pre_book = "[^A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ]"
bcv_parser::regexps.first = "(?:Prv[áa]#{bcv_parser::regexps.space}+kniha|Prv[ýy]#{bcv_parser::regexps.space}+list|Prv[áa]|Prv[ýy]|1#{bcv_parser::regexps.space}+k|I|1)\\.?#{bcv_parser::regexps.space}*"
bcv_parser::regexps.second = "(?:Druh[áa]#{bcv_parser::regexps.space}+kniha|Druh[ýy]#{bcv_parser::regexps.space}+list|Druh[áa]|Druh[ýy]|2#{bcv_parser::regexps.space}+k|II|2)\\.?#{bcv_parser::regexps.space}*"
bcv_parser::regexps.third = "(?:Tretia#{bcv_parser::regexps.space}+kniha|Tretia|Tret[íi]|3#{bcv_parser::regexps.space}+k|III|3)\\.?#{bcv_parser::regexps.space}*"
bcv_parser::regexps.range_and = "(?:[&\u2013\u2014-]|(?:porov|pozri|alebo|a)|(?:a[žz]|-))"
bcv_parser::regexps.range_only = "(?:[\u2013\u2014-]|(?:a[žz]|-))"
# Each book regexp should return two parenthesized objects: an optional preliminary character and the book itself.
bcv_parser::regexps.get_books = (include_apocrypha, case_sensitive) ->
books = [
osis: ["Ps"]
apocrypha: true
extra: "2"
regexp: ///(\b)( # Don't match a preceding \d like usual because we only want to match a valid OSIS, which will never have a preceding digit.
Ps151
# Always follwed by ".1"; the regular Psalms parser can handle `Ps151` on its own.
)(?=\.1)///g # Case-sensitive because we only want to match a valid OSIS.
,
osis: ["Gen"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Moj[zž]i[sš]|Moj[zž]i[sš])|[y\xFD][\s\xa0]*list[\s\xa0]*Moj[zž]i[sš]|[y\xFD][\s\xa0]*Moj[zž]i[sš])ova|K(?:niha|\.)?[\s\xa0]*stvorenia|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*Moj[zž]i[sš]ova|(?:1[\s\xa0]*k|I)[\s\xa0]*Moj[zž]i[sš]ova|K(?:niha[\s\xa0]*p[o\xF4]|\.[\s\xa0]*p[o\xF4]|[\s\xa0]*p[o\xF4])vodu|1[\s\xa0]*Moj[zž]i[sš]ova|G(?:enezis|n)|1Moj|Gen|1[\s\xa0]*M)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Exod"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Moj[zž]i[sš]|Moj[zž]i[sš])|[y\xFD][\s\xa0]*list[\s\xa0]*Moj[zž]i[sš]|[y\xFD][\s\xa0]*Moj[zž]i[sš])ova|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*Moj[zž]i[sš]ova|(?:2[\s\xa0]*k|II)[\s\xa0]*Moj[zž]i[sš]ova|2[\s\xa0]*Moj[zž]i[sš]ova|Exodus|Ex(?:od)?|2Moj|2[\s\xa0]*M)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Bel"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:B[e\xE9]l(?:[\s\xa0]*a[\s\xa0]*drak)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Lev"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Tret(?:i(?:a[\s\xa0]*(?:kniha[\s\xa0]*Moj[zž]i[sš]|Moj[zž]i[sš])|[\s\xa0]*Moj[zž]i[sš])|\xED[\s\xa0]*Moj[zž]i[sš])ova|(?:III|3(?:[\s\xa0]*k)?)\.[\s\xa0]*Moj[zž]i[sš]ova|(?:III|3[\s\xa0]*k)[\s\xa0]*Moj[zž]i[sš]ova|3[\s\xa0]*Moj[zž]i[sš]ova|L(?:evitikus|v)|3Moj|Lev|3[\s\xa0]*M)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Num"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:[SŠ]tvrt[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Moj[zž]i[sš]|Moj[zž]i[sš])ova|(?:4(?:[\s\xa0]*k)?|IV)\.[\s\xa0]*Moj[zž]i[sš]ova|(?:4[\s\xa0]*k|IV)[\s\xa0]*Moj[zž]i[sš]ova|4[\s\xa0]*Moj[zž]i[sš]ova|N(?:umeri|m)|4Moj|Num|4[\s\xa0]*M)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Sir"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:K(?:niha[\s\xa0]*(?:Sirachov(?:ho[\s\xa0]*syn|c(?:ov)?)a|Ekleziastikus)|\.[\s\xa0]*(?:Sirachov(?:ho[\s\xa0]*syn|c(?:ov)?)a|Ekleziastikus)|[\s\xa0]*(?:Sirachov(?:ho[\s\xa0]*syn|c(?:ov)?)a|Ekleziastikus))|Sir(?:achovcova|achovec)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Wis"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:M[u\xFA]d(?:ros(?:ti?|ť))?|Wis)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Lam"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Pla[cč][\s\xa0]*Jeremi[a\xE1][sš]ov|Jeremi[a\xE1][sš]ov[\s\xa0]*Pla[cč]|K(?:niha[\s\xa0]*n[a\xE1]|\.[\s\xa0]*n[a\xE1]|[\s\xa0]*n[a\xE1])rekov|[ZŽ]alospevy|[ZŽ]alosp|N[a\xE1]reky|N[a\xE1]r|Lam)|(?:Pla[cč])
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["EpJer"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jeremi[a\xE1][sš]ov[\s\xa0]*list|EpJer)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Rev"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Zj(?:av(?:enie(?:[\s\xa0]*(?:Apo[sš]tola[\s\xa0]*J[a\xE1]|sv[a\xE4]t[e\xE9]ho[\s\xa0]*J[a\xE1]|J[a\xE1])na)?)?|v)?|Apokalypsa|Rev)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["PrMan"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Manasesova[\s\xa0]*modlitba|PrMan)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Deut"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Piata[\s\xa0]*(?:kniha[\s\xa0]*Moj[zž]i[sš]|Moj[zž]i[sš])ova|(?:5(?:[\s\xa0]*k)?|V)\.[\s\xa0]*Moj[zž]i[sš]ova|D(?:euteron[o\xF3]mium|t)|(?:5[\s\xa0]*k|V)[\s\xa0]*Moj[zž]i[sš]ova|5[\s\xa0]*Moj[zž]i[sš]ova|Deut|5[\s\xa0]*M)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Josh"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:J(?:\xF3zu(?:ov)?a|o(?:z(?:uova|u[ae])?|šu(?:ov)?a|s(?:u(?:ov)?a|h)))|Iosua)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Judg"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:K\.?[\s\xa0]*sudcov|S(?:udcovia|dc)|Sud(?:cov)?|Judg)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Ruth"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:R(?:uth?|\xFAt))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Esd"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|Ezdr[a\xE1][sš](?:ova)?)|[y\xFD][\s\xa0]*list[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|[y\xFD][\s\xa0]*Ezdr[a\xE1][sš](?:ova)?)|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|(?:1[\s\xa0]*k|I)[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|1(?:[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|Esd))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Esd"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|Ezdr[a\xE1][sš](?:ova)?)|[y\xFD][\s\xa0]*list[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|[y\xFD][\s\xa0]*Ezdr[a\xE1][sš](?:ova)?)|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|(?:2[\s\xa0]*k|II)[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|2(?:[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|Esd))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Isa"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:I(?:z(?:a[ij][a\xE1][sš])?|sa))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Sam"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*)?|(?:[y\xFD][\s\xa0]*list|[y\xFD])[\s\xa0]*)Samuelova|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*Samuelova|(?:2[\s\xa0]*k|II)[\s\xa0]*Samuelova|2(?:[\s\xa0]*Samuelova|[\s\xa0]*S(?:am)?|Sam))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Sam"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*)?|(?:[y\xFD][\s\xa0]*list|[y\xFD])[\s\xa0]*)Samuelova|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*Samuelova|(?:1[\s\xa0]*k|I)[\s\xa0]*Samuelova|1(?:[\s\xa0]*Samuelova|[\s\xa0]*S(?:am)?|Sam))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Kgs"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:[SŠ]tvrt[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Kr[a\xE1][lľ]|Kr[a\xE1][lľ])ov|Druh[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Kr[a\xE1][lľ]|Kr[a\xE1][lľ])ov|(?:Druh[y\xFD][\s\xa0]*list|(?:4[\s\xa0]*k|2(?:[\s\xa0]*k)?|I[IV]|4)\.)[\s\xa0]*Kr[a\xE1][lľ]ov|(?:Druh[y\xFD]|4)[\s\xa0]*Kr[a\xE1][lľ]ov|(?:4[\s\xa0]*k|2[\s\xa0]*k|I[IV])[\s\xa0]*Kr[a\xE1][lľ]ov|2(?:[\s\xa0]*Kr[a\xE1][lľ]ov|[\s\xa0]*Kr[lľ]|[\s\xa0]*Kr|Kgs))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Kgs"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Tret(?:i(?:a[\s\xa0]*(?:kniha[\s\xa0]*Kr[a\xE1][lľ]|Kr[a\xE1][lľ])|[\s\xa0]*Kr[a\xE1][lľ])|\xED[\s\xa0]*Kr[a\xE1][lľ])ov|Prv[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Kr[a\xE1][lľ]|Kr[a\xE1][lľ])ov|(?:Prv[y\xFD][\s\xa0]*list|(?:III|3[\s\xa0]*k|1(?:[\s\xa0]*k)?|[3I])\.)[\s\xa0]*Kr[a\xE1][lľ]ov|(?:Prv[y\xFD]|3)[\s\xa0]*Kr[a\xE1][lľ]ov|(?:III|3[\s\xa0]*k|1[\s\xa0]*k|I)[\s\xa0]*Kr[a\xE1][lľ]ov|1(?:[\s\xa0]*Kr[a\xE1][lľ]ov|[\s\xa0]*Kr[lľ]|[\s\xa0]*Kr|Kgs))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Chr"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|[y\xFD][\s\xa0]*list[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|[y\xFD][\s\xa0]*Paralipomenon|[y\xFD][\s\xa0]*Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|(?:2[\s\xa0]*k|II)[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|2(?:[\s\xa0]*Paralipomenon|[\s\xa0]*Kroni(?:ck[a\xE1]|k)|[\s\xa0]*Kron\xEDk|[\s\xa0]*Kron|[\s\xa0]*Krn|Chr))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Chr"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|[y\xFD][\s\xa0]*list[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|[y\xFD][\s\xa0]*Paralipomenon|[y\xFD][\s\xa0]*Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|(?:1[\s\xa0]*k|I)[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|1(?:[\s\xa0]*Paralipomenon|[\s\xa0]*Kroni(?:ck[a\xE1]|k)|[\s\xa0]*Kron\xEDk|[\s\xa0]*Kron|[\s\xa0]*Krn|Chr))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Ezra"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Ez(?:d(?:r[a\xE1][sš])?|ra))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Neh"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Neh(?:emi[a\xE1][sš])?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["GkEsth"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:G(?:r[e\xE9]cke[\s\xa0]*[cč]asti[\s\xa0]*knihy[\s\xa0]*Ester|kEsth)|Ester[\s\xa0]*gr)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Esth"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Est(?:er|h)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Job"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:K(?:niha[\s\xa0]*J[o\xF3]|\.[\s\xa0]*J[o\xF3]|[\s\xa0]*J[o\xF3])bova|J[o\xF3]b)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Ps"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:K(?:niha[\s\xa0]*[zž]|\.[\s\xa0]*[zž]|[\s\xa0]*[zž])almov|[ZŽ]al(?:t[a\xE1]r|my)|[ZŽ](?:alm)?|Ps)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["PrAzar"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Azarj[a\xE1][sš]ova[\s\xa0]*modlitba|PrAzar)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Prov"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:K(?:niha[\s\xa0]*pr[i\xED]slov[i\xED]|\.[\s\xa0]*pr[i\xED]slov[i\xED]|[\s\xa0]*pr[i\xED]slov[i\xED])|Pr(?:[i\xED]slovia|[i\xED]sl?|ov)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Eccl"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:K(?:oh(?:elet(?:[\s\xa0]*—[\s\xa0]*Kazate[lľ])?)?|(?:niha[\s\xa0]*kazate[lľ]|\.[\s\xa0]*kazate[lľ]|[\s\xa0]*kazate[lľ])ova|azate[lľ]|az)|E(?:kleziastes|ccl))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["SgThree"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Traja[\s\xa0]*ml[a\xE1]denci[\s\xa0]*v[\s\xa0]*rozp[a\xE1]lenej[\s\xa0]*peci|Piese[nň][\s\xa0]*ml[a\xE1]dencov[\s\xa0]*v[\s\xa0]*ohnivej[\s\xa0]*peci|SgThree)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Song"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:V(?:e[lľ]p(?:iese[nň][\s\xa0]*[SŠ]alam[u\xFA]nova)?|[lľ]p)|Piese[nň][\s\xa0]*[SŠ]alam[u\xFA]nova|P(?:iese[nň][\s\xa0]*piesn[i\xED]|Š)|Pies|Song)|(?:Ve[lľ]piese[nň]|Piese[nň])
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jer"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jer(?:emi[a\xE1][sš])?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Ezek"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Ez(?:e(?:chiel|k))?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Dan"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Dan(?:iel)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Hos"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Ho(?:ze[a\xE1][sš]|s)|Oz(?:e[a\xE1][sš])?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Joel"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Joel)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Amos"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:[A\xC1]m(?:os)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Obad"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Ob(?:edi[a\xE1][sš]|ad(?:i[a\xE1][sš])?)|Abd(?:i[a\xE1][sš])?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jonah"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jon(?:\xE1[sš]|a[hsš])?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Mic"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Mic(?:h(?:e[a\xE1][sš])?)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Nah"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:N(?:\xE1hum|ah(?:um)?))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Hab"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Hab(?:akuk)?|Ab(?:akuk)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Zeph"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Sof(?:oni[a\xE1][sš])?|Zeph)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Hag"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Hag(?:geus)?|Ag(?:geus|eus)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Zech"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Z(?:ach(?:ari[a\xE1][sš])?|ech))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Mal"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Mal(?:achi[a\xE1][sš])?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Matt"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Evanjelium[\s\xa0]*Pod[lľ]a[\s\xa0]*Mat[u\xFA][sš]a|M(?:at(?:[u\xFA][sš]a|t)|at(?:[u\xFA][sš])?|t))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Mark"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Evanjelium[\s\xa0]*Pod[lľ]a[\s\xa0]*Marka|M(?:ar(?:ka|ek)|ark|k))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Luke"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Evanjelium[\s\xa0]*Pod[lľ]a[\s\xa0]*Luk[a\xE1][sš]a|L(?:uk(?:[a\xE1][sš]a|e)|uk[a\xE1][sš]|k))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1John"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*J[a\xE1]|J[a\xE1])nov|[y\xFD][\s\xa0]*J[a\xE1]nov[\s\xa0]*list|[y\xFD][\s\xa0]*list[\s\xa0]*J[a\xE1]nov)|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*J[a\xE1]nov|(?:1[\s\xa0]*k|I)[\s\xa0]*J[a\xE1]nov|1(?:[\s\xa0]*J[a\xE1]nov|(?:J(?:oh|[a\xE1])|[\s\xa0]*J)n|[\s\xa0]*J))|(?:Prv[y\xFD][\s\xa0]*J[a\xE1]nov)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2John"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*J[a\xE1]|J[a\xE1])nov|[y\xFD][\s\xa0]*J[a\xE1]nov[\s\xa0]*list|[y\xFD][\s\xa0]*list[\s\xa0]*J[a\xE1]nov)|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*J[a\xE1]nov|(?:2[\s\xa0]*k|II)[\s\xa0]*J[a\xE1]nov|2(?:[\s\xa0]*J[a\xE1]nov|(?:J(?:oh|[a\xE1])|[\s\xa0]*J)n|[\s\xa0]*J))|(?:Druh[y\xFD][\s\xa0]*J[a\xE1]nov)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["3John"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Tret(?:i(?:a[\s\xa0]*(?:kniha[\s\xa0]*J[a\xE1]|J[a\xE1])nov|[\s\xa0]*J[a\xE1]nov[\s\xa0]*list)|\xED[\s\xa0]*J[a\xE1]nov[\s\xa0]*list)|(?:III|3(?:[\s\xa0]*k)?)\.[\s\xa0]*J[a\xE1]nov|(?:III|3[\s\xa0]*k)[\s\xa0]*J[a\xE1]nov|3(?:[\s\xa0]*J[a\xE1]nov|(?:J(?:oh|[a\xE1])|[\s\xa0]*J)n|[\s\xa0]*J))|(?:Tret[i\xED][\s\xa0]*J[a\xE1]nov)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["John"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Evanjelium[\s\xa0]*Pod[lľ]a[\s\xa0]*J[a\xE1]na|J(?:(?:oh)?n|[a\xE1]na|[a\xE1]n))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Acts"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Sk(?:utky(?:[\s\xa0]*apo[sš]tolov)?)?|Acts)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Rom"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:(?:List[\s\xa0]*Rimano|R(?:\xEDmsky|imsky|imano|i|o))m)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Cor"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))|Korin(?:ťano|t(?:sk[y\xFD]|ano)))|[y\xFD][\s\xa0]*list[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))|[y\xFD][\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano)))m|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))m|(?:2[\s\xa0]*k|II)[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))m|2(?:[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))m|(?:[\s\xa0]*K|C)or))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Cor"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))|Korin(?:ťano|t(?:sk[y\xFD]|ano)))|[y\xFD][\s\xa0]*list[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))|[y\xFD][\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano)))m|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))m|(?:1[\s\xa0]*k|I)[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))m|1(?:[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))m|(?:[\s\xa0]*K|C)or))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Gal"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:List[\s\xa0]*Gala[tť]anom|Ga(?:latsk[y\xFD]m|latanom|laťanom|l)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Eph"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:List[\s\xa0]*Efezanom|E(?:fezsk[y\xFD]m|fezanom|ph|f))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Phil"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:List[\s\xa0]*Filipanom|Filipsk[y\xFD]m|Filipanom|Phil|Fil|Flp)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Col"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:List[\s\xa0]*Kolosanom|Kolosensk[y\xFD]m|Kolosanom|[CK]ol)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Thess"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))|Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))|[y\xFD][\s\xa0]*list[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))|[y\xFD][\s\xa0]*Tesaloni(?:čano|c(?:ano|k[y\xFD]))|[y\xFD][\s\xa0]*Sol[u\xFA]n[cč]ano|[y\xFD][\s\xa0]*Sol[u\xFA]nsky)m|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))m|(?:2[\s\xa0]*k|II)[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))m|2(?:[\s\xa0]*Tesaloni(?:čano|c(?:ano|k[y\xFD]))m|[\s\xa0]*Sol[u\xFA]n[cč]anom|[\s\xa0]*Sol[u\xFA]nskym|Thess|[\s\xa0]*(?:Sol|Tes)))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Thess"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))|Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))|[y\xFD][\s\xa0]*list[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))|[y\xFD][\s\xa0]*Tesaloni(?:čano|c(?:ano|k[y\xFD]))|[y\xFD][\s\xa0]*Sol[u\xFA]n[cč]ano|[y\xFD][\s\xa0]*Sol[u\xFA]nsky)m|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))m|(?:1[\s\xa0]*k|I)[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))m|1(?:[\s\xa0]*Tesaloni(?:čano|c(?:ano|k[y\xFD]))m|[\s\xa0]*Sol[u\xFA]n[cč]anom|[\s\xa0]*Sol[u\xFA]nskym|Thess|[\s\xa0]*(?:Sol|Tes)))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Tim"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Timotej?|Timotej?)|[y\xFD][\s\xa0]*list[\s\xa0]*Timotej?|[y\xFD][\s\xa0]*Timotej?)ovi|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*Timotej?ovi|(?:2[\s\xa0]*k|II)[\s\xa0]*Timotej?ovi|2(?:[\s\xa0]*Timotej?ovi|[\s\xa0]*?Tim))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Tim"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Timotej?|Timotej?)|[y\xFD][\s\xa0]*list[\s\xa0]*Timotej?|[y\xFD][\s\xa0]*Timotej?)ovi|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*Timotej?ovi|(?:1[\s\xa0]*k|I)[\s\xa0]*Timotej?ovi|1(?:[\s\xa0]*Timotej?ovi|[\s\xa0]*?Tim))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Titus"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:List[\s\xa0]*T[i\xED]tovi|T(?:[i\xED]tovi|itus|[i\xED]t))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Phlm"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:List[\s\xa0]*Filem[o\xF3]novi|Filemonovi|(?:File|(?:Ph|F)l)m)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Heb"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:List[\s\xa0]*Hebrejom|Hebrejom|[ZŽ]idom|Hebr?|[ZŽ]id)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jas"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:J(?:a(?:k(?:ubov(?:[\s\xa0]*List)?)?|s)|k))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Pet"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*)?Petrov|[y\xFD][\s\xa0]*Petrov[\s\xa0]*list|[y\xFD][\s\xa0]*list[\s\xa0]*Petrov|[y\xFD][\s\xa0]*Petrov)|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*Petrov|(?:2[\s\xa0]*k|II)[\s\xa0]*Petrov|2(?:[\s\xa0]*Petrov|(?:[\s\xa0]*P|Pe)t))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Pet"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*)?Petrov|[y\xFD][\s\xa0]*Petrov[\s\xa0]*list|[y\xFD][\s\xa0]*list[\s\xa0]*Petrov|[y\xFD][\s\xa0]*Petrov)|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*Petrov|(?:1[\s\xa0]*k|I)[\s\xa0]*Petrov|1(?:[\s\xa0]*Petrov|(?:[\s\xa0]*P|Pe)t))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jude"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:J(?:\xFAd(?:ov(?:[\s\xa0]*List)?)?|ud(?:ov(?:[\s\xa0]*List)?|e)?))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Tob"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Tob(?:i[a\xE1][sš])?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jdt"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:K(?:niha|\.)?[\s\xa0]*Juditina|J(?:udita|udit|dt))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Bar"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Proroctvo[\s\xa0]*Baruchovo|Bar(?:uch)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Sus"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Zuzan[ae]|Sus)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Macc"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Ma(?:ch|k)|Ma(?:ch|k))|[y\xFD][\s\xa0]*list[\s\xa0]*Ma(?:ch|k)|[y\xFD][\s\xa0]*Ma(?:ch|k))abejcov|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*Ma(?:ch|k)abejcov|(?:2[\s\xa0]*k|II)[\s\xa0]*Ma(?:ch|k)abejcov|2(?:[\s\xa0]*Ma(?:ch|k)abejcov|[\s\xa0]*Ma(?:ch|k)|Macc))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["3Macc"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Tret(?:i(?:a[\s\xa0]*(?:kniha[\s\xa0]*)?|[\s\xa0]*)|\xED[\s\xa0]*)Machabejcov|(?:III|3(?:[\s\xa0]*k)?)\.[\s\xa0]*Machabejcov|(?:III|3[\s\xa0]*k)[\s\xa0]*Machabejcov|3(?:[\s\xa0]*Machabejcov|[\s\xa0]*Mach|Macc|[\s\xa0]*Mak))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["4Macc"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:[SŠ]tvrt[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*)?Machabejcov|(?:4(?:[\s\xa0]*k)?|IV)\.[\s\xa0]*Machabejcov|(?:4[\s\xa0]*k|IV)[\s\xa0]*Machabejcov|4(?:[\s\xa0]*Machabejcov|[\s\xa0]*Mach|Macc|[\s\xa0]*Mak))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Macc"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Mach|Ma(?:ch|k))|(?:[y\xFD][\s\xa0]*list|[y\xFD])[\s\xa0]*Mach)abejcov|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*Machabejcov|(?:1[\s\xa0]*k|I)[\s\xa0]*Machabejcov|1(?:[\s\xa0]*Machabejcov|[\s\xa0]*Mach|Macc|[\s\xa0]*Mak))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
]
# Short-circuit the look if we know we want all the books.
return books if include_apocrypha is true and case_sensitive is "none"
# Filter out books in the Apocrypha if we don't want them. `Array.map` isn't supported below IE9.
out = []
for book in books
continue if include_apocrypha is false and book.apocrypha? and book.apocrypha is true
if case_sensitive is "books"
book.regexp = new RegExp book.regexp.source, "g"
out.push book
out
# Default to not using the Apocrypha
bcv_parser::regexps.books = bcv_parser::regexps.get_books false, "none"
| 57382 | bcv_parser::regexps.space = "[\\s\\xa0]"
# Matches a potential passage after an earlier pass has replaced book names with
# "\x1f<book-index>\x1f" control sequences. The Slovak tokens in the body
# ("veršov", "kapitoly", "až", "porov", ...) are chapter/verse/range words.
bcv_parser::regexps.escaped_passage = ///
	(?:^ | [^\x1f\x1e\dA-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ] ) # Beginning of string or not in the middle of a word or immediately following another book. Only count a book if it's part of a sequence: `Matt5John3` is OK, but not `1Matt5John3`
	(
		# Start inverted book/chapter (cb)
		(?:
			  (?: ch (?: apters? | a?pts?\.? | a?p?s?\.? )? \s*
				\d+ \s* (?: [\u2013\u2014\-] | through | thru | to) \s* \d+ \s*
				(?: from | of | in ) (?: \s+ the \s+ book \s+ of )?\s* )
			| (?: ch (?: apters? | a?pts?\.? | a?p?s?\.? )? \s*
				\d+ \s*
				(?: from | of | in ) (?: \s+ the \s+ book \s+ of )?\s* )
			| (?: \d+ (?: th | nd | st ) \s*
				ch (?: apter | a?pt\.? | a?p?\.? )? \s* #no plurals here since it's a single chapter
				(?: from | of | in ) (?: \s+ the \s+ book \s+ of )? \s* )
		)? # End inverted book/chapter (cb)
		\x1f(\d+)(?:/\d+)?\x1f #book
		(?:
		    /\d+\x1f #special Psalm chapters
		  | [\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014]
		  | title (?! [a-z] ) #could be followed by a number
		  | ver[šs]ov | kapitoly | kapitole | kapitolu | kapitol | hlavy | a[žz] | porov | pozri | alebo | kap | ff | - | a
		  | [b-e] (?! \w ) #a-e allows 1:1a
		  | $ #or the end of the string
		)+
	)
	///gi
# These are the only valid ways to end a potential passage match. The closing parenthesis allows for fully capturing parentheses surrounding translations (ESV**)**. The last one, `[\d\x1f]` needs not to be +; otherwise `Gen5ff` becomes `\x1f0\x1f5ff`, and `adjust_regexp_end` matches the `\x1f5` and incorrectly dangles the ff.
bcv_parser::regexps.match_end_split = ///
	  \d \W* title
	| \d \W* ff (?: [\s\xa0*]* \.)?
	| \d [\s\xa0*]* [b-e] (?! \w )
	| \x1e (?: [\s\xa0*]* [)\]\uff09] )? #ff09 is a full-width closing parenthesis
	| [\d\x1f]
	///gi
# Internal control characters (\x1e, \x1f) used to delimit tagged books/passages.
bcv_parser::regexps.control = /[\x1e\x1f]/g
# A character that cannot be part of a book name; anchors the start of a book match.
bcv_parser::regexps.pre_book = "[^A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ]"
# Ordinal prefixes for numbered books: Slovak "Prvá/Prvý" (first), "Druhá/Druhý"
# (second), "Tretia/Tretí" (third), plus Roman and Arabic numerals.
bcv_parser::regexps.first = "(?:Prv[áa]#{bcv_parser::regexps.space}+kniha|Prv[ýy]#{bcv_parser::regexps.space}+list|Prv[áa]|Prv[ýy]|1#{bcv_parser::regexps.space}+k|I|1)\\.?#{bcv_parser::regexps.space}*"
bcv_parser::regexps.second = "(?:Druh[áa]#{bcv_parser::regexps.space}+kniha|Druh[ýy]#{bcv_parser::regexps.space}+list|Druh[áa]|Druh[ýy]|2#{bcv_parser::regexps.space}+k|II|2)\\.?#{bcv_parser::regexps.space}*"
bcv_parser::regexps.third = "(?:Tretia#{bcv_parser::regexps.space}+kniha|Tretia|Tret[íi]|3#{bcv_parser::regexps.space}+k|III|3)\\.?#{bcv_parser::regexps.space}*"
# Range/sequence separators: "a[žz]" matches Slovak "až" ("through"); "a" = "and".
bcv_parser::regexps.range_and = "(?:[&\u2013\u2014-]|(?:porov|pozri|alebo|a)|(?:a[žz]|-))"
bcv_parser::regexps.range_only = "(?:[\u2013\u2014-]|(?:a[žz]|-))"
# Each book regexp should return two parenthesized objects: an optional preliminary character and the book itself.
#
# `include_apocrypha` (boolean): when false, entries flagged `apocrypha: true` are
# filtered out of the result.
# `case_sensitive` ("none" | "books"): when "books", each regexp is rebuilt from
# its source without the `i` flag, so book names must match case exactly.
# Returns the (possibly filtered) array of book-definition objects.
#
# NOTE(review): three tokens in this generated table had been clobbered by
# "<NAME>" anonymization placeholders; they are restored here from the OSIS
# standard IDs and the surrounding patterns: `osis: ["Jer"]` (Jeremiah),
# `(?:Joel)`, and `osis: ["Jonah"]`.
bcv_parser::regexps.get_books = (include_apocrypha, case_sensitive) ->
	books = [
		osis: ["Ps"]
		apocrypha: true
		extra: "2"
		regexp: ///(\b)( # Don't match a preceding \d like usual because we only want to match a valid OSIS, which will never have a preceding digit.
			Ps151
			# Always followed by ".1"; the regular Psalms parser can handle `Ps151` on its own.
			)(?=\.1)///g # Case-sensitive because we only want to match a valid OSIS.
	,
		osis: ["Gen"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Moj[zž]i[sš]|Moj[zž]i[sš])|[y\xFD][\s\xa0]*list[\s\xa0]*Moj[zž]i[sš]|[y\xFD][\s\xa0]*Moj[zž]i[sš])ova|K(?:niha|\.)?[\s\xa0]*stvorenia|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*Moj[zž]i[sš]ova|(?:1[\s\xa0]*k|I)[\s\xa0]*Moj[zž]i[sš]ova|K(?:niha[\s\xa0]*p[o\xF4]|\.[\s\xa0]*p[o\xF4]|[\s\xa0]*p[o\xF4])vodu|1[\s\xa0]*Moj[zž]i[sš]ova|G(?:enezis|n)|1Moj|Gen|1[\s\xa0]*M)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Exod"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Moj[zž]i[sš]|Moj[zž]i[sš])|[y\xFD][\s\xa0]*list[\s\xa0]*Moj[zž]i[sš]|[y\xFD][\s\xa0]*Moj[zž]i[sš])ova|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*Moj[zž]i[sš]ova|(?:2[\s\xa0]*k|II)[\s\xa0]*Moj[zž]i[sš]ova|2[\s\xa0]*Moj[zž]i[sš]ova|Exodus|Ex(?:od)?|2Moj|2[\s\xa0]*M)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Bel"]
		apocrypha: true
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:B[e\xE9]l(?:[\s\xa0]*a[\s\xa0]*drak)?)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Lev"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Tret(?:i(?:a[\s\xa0]*(?:kniha[\s\xa0]*Moj[zž]i[sš]|Moj[zž]i[sš])|[\s\xa0]*Moj[zž]i[sš])|\xED[\s\xa0]*Moj[zž]i[sš])ova|(?:III|3(?:[\s\xa0]*k)?)\.[\s\xa0]*Moj[zž]i[sš]ova|(?:III|3[\s\xa0]*k)[\s\xa0]*Moj[zž]i[sš]ova|3[\s\xa0]*Moj[zž]i[sš]ova|L(?:evitikus|v)|3Moj|Lev|3[\s\xa0]*M)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Num"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:[SŠ]tvrt[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Moj[zž]i[sš]|Moj[zž]i[sš])ova|(?:4(?:[\s\xa0]*k)?|IV)\.[\s\xa0]*Moj[zž]i[sš]ova|(?:4[\s\xa0]*k|IV)[\s\xa0]*Moj[zž]i[sš]ova|4[\s\xa0]*Moj[zž]i[sš]ova|N(?:umeri|m)|4Moj|Num|4[\s\xa0]*M)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Sir"]
		apocrypha: true
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:K(?:niha[\s\xa0]*(?:Sirachov(?:ho[\s\xa0]*syn|c(?:ov)?)a|Ekleziastikus)|\.[\s\xa0]*(?:Sirachov(?:ho[\s\xa0]*syn|c(?:ov)?)a|Ekleziastikus)|[\s\xa0]*(?:Sirachov(?:ho[\s\xa0]*syn|c(?:ov)?)a|Ekleziastikus))|Sir(?:achovcova|achovec)?)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Wis"]
		apocrypha: true
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:M[u\xFA]d(?:ros(?:ti?|ť))?|Wis)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Lam"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Pla[cč][\s\xa0]*Jeremi[a\xE1][sš]ov|Jeremi[a\xE1][sš]ov[\s\xa0]*Pla[cč]|K(?:niha[\s\xa0]*n[a\xE1]|\.[\s\xa0]*n[a\xE1]|[\s\xa0]*n[a\xE1])rekov|[ZŽ]alospevy|[ZŽ]alosp|N[a\xE1]reky|N[a\xE1]r|Lam)|(?:Pla[cč])
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["EpJer"]
		apocrypha: true
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Jeremi[a\xE1][sš]ov[\s\xa0]*list|EpJer)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Rev"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Zj(?:av(?:enie(?:[\s\xa0]*(?:Apo[sš]tola[\s\xa0]*J[a\xE1]|sv[a\xE4]t[e\xE9]ho[\s\xa0]*J[a\xE1]|J[a\xE1])na)?)?|v)?|Apokalypsa|Rev)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["PrMan"]
		apocrypha: true
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Manasesova[\s\xa0]*modlitba|PrMan)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Deut"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Piata[\s\xa0]*(?:kniha[\s\xa0]*Moj[zž]i[sš]|Moj[zž]i[sš])ova|(?:5(?:[\s\xa0]*k)?|V)\.[\s\xa0]*Moj[zž]i[sš]ova|D(?:euteron[o\xF3]mium|t)|(?:5[\s\xa0]*k|V)[\s\xa0]*Moj[zž]i[sš]ova|5[\s\xa0]*Moj[zž]i[sš]ova|Deut|5[\s\xa0]*M)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Josh"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:J(?:\xF3zu(?:ov)?a|o(?:z(?:uova|u[ae])?|šu(?:ov)?a|s(?:u(?:ov)?a|h)))|Iosua)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Judg"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:K\.?[\s\xa0]*sudcov|S(?:udcovia|dc)|Sud(?:cov)?|Judg)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Ruth"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:R(?:uth?|\xFAt))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["1Esd"]
		apocrypha: true
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|Ezdr[a\xE1][sš](?:ova)?)|[y\xFD][\s\xa0]*list[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|[y\xFD][\s\xa0]*Ezdr[a\xE1][sš](?:ova)?)|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|(?:1[\s\xa0]*k|I)[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|1(?:[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|Esd))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["2Esd"]
		apocrypha: true
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|Ezdr[a\xE1][sš](?:ova)?)|[y\xFD][\s\xa0]*list[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|[y\xFD][\s\xa0]*Ezdr[a\xE1][sš](?:ova)?)|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|(?:2[\s\xa0]*k|II)[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|2(?:[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|Esd))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Isa"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:I(?:z(?:a[ij][a\xE1][sš])?|sa))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["2Sam"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*)?|(?:[y\xFD][\s\xa0]*list|[y\xFD])[\s\xa0]*)Samuelova|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*Samuelova|(?:2[\s\xa0]*k|II)[\s\xa0]*Samuelova|2(?:[\s\xa0]*Samuelova|[\s\xa0]*S(?:am)?|Sam))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["1Sam"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*)?|(?:[y\xFD][\s\xa0]*list|[y\xFD])[\s\xa0]*)Samuelova|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*Samuelova|(?:1[\s\xa0]*k|I)[\s\xa0]*Samuelova|1(?:[\s\xa0]*Samuelova|[\s\xa0]*S(?:am)?|Sam))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["2Kgs"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:[SŠ]tvrt[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Kr[a\xE1][lľ]|Kr[a\xE1][lľ])ov|Druh[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Kr[a\xE1][lľ]|Kr[a\xE1][lľ])ov|(?:Druh[y\xFD][\s\xa0]*list|(?:4[\s\xa0]*k|2(?:[\s\xa0]*k)?|I[IV]|4)\.)[\s\xa0]*Kr[a\xE1][lľ]ov|(?:Druh[y\xFD]|4)[\s\xa0]*Kr[a\xE1][lľ]ov|(?:4[\s\xa0]*k|2[\s\xa0]*k|I[IV])[\s\xa0]*Kr[a\xE1][lľ]ov|2(?:[\s\xa0]*Kr[a\xE1][lľ]ov|[\s\xa0]*Kr[lľ]|[\s\xa0]*Kr|Kgs))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["1Kgs"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Tret(?:i(?:a[\s\xa0]*(?:kniha[\s\xa0]*Kr[a\xE1][lľ]|Kr[a\xE1][lľ])|[\s\xa0]*Kr[a\xE1][lľ])|\xED[\s\xa0]*Kr[a\xE1][lľ])ov|Prv[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Kr[a\xE1][lľ]|Kr[a\xE1][lľ])ov|(?:Prv[y\xFD][\s\xa0]*list|(?:III|3[\s\xa0]*k|1(?:[\s\xa0]*k)?|[3I])\.)[\s\xa0]*Kr[a\xE1][lľ]ov|(?:Prv[y\xFD]|3)[\s\xa0]*Kr[a\xE1][lľ]ov|(?:III|3[\s\xa0]*k|1[\s\xa0]*k|I)[\s\xa0]*Kr[a\xE1][lľ]ov|1(?:[\s\xa0]*Kr[a\xE1][lľ]ov|[\s\xa0]*Kr[lľ]|[\s\xa0]*Kr|Kgs))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["2Chr"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|[y\xFD][\s\xa0]*list[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|[y\xFD][\s\xa0]*Paralipomenon|[y\xFD][\s\xa0]*Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|(?:2[\s\xa0]*k|II)[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|2(?:[\s\xa0]*Paralipomenon|[\s\xa0]*Kroni(?:ck[a\xE1]|k)|[\s\xa0]*Kron\xEDk|[\s\xa0]*Kron|[\s\xa0]*Krn|Chr))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["1Chr"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|[y\xFD][\s\xa0]*list[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|[y\xFD][\s\xa0]*Paralipomenon|[y\xFD][\s\xa0]*Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|(?:1[\s\xa0]*k|I)[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|1(?:[\s\xa0]*Paralipomenon|[\s\xa0]*Kroni(?:ck[a\xE1]|k)|[\s\xa0]*Kron\xEDk|[\s\xa0]*Kron|[\s\xa0]*Krn|Chr))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Ezra"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Ez(?:d(?:r[a\xE1][sš])?|ra))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Neh"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Neh(?:emi[a\xE1][sš])?)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["GkEsth"]
		apocrypha: true
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:G(?:r[e\xE9]cke[\s\xa0]*[cč]asti[\s\xa0]*knihy[\s\xa0]*Ester|kEsth)|Ester[\s\xa0]*gr)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Esth"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Est(?:er|h)?)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Job"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:K(?:niha[\s\xa0]*J[o\xF3]|\.[\s\xa0]*J[o\xF3]|[\s\xa0]*J[o\xF3])bova|J[o\xF3]b)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Ps"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:K(?:niha[\s\xa0]*[zž]|\.[\s\xa0]*[zž]|[\s\xa0]*[zž])almov|[ZŽ]al(?:t[a\xE1]r|my)|[ZŽ](?:alm)?|Ps)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["PrAzar"]
		apocrypha: true
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Azarj[a\xE1][sš]ova[\s\xa0]*modlitba|PrAzar)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Prov"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:K(?:niha[\s\xa0]*pr[i\xED]slov[i\xED]|\.[\s\xa0]*pr[i\xED]slov[i\xED]|[\s\xa0]*pr[i\xED]slov[i\xED])|Pr(?:[i\xED]slovia|[i\xED]sl?|ov)?)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Eccl"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:K(?:oh(?:elet(?:[\s\xa0]*—[\s\xa0]*Kazate[lľ])?)?|(?:niha[\s\xa0]*kazate[lľ]|\.[\s\xa0]*kazate[lľ]|[\s\xa0]*kazate[lľ])ova|azate[lľ]|az)|E(?:kleziastes|ccl))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["SgThree"]
		apocrypha: true
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Traja[\s\xa0]*ml[a\xE1]denci[\s\xa0]*v[\s\xa0]*rozp[a\xE1]lenej[\s\xa0]*peci|Piese[nň][\s\xa0]*ml[a\xE1]dencov[\s\xa0]*v[\s\xa0]*ohnivej[\s\xa0]*peci|SgThree)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Song"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:V(?:e[lľ]p(?:iese[nň][\s\xa0]*[SŠ]alam[u\xFA]nova)?|[lľ]p)|Piese[nň][\s\xa0]*[SŠ]alam[u\xFA]nova|P(?:iese[nň][\s\xa0]*piesn[i\xED]|Š)|Pies|Song)|(?:Ve[lľ]piese[nň]|Piese[nň])
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Jer"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Jer(?:emi[a\xE1][sš])?)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Ezek"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Ez(?:e(?:chiel|k))?)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Dan"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Dan(?:iel)?)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Hos"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Ho(?:ze[a\xE1][sš]|s)|Oz(?:e[a\xE1][sš])?)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Joel"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Joel)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Amos"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:[A\xC1]m(?:os)?)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Obad"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Ob(?:edi[a\xE1][sš]|ad(?:i[a\xE1][sš])?)|Abd(?:i[a\xE1][sš])?)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Jonah"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Jon(?:\xE1[sš]|a[hsš])?)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Mic"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Mic(?:h(?:e[a\xE1][sš])?)?)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Nah"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:N(?:\xE1hum|ah(?:um)?))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Hab"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Hab(?:akuk)?|Ab(?:akuk)?)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Zeph"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Sof(?:oni[a\xE1][sš])?|Zeph)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Hag"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Hag(?:geus)?|Ag(?:geus|eus)?)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Zech"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Z(?:ach(?:ari[a\xE1][sš])?|ech))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Mal"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Mal(?:achi[a\xE1][sš])?)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Matt"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Evanjelium[\s\xa0]*Pod[lľ]a[\s\xa0]*Mat[u\xFA][sš]a|M(?:at(?:[u\xFA][sš]a|t)|at(?:[u\xFA][sš])?|t))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Mark"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Evanjelium[\s\xa0]*Pod[lľ]a[\s\xa0]*Marka|M(?:ar(?:ka|ek)|ark|k))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Luke"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Evanjelium[\s\xa0]*Pod[lľ]a[\s\xa0]*Luk[a\xE1][sš]a|L(?:uk(?:[a\xE1][sš]a|e)|uk[a\xE1][sš]|k))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["1John"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*J[a\xE1]|J[a\xE1])nov|[y\xFD][\s\xa0]*J[a\xE1]nov[\s\xa0]*list|[y\xFD][\s\xa0]*list[\s\xa0]*J[a\xE1]nov)|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*J[a\xE1]nov|(?:1[\s\xa0]*k|I)[\s\xa0]*J[a\xE1]nov|1(?:[\s\xa0]*J[a\xE1]nov|(?:J(?:oh|[a\xE1])|[\s\xa0]*J)n|[\s\xa0]*J))|(?:Prv[y\xFD][\s\xa0]*J[a\xE1]nov)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["2John"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*J[a\xE1]|J[a\xE1])nov|[y\xFD][\s\xa0]*J[a\xE1]nov[\s\xa0]*list|[y\xFD][\s\xa0]*list[\s\xa0]*J[a\xE1]nov)|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*J[a\xE1]nov|(?:2[\s\xa0]*k|II)[\s\xa0]*J[a\xE1]nov|2(?:[\s\xa0]*J[a\xE1]nov|(?:J(?:oh|[a\xE1])|[\s\xa0]*J)n|[\s\xa0]*J))|(?:Druh[y\xFD][\s\xa0]*J[a\xE1]nov)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["3John"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Tret(?:i(?:a[\s\xa0]*(?:kniha[\s\xa0]*J[a\xE1]|J[a\xE1])nov|[\s\xa0]*J[a\xE1]nov[\s\xa0]*list)|\xED[\s\xa0]*J[a\xE1]nov[\s\xa0]*list)|(?:III|3(?:[\s\xa0]*k)?)\.[\s\xa0]*J[a\xE1]nov|(?:III|3[\s\xa0]*k)[\s\xa0]*J[a\xE1]nov|3(?:[\s\xa0]*J[a\xE1]nov|(?:J(?:oh|[a\xE1])|[\s\xa0]*J)n|[\s\xa0]*J))|(?:Tret[i\xED][\s\xa0]*J[a\xE1]nov)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["John"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Evanjelium[\s\xa0]*Pod[lľ]a[\s\xa0]*J[a\xE1]na|J(?:(?:oh)?n|[a\xE1]na|[a\xE1]n))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Acts"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Sk(?:utky(?:[\s\xa0]*apo[sš]tolov)?)?|Acts)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Rom"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:(?:List[\s\xa0]*Rimano|R(?:\xEDmsky|imsky|imano|i|o))m)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["2Cor"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))|Korin(?:ťano|t(?:sk[y\xFD]|ano)))|[y\xFD][\s\xa0]*list[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))|[y\xFD][\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano)))m|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))m|(?:2[\s\xa0]*k|II)[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))m|2(?:[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))m|(?:[\s\xa0]*K|C)or))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["1Cor"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))|Korin(?:ťano|t(?:sk[y\xFD]|ano)))|[y\xFD][\s\xa0]*list[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))|[y\xFD][\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano)))m|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))m|(?:1[\s\xa0]*k|I)[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))m|1(?:[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))m|(?:[\s\xa0]*K|C)or))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Gal"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:List[\s\xa0]*Gala[tť]anom|Ga(?:latsk[y\xFD]m|latanom|laťanom|l)?)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Eph"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:List[\s\xa0]*Efezanom|E(?:fezsk[y\xFD]m|fezanom|ph|f))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Phil"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:List[\s\xa0]*Filipanom|Filipsk[y\xFD]m|Filipanom|Phil|Fil|Flp)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Col"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:List[\s\xa0]*Kolosanom|Kolosensk[y\xFD]m|Kolosanom|[CK]ol)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["2Thess"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))|Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))|[y\xFD][\s\xa0]*list[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))|[y\xFD][\s\xa0]*Tesaloni(?:čano|c(?:ano|k[y\xFD]))|[y\xFD][\s\xa0]*Sol[u\xFA]n[cč]ano|[y\xFD][\s\xa0]*Sol[u\xFA]nsky)m|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))m|(?:2[\s\xa0]*k|II)[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))m|2(?:[\s\xa0]*Tesaloni(?:čano|c(?:ano|k[y\xFD]))m|[\s\xa0]*Sol[u\xFA]n[cč]anom|[\s\xa0]*Sol[u\xFA]nskym|Thess|[\s\xa0]*(?:Sol|Tes)))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["1Thess"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))|Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))|[y\xFD][\s\xa0]*list[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))|[y\xFD][\s\xa0]*Tesaloni(?:čano|c(?:ano|k[y\xFD]))|[y\xFD][\s\xa0]*Sol[u\xFA]n[cč]ano|[y\xFD][\s\xa0]*Sol[u\xFA]nsky)m|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))m|(?:1[\s\xa0]*k|I)[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))m|1(?:[\s\xa0]*Tesaloni(?:čano|c(?:ano|k[y\xFD]))m|[\s\xa0]*Sol[u\xFA]n[cč]anom|[\s\xa0]*Sol[u\xFA]nskym|Thess|[\s\xa0]*(?:Sol|Tes)))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["2Tim"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Timotej?|Timotej?)|[y\xFD][\s\xa0]*list[\s\xa0]*Timotej?|[y\xFD][\s\xa0]*Timotej?)ovi|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*Timotej?ovi|(?:2[\s\xa0]*k|II)[\s\xa0]*Timotej?ovi|2(?:[\s\xa0]*Timotej?ovi|[\s\xa0]*?Tim))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["1Tim"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Timotej?|Timotej?)|[y\xFD][\s\xa0]*list[\s\xa0]*Timotej?|[y\xFD][\s\xa0]*Timotej?)ovi|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*Timotej?ovi|(?:1[\s\xa0]*k|I)[\s\xa0]*Timotej?ovi|1(?:[\s\xa0]*Timotej?ovi|[\s\xa0]*?Tim))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Titus"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:List[\s\xa0]*T[i\xED]tovi|T(?:[i\xED]tovi|itus|[i\xED]t))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Phlm"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:List[\s\xa0]*Filem[o\xF3]novi|Filemonovi|(?:File|(?:Ph|F)l)m)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Heb"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:List[\s\xa0]*Hebrejom|Hebrejom|[ZŽ]idom|Hebr?|[ZŽ]id)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Jas"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:J(?:a(?:k(?:ubov(?:[\s\xa0]*List)?)?|s)|k))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["2Pet"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*)?Petrov|[y\xFD][\s\xa0]*Petrov[\s\xa0]*list|[y\xFD][\s\xa0]*list[\s\xa0]*Petrov|[y\xFD][\s\xa0]*Petrov)|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*Petrov|(?:2[\s\xa0]*k|II)[\s\xa0]*Petrov|2(?:[\s\xa0]*Petrov|(?:[\s\xa0]*P|Pe)t))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["1Pet"]
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*)?Petrov|[y\xFD][\s\xa0]*Petrov[\s\xa0]*list|[y\xFD][\s\xa0]*list[\s\xa0]*Petrov|[y\xFD][\s\xa0]*Petrov)|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*Petrov|(?:1[\s\xa0]*k|I)[\s\xa0]*Petrov|1(?:[\s\xa0]*Petrov|(?:[\s\xa0]*P|Pe)t))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Jude"]
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:J(?:\xFAd(?:ov(?:[\s\xa0]*List)?)?|ud(?:ov(?:[\s\xa0]*List)?|e)?))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Tob"]
		apocrypha: true
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Tob(?:i[a\xE1][sš])?)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Jdt"]
		apocrypha: true
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:K(?:niha|\.)?[\s\xa0]*Juditina|J(?:udita|udit|dt))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Bar"]
		apocrypha: true
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Proroctvo[\s\xa0]*Baruchovo|Bar(?:uch)?)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["Sus"]
		apocrypha: true
		regexp: ///(^|#{bcv_parser::regexps.pre_book})(
		(?:Zuzan[ae]|Sus)
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["2Macc"]
		apocrypha: true
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Ma(?:ch|k)|Ma(?:ch|k))|[y\xFD][\s\xa0]*list[\s\xa0]*Ma(?:ch|k)|[y\xFD][\s\xa0]*Ma(?:ch|k))abejcov|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*Ma(?:ch|k)abejcov|(?:2[\s\xa0]*k|II)[\s\xa0]*Ma(?:ch|k)abejcov|2(?:[\s\xa0]*Ma(?:ch|k)abejcov|[\s\xa0]*Ma(?:ch|k)|Macc))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["3Macc"]
		apocrypha: true
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Tret(?:i(?:a[\s\xa0]*(?:kniha[\s\xa0]*)?|[\s\xa0]*)|\xED[\s\xa0]*)Machabejcov|(?:III|3(?:[\s\xa0]*k)?)\.[\s\xa0]*Machabejcov|(?:III|3[\s\xa0]*k)[\s\xa0]*Machabejcov|3(?:[\s\xa0]*Machabejcov|[\s\xa0]*Mach|Macc|[\s\xa0]*Mak))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["4Macc"]
		apocrypha: true
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:[SŠ]tvrt[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*)?Machabejcov|(?:4(?:[\s\xa0]*k)?|IV)\.[\s\xa0]*Machabejcov|(?:4[\s\xa0]*k|IV)[\s\xa0]*Machabejcov|4(?:[\s\xa0]*Machabejcov|[\s\xa0]*Mach|Macc|[\s\xa0]*Mak))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	,
		osis: ["1Macc"]
		apocrypha: true
		regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
		(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Mach|Ma(?:ch|k))|(?:[y\xFD][\s\xa0]*list|[y\xFD])[\s\xa0]*Mach)abejcov|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*Machabejcov|(?:1[\s\xa0]*k|I)[\s\xa0]*Machabejcov|1(?:[\s\xa0]*Machabejcov|[\s\xa0]*Mach|Macc|[\s\xa0]*Mak))
			)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
	]
	# Short-circuit the loop if we know we want all the books.
	return books if include_apocrypha is true and case_sensitive is "none"
	# Filter out books in the Apocrypha if we don't want them. `Array.map` isn't supported below IE9.
	out = []
	for book in books
		continue if include_apocrypha is false and book.apocrypha? and book.apocrypha is true
		if case_sensitive is "books"
			# Rebuild from the same source without the `i` flag to force exact case.
			book.regexp = new RegExp book.regexp.source, "g"
		out.push book
	out
# Default to not using the Apocrypha; "none" requests case-insensitive book matching.
bcv_parser::regexps.books = bcv_parser::regexps.get_books false, "none"
# Whitespace class used when composing book-name patterns: ordinary whitespace
# or a non-breaking space (\xa0).
# NOTE(review): the original line was prefixed with a stray "| true | " data
# artifact, which is a CoffeeScript syntax error; the artifact is removed here.
bcv_parser::regexps.space = "[\\s\\xa0]"
# Matches a potential passage after an earlier pass has replaced book names with
# "\x1f<book-index>\x1f" control sequences. The Slovak tokens in the body
# ("veršov", "kapitoly", "až", "porov", ...) are chapter/verse/range words.
bcv_parser::regexps.escaped_passage = ///
	(?:^ | [^\x1f\x1e\dA-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ] ) # Beginning of string or not in the middle of a word or immediately following another book. Only count a book if it's part of a sequence: `Matt5John3` is OK, but not `1Matt5John3`
	(
		# Start inverted book/chapter (cb)
		(?:
			  (?: ch (?: apters? | a?pts?\.? | a?p?s?\.? )? \s*
				\d+ \s* (?: [\u2013\u2014\-] | through | thru | to) \s* \d+ \s*
				(?: from | of | in ) (?: \s+ the \s+ book \s+ of )?\s* )
			| (?: ch (?: apters? | a?pts?\.? | a?p?s?\.? )? \s*
				\d+ \s*
				(?: from | of | in ) (?: \s+ the \s+ book \s+ of )?\s* )
			| (?: \d+ (?: th | nd | st ) \s*
				ch (?: apter | a?pt\.? | a?p?\.? )? \s* #no plurals here since it's a single chapter
				(?: from | of | in ) (?: \s+ the \s+ book \s+ of )? \s* )
		)? # End inverted book/chapter (cb)
		\x1f(\d+)(?:/\d+)?\x1f #book
		(?:
		    /\d+\x1f #special Psalm chapters
		  | [\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014]
		  | title (?! [a-z] ) #could be followed by a number
		  | ver[šs]ov | kapitoly | kapitole | kapitolu | kapitol | hlavy | a[žz] | porov | pozri | alebo | kap | ff | - | a
		  | [b-e] (?! \w ) #a-e allows 1:1a
		  | $ #or the end of the string
		)+
	)
	///gi
# These are the only valid ways to end a potential passage match. The closing parenthesis allows for fully capturing parentheses surrounding translations (ESV**)**. The last one, `[\d\x1f]` needs not to be +; otherwise `Gen5ff` becomes `\x1f0\x1f5ff`, and `adjust_regexp_end` matches the `\x1f5` and incorrectly dangles the ff.
bcv_parser::regexps.match_end_split = ///
	  \d \W* title
	| \d \W* ff (?: [\s\xa0*]* \.)?
	| \d [\s\xa0*]* [b-e] (?! \w )
	| \x1e (?: [\s\xa0*]* [)\]\uff09] )? #ff09 is a full-width closing parenthesis
	| [\d\x1f]
	///gi
# Internal control characters (\x1e, \x1f) used to delimit tagged books/passages.
bcv_parser::regexps.control = /[\x1e\x1f]/g
# A character that cannot be part of a book name; anchors the start of a book match.
bcv_parser::regexps.pre_book = "[^A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ]"
# Ordinal prefixes for numbered books: Slovak "Prvá/Prvý" (first), "Druhá/Druhý"
# (second), "Tretia/Tretí" (third), plus Roman and Arabic numerals.
bcv_parser::regexps.first = "(?:Prv[áa]#{bcv_parser::regexps.space}+kniha|Prv[ýy]#{bcv_parser::regexps.space}+list|Prv[áa]|Prv[ýy]|1#{bcv_parser::regexps.space}+k|I|1)\\.?#{bcv_parser::regexps.space}*"
bcv_parser::regexps.second = "(?:Druh[áa]#{bcv_parser::regexps.space}+kniha|Druh[ýy]#{bcv_parser::regexps.space}+list|Druh[áa]|Druh[ýy]|2#{bcv_parser::regexps.space}+k|II|2)\\.?#{bcv_parser::regexps.space}*"
bcv_parser::regexps.third = "(?:Tretia#{bcv_parser::regexps.space}+kniha|Tretia|Tret[íi]|3#{bcv_parser::regexps.space}+k|III|3)\\.?#{bcv_parser::regexps.space}*"
# Range/sequence separators: "a[žz]" matches Slovak "až" ("through"); "a" = "and".
bcv_parser::regexps.range_and = "(?:[&\u2013\u2014-]|(?:porov|pozri|alebo|a)|(?:a[žz]|-))"
bcv_parser::regexps.range_only = "(?:[\u2013\u2014-]|(?:a[žz]|-))"
# Each book regexp should return two parenthesized objects: an optional preliminary character and the book itself.
bcv_parser::regexps.get_books = (include_apocrypha, case_sensitive) ->
books = [
osis: ["Ps"]
apocrypha: true
extra: "2"
regexp: ///(\b)( # Don't match a preceding \d like usual because we only want to match a valid OSIS, which will never have a preceding digit.
Ps151
# Always follwed by ".1"; the regular Psalms parser can handle `Ps151` on its own.
)(?=\.1)///g # Case-sensitive because we only want to match a valid OSIS.
,
osis: ["Gen"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Moj[zž]i[sš]|Moj[zž]i[sš])|[y\xFD][\s\xa0]*list[\s\xa0]*Moj[zž]i[sš]|[y\xFD][\s\xa0]*Moj[zž]i[sš])ova|K(?:niha|\.)?[\s\xa0]*stvorenia|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*Moj[zž]i[sš]ova|(?:1[\s\xa0]*k|I)[\s\xa0]*Moj[zž]i[sš]ova|K(?:niha[\s\xa0]*p[o\xF4]|\.[\s\xa0]*p[o\xF4]|[\s\xa0]*p[o\xF4])vodu|1[\s\xa0]*Moj[zž]i[sš]ova|G(?:enezis|n)|1Moj|Gen|1[\s\xa0]*M)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Exod"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Moj[zž]i[sš]|Moj[zž]i[sš])|[y\xFD][\s\xa0]*list[\s\xa0]*Moj[zž]i[sš]|[y\xFD][\s\xa0]*Moj[zž]i[sš])ova|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*Moj[zž]i[sš]ova|(?:2[\s\xa0]*k|II)[\s\xa0]*Moj[zž]i[sš]ova|2[\s\xa0]*Moj[zž]i[sš]ova|Exodus|Ex(?:od)?|2Moj|2[\s\xa0]*M)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Bel"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:B[e\xE9]l(?:[\s\xa0]*a[\s\xa0]*drak)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Lev"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Tret(?:i(?:a[\s\xa0]*(?:kniha[\s\xa0]*Moj[zž]i[sš]|Moj[zž]i[sš])|[\s\xa0]*Moj[zž]i[sš])|\xED[\s\xa0]*Moj[zž]i[sš])ova|(?:III|3(?:[\s\xa0]*k)?)\.[\s\xa0]*Moj[zž]i[sš]ova|(?:III|3[\s\xa0]*k)[\s\xa0]*Moj[zž]i[sš]ova|3[\s\xa0]*Moj[zž]i[sš]ova|L(?:evitikus|v)|3Moj|Lev|3[\s\xa0]*M)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Num"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:[SŠ]tvrt[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Moj[zž]i[sš]|Moj[zž]i[sš])ova|(?:4(?:[\s\xa0]*k)?|IV)\.[\s\xa0]*Moj[zž]i[sš]ova|(?:4[\s\xa0]*k|IV)[\s\xa0]*Moj[zž]i[sš]ova|4[\s\xa0]*Moj[zž]i[sš]ova|N(?:umeri|m)|4Moj|Num|4[\s\xa0]*M)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Sir"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:K(?:niha[\s\xa0]*(?:Sirachov(?:ho[\s\xa0]*syn|c(?:ov)?)a|Ekleziastikus)|\.[\s\xa0]*(?:Sirachov(?:ho[\s\xa0]*syn|c(?:ov)?)a|Ekleziastikus)|[\s\xa0]*(?:Sirachov(?:ho[\s\xa0]*syn|c(?:ov)?)a|Ekleziastikus))|Sir(?:achovcova|achovec)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Wis"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:M[u\xFA]d(?:ros(?:ti?|ť))?|Wis)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Lam"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Pla[cč][\s\xa0]*Jeremi[a\xE1][sš]ov|Jeremi[a\xE1][sš]ov[\s\xa0]*Pla[cč]|K(?:niha[\s\xa0]*n[a\xE1]|\.[\s\xa0]*n[a\xE1]|[\s\xa0]*n[a\xE1])rekov|[ZŽ]alospevy|[ZŽ]alosp|N[a\xE1]reky|N[a\xE1]r|Lam)|(?:Pla[cč])
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["EpJer"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jeremi[a\xE1][sš]ov[\s\xa0]*list|EpJer)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Rev"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Zj(?:av(?:enie(?:[\s\xa0]*(?:Apo[sš]tola[\s\xa0]*J[a\xE1]|sv[a\xE4]t[e\xE9]ho[\s\xa0]*J[a\xE1]|J[a\xE1])na)?)?|v)?|Apokalypsa|Rev)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["PrMan"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Manasesova[\s\xa0]*modlitba|PrMan)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Deut"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Piata[\s\xa0]*(?:kniha[\s\xa0]*Moj[zž]i[sš]|Moj[zž]i[sš])ova|(?:5(?:[\s\xa0]*k)?|V)\.[\s\xa0]*Moj[zž]i[sš]ova|D(?:euteron[o\xF3]mium|t)|(?:5[\s\xa0]*k|V)[\s\xa0]*Moj[zž]i[sš]ova|5[\s\xa0]*Moj[zž]i[sš]ova|Deut|5[\s\xa0]*M)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Josh"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:J(?:\xF3zu(?:ov)?a|o(?:z(?:uova|u[ae])?|šu(?:ov)?a|s(?:u(?:ov)?a|h)))|Iosua)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Judg"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:K\.?[\s\xa0]*sudcov|S(?:udcovia|dc)|Sud(?:cov)?|Judg)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Ruth"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:R(?:uth?|\xFAt))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Esd"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|Ezdr[a\xE1][sš](?:ova)?)|[y\xFD][\s\xa0]*list[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|[y\xFD][\s\xa0]*Ezdr[a\xE1][sš](?:ova)?)|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|(?:1[\s\xa0]*k|I)[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|1(?:[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|Esd))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Esd"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|Ezdr[a\xE1][sš](?:ova)?)|[y\xFD][\s\xa0]*list[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|[y\xFD][\s\xa0]*Ezdr[a\xE1][sš](?:ova)?)|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|(?:2[\s\xa0]*k|II)[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|2(?:[\s\xa0]*Ezdr[a\xE1][sš](?:ova)?|Esd))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Isa"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:I(?:z(?:a[ij][a\xE1][sš])?|sa))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Sam"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*)?|(?:[y\xFD][\s\xa0]*list|[y\xFD])[\s\xa0]*)Samuelova|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*Samuelova|(?:2[\s\xa0]*k|II)[\s\xa0]*Samuelova|2(?:[\s\xa0]*Samuelova|[\s\xa0]*S(?:am)?|Sam))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Sam"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*)?|(?:[y\xFD][\s\xa0]*list|[y\xFD])[\s\xa0]*)Samuelova|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*Samuelova|(?:1[\s\xa0]*k|I)[\s\xa0]*Samuelova|1(?:[\s\xa0]*Samuelova|[\s\xa0]*S(?:am)?|Sam))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Kgs"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:[SŠ]tvrt[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Kr[a\xE1][lľ]|Kr[a\xE1][lľ])ov|Druh[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Kr[a\xE1][lľ]|Kr[a\xE1][lľ])ov|(?:Druh[y\xFD][\s\xa0]*list|(?:4[\s\xa0]*k|2(?:[\s\xa0]*k)?|I[IV]|4)\.)[\s\xa0]*Kr[a\xE1][lľ]ov|(?:Druh[y\xFD]|4)[\s\xa0]*Kr[a\xE1][lľ]ov|(?:4[\s\xa0]*k|2[\s\xa0]*k|I[IV])[\s\xa0]*Kr[a\xE1][lľ]ov|2(?:[\s\xa0]*Kr[a\xE1][lľ]ov|[\s\xa0]*Kr[lľ]|[\s\xa0]*Kr|Kgs))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Kgs"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Tret(?:i(?:a[\s\xa0]*(?:kniha[\s\xa0]*Kr[a\xE1][lľ]|Kr[a\xE1][lľ])|[\s\xa0]*Kr[a\xE1][lľ])|\xED[\s\xa0]*Kr[a\xE1][lľ])ov|Prv[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Kr[a\xE1][lľ]|Kr[a\xE1][lľ])ov|(?:Prv[y\xFD][\s\xa0]*list|(?:III|3[\s\xa0]*k|1(?:[\s\xa0]*k)?|[3I])\.)[\s\xa0]*Kr[a\xE1][lľ]ov|(?:Prv[y\xFD]|3)[\s\xa0]*Kr[a\xE1][lľ]ov|(?:III|3[\s\xa0]*k|1[\s\xa0]*k|I)[\s\xa0]*Kr[a\xE1][lľ]ov|1(?:[\s\xa0]*Kr[a\xE1][lľ]ov|[\s\xa0]*Kr[lľ]|[\s\xa0]*Kr|Kgs))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Chr"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|[y\xFD][\s\xa0]*list[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|[y\xFD][\s\xa0]*Paralipomenon|[y\xFD][\s\xa0]*Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|(?:2[\s\xa0]*k|II)[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|2(?:[\s\xa0]*Paralipomenon|[\s\xa0]*Kroni(?:ck[a\xE1]|k)|[\s\xa0]*Kron\xEDk|[\s\xa0]*Kron|[\s\xa0]*Krn|Chr))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Chr"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|[y\xFD][\s\xa0]*list[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|[y\xFD][\s\xa0]*Paralipomenon|[y\xFD][\s\xa0]*Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|(?:1[\s\xa0]*k|I)[\s\xa0]*(?:Paralipomenon|Kron(?:i(?:ck[a\xE1]|k)|\xEDk))|1(?:[\s\xa0]*Paralipomenon|[\s\xa0]*Kroni(?:ck[a\xE1]|k)|[\s\xa0]*Kron\xEDk|[\s\xa0]*Kron|[\s\xa0]*Krn|Chr))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Ezra"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Ez(?:d(?:r[a\xE1][sš])?|ra))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Neh"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Neh(?:emi[a\xE1][sš])?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["GkEsth"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:G(?:r[e\xE9]cke[\s\xa0]*[cč]asti[\s\xa0]*knihy[\s\xa0]*Ester|kEsth)|Ester[\s\xa0]*gr)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Esth"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Est(?:er|h)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Job"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:K(?:niha[\s\xa0]*J[o\xF3]|\.[\s\xa0]*J[o\xF3]|[\s\xa0]*J[o\xF3])bova|J[o\xF3]b)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Ps"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:K(?:niha[\s\xa0]*[zž]|\.[\s\xa0]*[zž]|[\s\xa0]*[zž])almov|[ZŽ]al(?:t[a\xE1]r|my)|[ZŽ](?:alm)?|Ps)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["PrAzar"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Azarj[a\xE1][sš]ova[\s\xa0]*modlitba|PrAzar)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Prov"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:K(?:niha[\s\xa0]*pr[i\xED]slov[i\xED]|\.[\s\xa0]*pr[i\xED]slov[i\xED]|[\s\xa0]*pr[i\xED]slov[i\xED])|Pr(?:[i\xED]slovia|[i\xED]sl?|ov)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Eccl"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:K(?:oh(?:elet(?:[\s\xa0]*—[\s\xa0]*Kazate[lľ])?)?|(?:niha[\s\xa0]*kazate[lľ]|\.[\s\xa0]*kazate[lľ]|[\s\xa0]*kazate[lľ])ova|azate[lľ]|az)|E(?:kleziastes|ccl))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["SgThree"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Traja[\s\xa0]*ml[a\xE1]denci[\s\xa0]*v[\s\xa0]*rozp[a\xE1]lenej[\s\xa0]*peci|Piese[nň][\s\xa0]*ml[a\xE1]dencov[\s\xa0]*v[\s\xa0]*ohnivej[\s\xa0]*peci|SgThree)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Song"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:V(?:e[lľ]p(?:iese[nň][\s\xa0]*[SŠ]alam[u\xFA]nova)?|[lľ]p)|Piese[nň][\s\xa0]*[SŠ]alam[u\xFA]nova|P(?:iese[nň][\s\xa0]*piesn[i\xED]|Š)|Pies|Song)|(?:Ve[lľ]piese[nň]|Piese[nň])
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["PI:NAME:<NAME>END_PI"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jer(?:emi[a\xE1][sš])?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Ezek"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Ez(?:e(?:chiel|k))?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Dan"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Dan(?:iel)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Hos"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Ho(?:ze[a\xE1][sš]|s)|Oz(?:e[a\xE1][sš])?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Joel"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:PI:NAME:<NAME>END_PIel)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Amos"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:[A\xC1]m(?:os)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Obad"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Ob(?:edi[a\xE1][sš]|ad(?:i[a\xE1][sš])?)|Abd(?:i[a\xE1][sš])?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["JonPI:NAME:<NAME>END_PI"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jon(?:\xE1[sš]|a[hsš])?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Mic"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Mic(?:h(?:e[a\xE1][sš])?)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Nah"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:N(?:\xE1hum|ah(?:um)?))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Hab"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Hab(?:akuk)?|Ab(?:akuk)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Zeph"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Sof(?:oni[a\xE1][sš])?|Zeph)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Hag"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Hag(?:geus)?|Ag(?:geus|eus)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Zech"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Z(?:ach(?:ari[a\xE1][sš])?|ech))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Mal"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Mal(?:achi[a\xE1][sš])?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Matt"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Evanjelium[\s\xa0]*Pod[lľ]a[\s\xa0]*Mat[u\xFA][sš]a|M(?:at(?:[u\xFA][sš]a|t)|at(?:[u\xFA][sš])?|t))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Mark"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Evanjelium[\s\xa0]*Pod[lľ]a[\s\xa0]*Marka|M(?:ar(?:ka|ek)|ark|k))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Luke"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Evanjelium[\s\xa0]*Pod[lľ]a[\s\xa0]*Luk[a\xE1][sš]a|L(?:uk(?:[a\xE1][sš]a|e)|uk[a\xE1][sš]|k))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1John"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*J[a\xE1]|J[a\xE1])nov|[y\xFD][\s\xa0]*J[a\xE1]nov[\s\xa0]*list|[y\xFD][\s\xa0]*list[\s\xa0]*J[a\xE1]nov)|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*J[a\xE1]nov|(?:1[\s\xa0]*k|I)[\s\xa0]*J[a\xE1]nov|1(?:[\s\xa0]*J[a\xE1]nov|(?:J(?:oh|[a\xE1])|[\s\xa0]*J)n|[\s\xa0]*J))|(?:Prv[y\xFD][\s\xa0]*J[a\xE1]nov)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2John"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*J[a\xE1]|J[a\xE1])nov|[y\xFD][\s\xa0]*J[a\xE1]nov[\s\xa0]*list|[y\xFD][\s\xa0]*list[\s\xa0]*J[a\xE1]nov)|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*J[a\xE1]nov|(?:2[\s\xa0]*k|II)[\s\xa0]*J[a\xE1]nov|2(?:[\s\xa0]*J[a\xE1]nov|(?:J(?:oh|[a\xE1])|[\s\xa0]*J)n|[\s\xa0]*J))|(?:Druh[y\xFD][\s\xa0]*J[a\xE1]nov)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["3John"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Tret(?:i(?:a[\s\xa0]*(?:kniha[\s\xa0]*J[a\xE1]|J[a\xE1])nov|[\s\xa0]*J[a\xE1]nov[\s\xa0]*list)|\xED[\s\xa0]*J[a\xE1]nov[\s\xa0]*list)|(?:III|3(?:[\s\xa0]*k)?)\.[\s\xa0]*J[a\xE1]nov|(?:III|3[\s\xa0]*k)[\s\xa0]*J[a\xE1]nov|3(?:[\s\xa0]*J[a\xE1]nov|(?:J(?:oh|[a\xE1])|[\s\xa0]*J)n|[\s\xa0]*J))|(?:Tret[i\xED][\s\xa0]*J[a\xE1]nov)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["John"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Evanjelium[\s\xa0]*Pod[lľ]a[\s\xa0]*J[a\xE1]na|J(?:(?:oh)?n|[a\xE1]na|[a\xE1]n))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Acts"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Sk(?:utky(?:[\s\xa0]*apo[sš]tolov)?)?|Acts)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Rom"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:(?:List[\s\xa0]*Rimano|R(?:\xEDmsky|imsky|imano|i|o))m)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Cor"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))|Korin(?:ťano|t(?:sk[y\xFD]|ano)))|[y\xFD][\s\xa0]*list[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))|[y\xFD][\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano)))m|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))m|(?:2[\s\xa0]*k|II)[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))m|2(?:[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))m|(?:[\s\xa0]*K|C)or))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Cor"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))|Korin(?:ťano|t(?:sk[y\xFD]|ano)))|[y\xFD][\s\xa0]*list[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))|[y\xFD][\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano)))m|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))m|(?:1[\s\xa0]*k|I)[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))m|1(?:[\s\xa0]*Korin(?:ťano|t(?:sk[y\xFD]|ano))m|(?:[\s\xa0]*K|C)or))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Gal"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:List[\s\xa0]*Gala[tť]anom|Ga(?:latsk[y\xFD]m|latanom|laťanom|l)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Eph"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:List[\s\xa0]*Efezanom|E(?:fezsk[y\xFD]m|fezanom|ph|f))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Phil"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:List[\s\xa0]*Filipanom|Filipsk[y\xFD]m|Filipanom|Phil|Fil|Flp)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Col"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:List[\s\xa0]*Kolosanom|Kolosensk[y\xFD]m|Kolosanom|[CK]ol)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Thess"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))|Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))|[y\xFD][\s\xa0]*list[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))|[y\xFD][\s\xa0]*Tesaloni(?:čano|c(?:ano|k[y\xFD]))|[y\xFD][\s\xa0]*Sol[u\xFA]n[cč]ano|[y\xFD][\s\xa0]*Sol[u\xFA]nsky)m|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))m|(?:2[\s\xa0]*k|II)[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))m|2(?:[\s\xa0]*Tesaloni(?:čano|c(?:ano|k[y\xFD]))m|[\s\xa0]*Sol[u\xFA]n[cč]anom|[\s\xa0]*Sol[u\xFA]nskym|Thess|[\s\xa0]*(?:Sol|Tes)))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Thess"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))|Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))|[y\xFD][\s\xa0]*list[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))|[y\xFD][\s\xa0]*Tesaloni(?:čano|c(?:ano|k[y\xFD]))|[y\xFD][\s\xa0]*Sol[u\xFA]n[cč]ano|[y\xFD][\s\xa0]*Sol[u\xFA]nsky)m|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))m|(?:1[\s\xa0]*k|I)[\s\xa0]*(?:Tesaloni(?:čano|c(?:ano|k[y\xFD]))|Sol(?:[u\xFA]n[cč]ano|[u\xFA]nsky))m|1(?:[\s\xa0]*Tesaloni(?:čano|c(?:ano|k[y\xFD]))m|[\s\xa0]*Sol[u\xFA]n[cč]anom|[\s\xa0]*Sol[u\xFA]nskym|Thess|[\s\xa0]*(?:Sol|Tes)))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Tim"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Timotej?|Timotej?)|[y\xFD][\s\xa0]*list[\s\xa0]*Timotej?|[y\xFD][\s\xa0]*Timotej?)ovi|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*Timotej?ovi|(?:2[\s\xa0]*k|II)[\s\xa0]*Timotej?ovi|2(?:[\s\xa0]*Timotej?ovi|[\s\xa0]*?Tim))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Tim"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Timotej?|Timotej?)|[y\xFD][\s\xa0]*list[\s\xa0]*Timotej?|[y\xFD][\s\xa0]*Timotej?)ovi|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*Timotej?ovi|(?:1[\s\xa0]*k|I)[\s\xa0]*Timotej?ovi|1(?:[\s\xa0]*Timotej?ovi|[\s\xa0]*?Tim))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Titus"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:List[\s\xa0]*T[i\xED]tovi|T(?:[i\xED]tovi|itus|[i\xED]t))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Phlm"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:List[\s\xa0]*Filem[o\xF3]novi|Filemonovi|(?:File|(?:Ph|F)l)m)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Heb"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:List[\s\xa0]*Hebrejom|Hebrejom|[ZŽ]idom|Hebr?|[ZŽ]id)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jas"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:J(?:a(?:k(?:ubov(?:[\s\xa0]*List)?)?|s)|k))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Pet"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*)?Petrov|[y\xFD][\s\xa0]*Petrov[\s\xa0]*list|[y\xFD][\s\xa0]*list[\s\xa0]*Petrov|[y\xFD][\s\xa0]*Petrov)|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*Petrov|(?:2[\s\xa0]*k|II)[\s\xa0]*Petrov|2(?:[\s\xa0]*Petrov|(?:[\s\xa0]*P|Pe)t))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Pet"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*)?Petrov|[y\xFD][\s\xa0]*Petrov[\s\xa0]*list|[y\xFD][\s\xa0]*list[\s\xa0]*Petrov|[y\xFD][\s\xa0]*Petrov)|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*Petrov|(?:1[\s\xa0]*k|I)[\s\xa0]*Petrov|1(?:[\s\xa0]*Petrov|(?:[\s\xa0]*P|Pe)t))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jude"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:J(?:\xFAd(?:ov(?:[\s\xa0]*List)?)?|ud(?:ov(?:[\s\xa0]*List)?|e)?))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Tob"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Tob(?:i[a\xE1][sš])?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jdt"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:K(?:niha|\.)?[\s\xa0]*Juditina|J(?:udita|udit|dt))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Bar"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Proroctvo[\s\xa0]*Baruchovo|Bar(?:uch)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Sus"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Zuzan[ae]|Sus)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Macc"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Druh(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Ma(?:ch|k)|Ma(?:ch|k))|[y\xFD][\s\xa0]*list[\s\xa0]*Ma(?:ch|k)|[y\xFD][\s\xa0]*Ma(?:ch|k))abejcov|(?:2(?:[\s\xa0]*k)?|II)\.[\s\xa0]*Ma(?:ch|k)abejcov|(?:2[\s\xa0]*k|II)[\s\xa0]*Ma(?:ch|k)abejcov|2(?:[\s\xa0]*Ma(?:ch|k)abejcov|[\s\xa0]*Ma(?:ch|k)|Macc))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["3Macc"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Tret(?:i(?:a[\s\xa0]*(?:kniha[\s\xa0]*)?|[\s\xa0]*)|\xED[\s\xa0]*)Machabejcov|(?:III|3(?:[\s\xa0]*k)?)\.[\s\xa0]*Machabejcov|(?:III|3[\s\xa0]*k)[\s\xa0]*Machabejcov|3(?:[\s\xa0]*Machabejcov|[\s\xa0]*Mach|Macc|[\s\xa0]*Mak))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["4Macc"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:[SŠ]tvrt[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*)?Machabejcov|(?:4(?:[\s\xa0]*k)?|IV)\.[\s\xa0]*Machabejcov|(?:4[\s\xa0]*k|IV)[\s\xa0]*Machabejcov|4(?:[\s\xa0]*Machabejcov|[\s\xa0]*Mach|Macc|[\s\xa0]*Mak))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Macc"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:Prv(?:[a\xE1][\s\xa0]*(?:kniha[\s\xa0]*Mach|Ma(?:ch|k))|(?:[y\xFD][\s\xa0]*list|[y\xFD])[\s\xa0]*Mach)abejcov|(?:1(?:[\s\xa0]*k)?|I)\.[\s\xa0]*Machabejcov|(?:1[\s\xa0]*k|I)[\s\xa0]*Machabejcov|1(?:[\s\xa0]*Machabejcov|[\s\xa0]*Mach|Macc|[\s\xa0]*Mak))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
]
# Short-circuit the look if we know we want all the books.
return books if include_apocrypha is true and case_sensitive is "none"
# Filter out books in the Apocrypha if we don't want them. `Array.map` isn't supported below IE9.
out = []
for book in books
continue if include_apocrypha is false and book.apocrypha? and book.apocrypha is true
if case_sensitive is "books"
book.regexp = new RegExp book.regexp.source, "g"
out.push book
out
# Default to not using the Apocrypha
bcv_parser::regexps.books = bcv_parser::regexps.get_books false, "none"
|
[
{
"context": " expect(stream._entryToString({account: {name: \"hello\"}, msg: \"world\"}))\n .to.equal \"world\"\n\n ",
"end": 1016,
"score": 0.47678056359291077,
"start": 1011,
"tag": "USERNAME",
"value": "hello"
},
{
"context": " expect(stream._entryToString({account: ... | test/stringifierTest.coffee | yedidyak/bunyan-debug-stream | 32 | {expect} = require 'chai'
bunyanDebugStream = require '../src/BunyanDebugStream'
describe 'Tests with stringifiers', ->
it 'should work with a prefixer', ->
stream = bunyanDebugStream {
prefixers:
account: (account) -> account?.name
colors: false, showDate: false, showLevel: false, showLoggerName: false, showPid: false
}
# Should work for an account
expect(stream._entryToString({account: {name: "hello"}, msg: "world"}))
.to.equal "[hello] world"
# Should work if the account is missing
expect(stream._entryToString({account: null, msg: "world"}))
.to.equal "world"
it 'should hide fields if the prefixer returns null', ->
stream = bunyanDebugStream {
prefixers:
account: (account) -> null
colors: false, showDate: false, showLevel: false, showLoggerName: false, showPid: false
}
expect(stream._entryToString({account: {name: "hello"}, msg: "world"}))
.to.equal "world"
it 'should hide fields for a null prefixer', ->
stream = bunyanDebugStream {
prefixers:
account: null
colors: false, showDate: false, showLevel: false, showLoggerName: false, showPid: false
}
expect(stream._entryToString({account: {name: "hello"}, msg: "world"}))
.to.equal "world"
describe 'req stringifier', ->
it 'should work', ->
entry = {
req:
headers:
host: 'foo.com'
method: 'GET'
url: "/index.html"
user:
name: 'dave'
res:
headers:
"content-length": 500
responseTime: 100
statusCode: 404
}
{consumed, value, replaceMessage} = bunyanDebugStream.stdStringifiers.req(
entry.req, {entry, useColor: false})
expect(value).to.equal 'GET dave@foo.com/index.html 404 100ms - 500 bytes'
expect('req' in consumed).to.be.true
expect(replaceMessage, "replaceMessage").to.be.true
it 'should hide all the variables in a bunyan-express-logger req', ->
entry = {
"method": 'GET'
"status-code": 200
"url": '/index.html'
"res-headers": []
"req": {
headers: {
host: 'foo.com'
}
method: 'GET'
url: "/index.html"
}
msg: 'hello'
}
{consumed, value, replaceMessage} = bunyanDebugStream.stdStringifiers.req(
entry.req, {entry, useColor: false})
expect(value).to.equal 'GET foo.com/index.html 200'
expect('req' in consumed).to.be.true
expect('body' in consumed).to.be.true
expect(replaceMessage, "replaceMessage").to.be.false
| 192993 | {expect} = require 'chai'
bunyanDebugStream = require '../src/BunyanDebugStream'
describe 'Tests with stringifiers', ->
it 'should work with a prefixer', ->
stream = bunyanDebugStream {
prefixers:
account: (account) -> account?.name
colors: false, showDate: false, showLevel: false, showLoggerName: false, showPid: false
}
# Should work for an account
expect(stream._entryToString({account: {name: "hello"}, msg: "world"}))
.to.equal "[hello] world"
# Should work if the account is missing
expect(stream._entryToString({account: null, msg: "world"}))
.to.equal "world"
it 'should hide fields if the prefixer returns null', ->
stream = bunyanDebugStream {
prefixers:
account: (account) -> null
colors: false, showDate: false, showLevel: false, showLoggerName: false, showPid: false
}
expect(stream._entryToString({account: {name: "hello"}, msg: "world"}))
.to.equal "world"
it 'should hide fields for a null prefixer', ->
stream = bunyanDebugStream {
prefixers:
account: null
colors: false, showDate: false, showLevel: false, showLoggerName: false, showPid: false
}
expect(stream._entryToString({account: {name: "hello"}, msg: "world"}))
.to.equal "world"
describe 'req stringifier', ->
it 'should work', ->
entry = {
req:
headers:
host: 'foo.com'
method: 'GET'
url: "/index.html"
user:
name: '<NAME>'
res:
headers:
"content-length": 500
responseTime: 100
statusCode: 404
}
{consumed, value, replaceMessage} = bunyanDebugStream.stdStringifiers.req(
entry.req, {entry, useColor: false})
expect(value).to.equal 'GET <EMAIL>/index.html 404 100ms - 500 bytes'
expect('req' in consumed).to.be.true
expect(replaceMessage, "replaceMessage").to.be.true
it 'should hide all the variables in a bunyan-express-logger req', ->
entry = {
"method": 'GET'
"status-code": 200
"url": '/index.html'
"res-headers": []
"req": {
headers: {
host: 'foo.com'
}
method: 'GET'
url: "/index.html"
}
msg: 'hello'
}
{consumed, value, replaceMessage} = bunyanDebugStream.stdStringifiers.req(
entry.req, {entry, useColor: false})
expect(value).to.equal 'GET foo.com/index.html 200'
expect('req' in consumed).to.be.true
expect('body' in consumed).to.be.true
expect(replaceMessage, "replaceMessage").to.be.false
| true | {expect} = require 'chai'
bunyanDebugStream = require '../src/BunyanDebugStream'
describe 'Tests with stringifiers', ->
it 'should work with a prefixer', ->
stream = bunyanDebugStream {
prefixers:
account: (account) -> account?.name
colors: false, showDate: false, showLevel: false, showLoggerName: false, showPid: false
}
# Should work for an account
expect(stream._entryToString({account: {name: "hello"}, msg: "world"}))
.to.equal "[hello] world"
# Should work if the account is missing
expect(stream._entryToString({account: null, msg: "world"}))
.to.equal "world"
it 'should hide fields if the prefixer returns null', ->
stream = bunyanDebugStream {
prefixers:
account: (account) -> null
colors: false, showDate: false, showLevel: false, showLoggerName: false, showPid: false
}
expect(stream._entryToString({account: {name: "hello"}, msg: "world"}))
.to.equal "world"
it 'should hide fields for a null prefixer', ->
stream = bunyanDebugStream {
prefixers:
account: null
colors: false, showDate: false, showLevel: false, showLoggerName: false, showPid: false
}
expect(stream._entryToString({account: {name: "hello"}, msg: "world"}))
.to.equal "world"
describe 'req stringifier', ->
it 'should work', ->
entry = {
req:
headers:
host: 'foo.com'
method: 'GET'
url: "/index.html"
user:
name: 'PI:NAME:<NAME>END_PI'
res:
headers:
"content-length": 500
responseTime: 100
statusCode: 404
}
{consumed, value, replaceMessage} = bunyanDebugStream.stdStringifiers.req(
entry.req, {entry, useColor: false})
expect(value).to.equal 'GET PI:EMAIL:<EMAIL>END_PI/index.html 404 100ms - 500 bytes'
expect('req' in consumed).to.be.true
expect(replaceMessage, "replaceMessage").to.be.true
it 'should hide all the variables in a bunyan-express-logger req', ->
entry = {
"method": 'GET'
"status-code": 200
"url": '/index.html'
"res-headers": []
"req": {
headers: {
host: 'foo.com'
}
method: 'GET'
url: "/index.html"
}
msg: 'hello'
}
{consumed, value, replaceMessage} = bunyanDebugStream.stdStringifiers.req(
entry.req, {entry, useColor: false})
expect(value).to.equal 'GET foo.com/index.html 200'
expect('req' in consumed).to.be.true
expect('body' in consumed).to.be.true
expect(replaceMessage, "replaceMessage").to.be.false
|
[
{
"context": "d', pluginUrl])\nwindow._gaq.push([\"_setAccount\", \"UA-1653683-4\"])\nwindow._gaq.push([\"_trackPageview\"])\n",
"end": 250,
"score": 0.8151795268058777,
"start": 247,
"tag": "KEY",
"value": "UA-"
},
{
"context": "Url])\nwindow._gaq.push([\"_setAccount\", \"UA-1653683... | goingslowly/legacy/v2/lib/goingslowly/assets/js/gs/core.coffee | tkellen/infrastructure | 19 | window.gs = gs = window.gs || {}
# google analytics
window._gaq = window._gaq || []
pluginUrl = '//www.google-analytics.com/plugins/ga/inpage_linkid.js'
window._gaq.push(['_require', 'inpage_linkid', pluginUrl])
window._gaq.push(["_setAccount", "UA-1653683-4"])
window._gaq.push(["_trackPageview"])
# replace initial state so it works properly with popstate
# todo: does not work correctly with firefox for getting to initial page
window.onload = ->
history.replaceState({real: true});
# configure humane loggers
humane.info = humane.spawn({addnCls: "humane-info", timeout: 3500})
humane.error = humane.spawn({addnCls: "humane-error", timeout: 3500})
humane.success = humane.spawn({addnCls: "humane-success", timeout: 3500})
# run on page load for all pages
$ ->
# initialize autosuggest
$("#search").autosuggest({data: gs.autosuggest, offsetTop: 32})
# initialize gsdb tooltip
$("#gsdb").tipTip(
defaultPosition: "left"
content: "<strong class=\"color\">Behind the Scenes:</strong><br/>Statistics, expenses and more."
).removeAttr "title"
# flag external links to open in a new window
$(document).on "click", "a[rel=external]", (e) ->
window.open @href
e.preventDefault()
# ajaxify forms
$("form.ajax").each ->
gs.form.init(this)
# # enable newsletter colorbox
# $(".nl").colorbox
# opacity: 0.85
# href: "/newsletter?popup=true"
# title: " "
# scrolling: false
# height: 525
# enable kids colorbox
$("#kids").colorbox
opacity: 0.75
href: "/settings"
| 203694 | window.gs = gs = window.gs || {}
# google analytics
window._gaq = window._gaq || []
pluginUrl = '//www.google-analytics.com/plugins/ga/inpage_linkid.js'
window._gaq.push(['_require', 'inpage_linkid', pluginUrl])
window._gaq.push(["_setAccount", "<KEY>1653683-<KEY>"])
window._gaq.push(["_trackPageview"])
# replace initial state so it works properly with popstate
# todo: does not work correctly with firefox for getting to initial page
window.onload = ->
history.replaceState({real: true});
# configure humane loggers
humane.info = humane.spawn({addnCls: "humane-info", timeout: 3500})
humane.error = humane.spawn({addnCls: "humane-error", timeout: 3500})
humane.success = humane.spawn({addnCls: "humane-success", timeout: 3500})
# run on page load for all pages
$ ->
# initialize autosuggest
$("#search").autosuggest({data: gs.autosuggest, offsetTop: 32})
# initialize gsdb tooltip
$("#gsdb").tipTip(
defaultPosition: "left"
content: "<strong class=\"color\">Behind the Scenes:</strong><br/>Statistics, expenses and more."
).removeAttr "title"
# flag external links to open in a new window
$(document).on "click", "a[rel=external]", (e) ->
window.open @href
e.preventDefault()
# ajaxify forms
$("form.ajax").each ->
gs.form.init(this)
# # enable newsletter colorbox
# $(".nl").colorbox
# opacity: 0.85
# href: "/newsletter?popup=true"
# title: " "
# scrolling: false
# height: 525
# enable kids colorbox
$("#kids").colorbox
opacity: 0.75
href: "/settings"
| true | window.gs = gs = window.gs || {}
# google analytics
window._gaq = window._gaq || []
pluginUrl = '//www.google-analytics.com/plugins/ga/inpage_linkid.js'
window._gaq.push(['_require', 'inpage_linkid', pluginUrl])
window._gaq.push(["_setAccount", "PI:KEY:<KEY>END_PI1653683-PI:KEY:<KEY>END_PI"])
window._gaq.push(["_trackPageview"])
# replace initial state so it works properly with popstate
# todo: does not work correctly with firefox for getting to initial page
window.onload = ->
history.replaceState({real: true});
# configure humane loggers
humane.info = humane.spawn({addnCls: "humane-info", timeout: 3500})
humane.error = humane.spawn({addnCls: "humane-error", timeout: 3500})
humane.success = humane.spawn({addnCls: "humane-success", timeout: 3500})
# run on page load for all pages
$ ->
# initialize autosuggest
$("#search").autosuggest({data: gs.autosuggest, offsetTop: 32})
# initialize gsdb tooltip
$("#gsdb").tipTip(
defaultPosition: "left"
content: "<strong class=\"color\">Behind the Scenes:</strong><br/>Statistics, expenses and more."
).removeAttr "title"
# flag external links to open in a new window
$(document).on "click", "a[rel=external]", (e) ->
window.open @href
e.preventDefault()
# ajaxify forms
$("form.ajax").each ->
gs.form.init(this)
# # enable newsletter colorbox
# $(".nl").colorbox
# opacity: 0.85
# href: "/newsletter?popup=true"
# title: " "
# scrolling: false
# height: 525
# enable kids colorbox
$("#kids").colorbox
opacity: 0.75
href: "/settings"
|
[
{
"context": "= ruleStr.replace /\\s/g, '_'\n keys.push \"rule=#{ruleStr}\"\n keys.push \"viewbase=#{@getObserver()",
"end": 11543,
"score": 0.8728073239326477,
"start": 11541,
"tag": "KEY",
"value": "#{"
},
{
"context": "ffsetMatrix()\n \n keys.push \"viewoffset=#{rot}... | src/ui/application.coffee | undeadinu/hyperbolic-ca-simulator | 32 | "use strict"
#Core hyperbolic group compuatation library
{unity} = require "../core/vondyck_chain.coffee"
{ChainMap} = require "../core/chain_map.coffee"
{RegularTiling} = require "../core/regular_tiling.coffee"
{evaluateTotalisticAutomaton} = require "../core/cellular_automata.coffee"
{stringifyFieldData, parseFieldData, importField, randomFillFixedNum, exportField, randomStateGenerator} = require "../core/field.coffee"
{GenericTransitionFunc, BinaryTransitionFunc,DayNightTransitionFunc, parseTransitionFunction} = require "../core/rule.coffee"
M = require "../core/matrix3.coffee"
#Application components
{Animator} = require "./animator.coffee"
{MouseToolCombo} = require "./mousetool.coffee"
{Navigator} = require "./navigator.coffee"
{FieldObserver} = require "./observer.coffee"
{GenerateFileList, OpenDialog, SaveDialog} = require "./indexeddb.coffee"
#{FieldObserverWithRemoreRenderer} = require "./observer_remote.coffee"
#Misc utilities
{E, getAjax, ButtonGroup, windowWidth, windowHeight, documentWidth, removeClass, addClass, ValidatingInput} = require "./htmlutil.coffee"
{DomBuilder} = require "./dom_builder.coffee"
{parseIntChecked, parseFloatChecked} = require "../core/utils.coffee"
{parseUri} = require "./parseuri.coffee"
{getCanvasCursorPosition} = require "./canvas_util.coffee"
C2S = require "../ext/canvas2svg.js"
#{lzw_encode} = require "../ext/lzw.coffee"
require "../ext/polyfills.js"
require "../core/acosh_polyfill.coffee"
{GhostClickDetector} = require "./ghost_click_detector.coffee"
MIN_WIDTH = 100
minVisibleSize = 1/100
canvasSizeUpdateBlocked = false
randomFillNum = 2000
randomFillPercent = 0.4
class DefaultConfig
getGrid: -> [7,3]
getCellData: -> ""
getGeneration: -> 0
getFunctionCode: -> "B 3 S 2 3"
getViewBase: -> unity
getViewOffset: -> M.eye()
class UriConfig
constructor: ->
@keys = parseUri(""+window.location).queryKey
getGrid: ->
if @keys.grid?
try
match = @keys.grid.match /(\d+)[,;](\d+)/
throw new Error("Syntax is bad: #{@keys.grid}") unless match
n = parseIntChecked match[1]
m = parseIntChecked match[2]
return [n,m]
catch e
alert "Bad grid paramters: #{@keys.grid}"
return [7,3]
getCellData: ->@keys.cells
getGeneration: ->
if @keys.generation?
try
return parseIntChecked @keys.generation
catch e
alert "Bad generationn umber: #{@keys.generation}"
return 0
getFunctionCode: ->
if @keys.rule?
@keys.rule.replace /_/g, ' '
else
"B 3 S 2 3"
getViewBase: ->
return unity unless @keys.viewbase?
RegularTiling::parse @keys.viewbase
getViewOffset: ->
return M.eye() unless @keys.viewoffset?
[rot, dx, dy] = (parseFloatChecked part for part in @keys.viewoffset.split ':')
M.mul M.translationMatrix(dx, dy), M.rotationMatrix(rot)
class Application
constructor: ->
@tiling = null
@observer = null
@navigator = null
@animator = null
@cells = null
@generation = 0
@transitionFunc = null
@lastBinaryTransitionFunc = null
#@ObserverClass = FieldObserverWithRemoreRenderer
@ObserverClass = FieldObserver
@margin = 16 #margin pixels
setCanvasResize: (enable) -> canvasSizeUpdateBlocked = enable
getCanvasResize: -> canvasSizeUpdateBlocked
redraw: -> redraw()
getObserver: -> @observer
drawEverything: -> drawEverything canvas.width, canvas.height, context
uploadToServer: (name, cb) -> uploadToServer name, cb
getCanvas: -> canvas
getTransitionFunc: -> @transitionFunc
getMargin: -> if @observer.isDrawingHomePtr then @margin else 0
setShowLiveBorders: (isDrawing)->
@observer.isDrawingLiveBorders = isDrawing
redraw()
setDrawingHomePtr: (isDrawing)->
@observer.isDrawingHomePtr = isDrawing
redraw()
if localStorage?
localStorage.setItem "observer.isDrawingHomePtr", if isDrawing then "1" else "0"
console.log "store #{isDrawing}"
#Convert canvas X,Y coordinates to relative X,Y in (0..1) range
canvas2relative: (x,y) ->
s = Math.min(canvas.width, canvas.height) - 2*@getMargin()
isize = 2.0/s
[(x - canvas.width*0.5)*isize, (y - canvas.height*0.5)*isize]
initialize: (config = new DefaultConfig)->
[n,m] = config.getGrid()
@tiling = new RegularTiling n, m
cellData = config.getCellData()
if cellData
console.log "import: #{cellData}"
@importData cellData
else
@cells = new ChainMap
@cells.put unity, 1
@observer = new @ObserverClass @tiling, minVisibleSize, config.getViewBase(), config.getViewOffset()
if (isDrawing=localStorage?.getItem('observer.isDrawingHomePtr'))?
isDrawing = isDrawing is '1'
E('flag-origin-mark').checked = isDrawing
@observer.isDrawingHomePtr = isDrawing
console.log "restore #{isDrawing}"
else
@setDrawingHomePtr E('flag-origin-mark').checked
@setShowLiveBorders E('flag-live-borders').checked
@observer.onFinish = -> redraw()
@navigator = new Navigator this
@animator = new Animator this
@paintStateSelector = new PaintStateSelector this, E("state-selector"), E("state-selector-buttons")
@transitionFunc = parseTransitionFunction config.getFunctionCode(), application.tiling.n, application.tiling.m
@lastBinaryTransitionFunc = @transitionFunc
@openDialog = new OpenDialog this
@saveDialog = new SaveDialog this
@svgDialog = new SvgDialog this
@ruleEntry = new ValidatingInput E('rule-entry'),
((ruleStr) =>
console.log "Parsing TF {@tiling.n} {@tiling.m}"
parseTransitionFunction ruleStr, @tiling.n, @tiling.m),
((rule)->""+rule),
@transitionFunc
@ruleEntry.onparsed = (rule) => @doSetRule()
@updateRuleEditor()
@updateGridUI()
updateRuleEditor: ->
switch @transitionFunc.getType()
when "binary"
E('controls-rule-simple').style.display=""
E('controls-rule-generic').style.display="none"
when "custom"
E('controls-rule-simple').style.display="none"
E('controls-rule-generic').style.display=""
else
console.dir @transitionFunc
throw new Error "Bad transition func"
doSetRule: ->
if @ruleEntry.message?
alert "Failed to parse function: #{@ruleEntry.message}"
@transitionFunc = @lastBinaryTransitionFunc ? @transitionFunc
else
console.log "revalidate"
@ruleEntry.revalidate()
@transitionFunc = @ruleEntry.value
@lastBinaryTransitionFunc = @transitionFunc
@paintStateSelector.update @transitionFunc
console.log @transitionFunc
E('controls-rule-simple').style.display=""
E('controls-rule-generic').style.display="none"
setGridImpl: (n, m)->
@tiling = new RegularTiling n, m
#transition function should be changed too.
if @transitionFunc?
@transitionFunc = @transitionFunc.changeGrid @tiling.n, @tiling.m
@observer?.shutdown()
oldObserver = @observer
@observer = new @ObserverClass @tiling, minVisibleSize
@observer.isDrawingHomePtr = oldObserver.isDrawingHomePtr
@observer.onFinish = -> redraw()
@navigator?.clear()
doClearMemory()
doStopPlayer()
@updateGridUI()
updateGridUI: ->
E('entry-n').value = "" + application.tiling.n
E('entry-m').value = "" + application.tiling.m
E('grid-num-neighbors').innerHTML = (@tiling.m-2)*@tiling.n
#Actions
doRandomFill: ->
randomFillFixedNum @cells, randomFillPercent, unity, randomFillNum, @tiling, randomStateGenerator(@transitionFunc.numStates)
updatePopulation()
redraw()
doStep: (onFinish)->
#Set generation for thse rules who depend on it
@transitionFunc.setGeneration @generation
@cells = evaluateTotalisticAutomaton @cells, @tiling, @transitionFunc.evaluate.bind(@transitionFunc), @transitionFunc.plus, @transitionFunc.plusInitial
@generation += 1
redraw()
updatePopulation()
updateGeneration()
onFinish?()
doReset: ->
@cells = new ChainMap
@generation = 0
@cells.put unity, 1
updatePopulation()
updateGeneration()
redraw()
doSearch: ->
found = @navigator.search @cells
updateCanvasSize()
if found > 0
@navigator.navigateToResult 0
importData: (data)->
try
console.log "importing #{data}"
match = data.match /^(\d+)\$(\d+)\$(.*)$/
throw new Error("Data format unrecognized") unless match?
n = parseIntChecked match[1]
m = parseIntChecked match[2]
if n isnt @tiling.n or m isnt @tiling.m
console.log "Need to change grid"
@setGridImpl n, m
#normzlize chain coordinates, so that importing of user-generated data could be possible
normalizeChain = (chain) => @tiling.toCell @tiling.rewrite chain
@cells = importField parseFieldData(match[3]), null, normalizeChain
console.log "Imported #{@cells.count} cells"
catch e
alert "Faield to import data: #{e}"
@cells = new ChainMap
loadData: (record, cellData) ->
assert = (x) ->
throw new Error("Assertion failure") unless x?
x
@setGridImpl assert(record.gridN), assert(record.gridM)
@animator.reset()
@cells = importField parseFieldData assert(cellData)
@generation = assert record.generation
@observer.navigateTo @tiling.parse(assert(record.base)), assert(record.offset)
console.log "LOading func type= #{record.funcType}"
switch record.funcType
when "binary"
@transitionFunc = parseTransitionFunction record.funcId, record.gridN, record.gridM
@ruleEntry.setValue @transitionFunc
when "custom"
@transitionFunc = new GenericTransitionFunc record.funcId
@paintStateSelector.update @transitionFunc
else
throw new Error "unknown TF type #{record.funcType}"
updatePopulation()
updateGeneration()
@updateRuleEditor()
redraw()
getSaveData: (fname)->
#[data, catalogRecord]
fieldData = stringifyFieldData exportField @cells
funcId = ""+@getTransitionFunc()
funcType = @getTransitionFunc().getType()
catalogRecord =
gridN: @tiling.n
gridM: @tiling.m
name: fname
funcId: funcId
funcType: funcType
base: @getObserver().getViewCenter().toString()
offset: @getObserver().getViewOffsetMatrix()
size: fieldData.length
time: Date.now()
field: null
generation: @generation
return [fieldData, catalogRecord]
toggleCellAt: (x,y) ->
[xp, yp] = @canvas2relative x, y
try
cell = @observer.cellFromPoint xp, yp
catch e
return
if @cells.get(cell) is @paintStateSelector.state
@cells.remove cell
else
@cells.put cell, @paintStateSelector.state
redraw()
doExportSvg: ->
sz = 512
svgContext = new C2S sz, sz
drawEverything sz, sz, svgContext
# Show the generated SVG image
@svgDialog.show svgContext.getSerializedSvg()
doExportUrl: ->
#Export field state as URL
keys = []
keys.push "grid=#{@tiling.n},#{@tiling.m}"
if @cells.count != 0
keys.push "cells=#{@tiling.n}$#{@tiling.m}$#{stringifyFieldData exportField @cells}"
keys.push "generation=#{@generation}"
if @transitionFunc.getType() is "binary"
ruleStr = ""+@transitionFunc
ruleStr = ruleStr.replace /\s/g, '_'
keys.push "rule=#{ruleStr}"
keys.push "viewbase=#{@getObserver().getViewCenter()}"
[rot, dx, dy] = M.hyperbolicDecompose @getObserver().getViewOffsetMatrix()
keys.push "viewoffset=#{rot}:#{dx}:#{dy}"
basePath = location.href.replace(location.search, '')
uri = basePath + "?" + keys.join("&")
showExportDialog uri
class SvgDialog
constructor: (@application) ->
@dialog = E('svg-export-dialog')
@imgContainer = E('svg-image-container')
close: ->
@imgContainer.innerHTML = ""
@dialog.style.display="none"
show: (svg) ->
dataUri = "data:image/svg+xml;utf8," + encodeURIComponent(svg)
dom = new DomBuilder()
dom.tag('img').a('src', dataUri).a('alt', 'SVG image').a('title', 'Use right click to save SVG image').end()
@imgContainer.innerHTML = ""
@imgContainer.appendChild dom.finalize()
#@imgContainer.innerHTML = svg
@dialog.style.display=""
updateCanvasSize = ->
return if canvasSizeUpdateBlocked
docW = documentWidth()
winW = windowWidth()
if docW > winW
console.log "overflow"
usedWidth = docW - canvas.width
#console.log "#Win: #{windowWidth()}, doc: #{documentWidth()}, used: #{usedWidth}"
w = winW - usedWidth
else
#console.log "underflow"
containerAvail=E('canvas-container').clientWidth
#console.log "awail width: #{containerAvail}"
w = containerAvail
#now calculae available height
canvasRect = canvas.getBoundingClientRect()
winH = windowHeight()
h = winH - canvasRect.top
navWrap = E('navigator-wrap')
navWrap.style.height = "#{winH - navWrap.getBoundingClientRect().top - 16}px"
#get the smaller of both
w = Math.min(w,h)
#reduce it a bit
w -= 16
#make width multiple of 16
w = w & ~ 15
#console.log "New w is #{w}"
if w <= MIN_WIDTH
w = MIN_WIDTH
if canvas.width isnt w
canvas.width = canvas.height = w
redraw()
E('image-size').value = ""+w
return
doSetFixedSize = (isFixed) ->
if isFixed
size = parseIntChecked E('image-size').value
if size <= 0 or size >=65536
throw new Error "Bad size: #{size}"
canvasSizeUpdateBlocked = true
canvas.width = canvas.height = size
redraw()
else
canvasSizeUpdateBlocked = false
updateCanvasSize()
class PaintStateSelector
constructor: (@application, @container, @buttonContainer)->
@state = 1
@numStates = 2
update: ->
numStates = @application.getTransitionFunc().numStates
#only do something if number of states changed
return if numStates == @numStates
@numStates = numStates
console.log "Num states changed to #{numStates}"
if @state >= numStates
@state = 1
@buttonContainer.innerHTML = ''
if numStates <= 2
@container.style.display = 'none'
@buttons = null
@state2id = null
else
@container.style.display = ''
dom = new DomBuilder()
id2state = {}
@state2id = {}
for state in [1...numStates]
color = @application.observer.getColorForState state
btnId = "select-state-#{state}"
@state2id[state] = btnId
id2state[btnId] = state
dom.tag('button').store('btn')\
.CLASS(if state is @state then 'btn-selected' else '')\
.ID(btnId)\
.a('style', "background-color:#{color}")\
.text(''+state)\
.end()
#dom.vars.btn.onclick = (e)->
@buttonContainer.appendChild dom.finalize()
@buttons = new ButtonGroup @buttonContainer, 'button'
@buttons.addEventListener 'change', (e, btnId, oldBtn)=>
if (state = id2state[btnId])?
@state = state
setState: (newState) ->
return if newState is @state
return unless @state2id[newState]?
@state = newState
if @buttons
@buttons.setButton @state2id[newState]
serverSupportsUpload = -> ((""+window.location).match /:8000\//) and true
# ============================================ app code ===============
#
if serverSupportsUpload()
console.log "Enable upload"
E('animate-controls').style.display=''
canvas = E "canvas"
context = canvas.getContext "2d"
dragHandler = null
ghostClickDetector = new GhostClickDetector
player = null
playerTimeout = 500
autoplayCriticalPopulation = 90000
doStartPlayer = ->
return if player?
runPlayerStep = ->
if application.cells.count >= autoplayCriticalPopulation
alert "Population reached #{application.cells.count}, stopping auto-play"
player = null
else
player = setTimeout( (-> application.doStep(runPlayerStep)), playerTimeout )
updatePlayButtons()
runPlayerStep()
doStopPlayer = ->
if player
clearTimeout player
player = null
updatePlayButtons()
doTogglePlayer = ->
if player
doStopPlayer()
else
doStartPlayer()
updateGenericRuleStatus = (status)->
span = E 'generic-tf-status'
span.innerHTML = status
span.setAttribute('class', 'generic-tf-status-#{status.toLowerCase()}')
updatePlayButtons = ->
E('btn-play-start').style.display = if player then "none" else ''
E('btn-play-stop').style.display = unless player then "none" else ''
dirty = true
redraw = -> dirty = true
drawEverything = (w, h, context) ->
return false unless application.observer.canDraw()
context.fillStyle = "white"
#context.clearRect 0, 0, canvas.width, canvas.height
context.fillRect 0, 0, w, h
context.save()
s = Math.min( w, h ) / 2 #
s1 = s-application.getMargin()
context.translate s, s
application.observer.draw application.cells, context, s1
context.restore()
return true
fpsLimiting = true
lastTime = Date.now()
fpsDefault = 30
dtMax = 1000.0/fpsDefault #
redrawLoop = ->
if dirty
if not fpsLimiting or ((t=Date.now()) - lastTime > dtMax)
if drawEverything canvas.width, canvas.height, context
tDraw = Date.now() - t
#adaptively update FPS
dtMax = dtMax*0.9 + tDraw*2*0.1
dirty = false
lastTime = t
requestAnimationFrame redrawLoop
isPanMode = true
doCanvasMouseDown = (e) ->
#Allow normal right-click to support image sacing
E('canvas-container').focus()
return if e.button is 2
#Only in mozilla?
canvas.setCapture? true
e.preventDefault()
[x,y] = getCanvasCursorPosition e, canvas
isPanAction = (e.button is 1) ^ (e.shiftKey) ^ (isPanMode)
unless isPanAction
application.toggleCellAt x, y
updatePopulation()
else
dragHandler = new MouseToolCombo application, x, y
doCanvasMouseUp = (e) ->
e.preventDefault()
if dragHandler isnt null
dragHandler?.mouseUp e
dragHandler = null
doCanvasTouchStart = (e)->
if e.touches.length is 1
doCanvasMouseDown(e)
e.preventDefault()
doCanvasTouchLeave = (e)->
doCanvasMouseOut(e)
doCanvasTouchEnd = (e)->
e.preventDefault()
doCanvasMouseUp(e)
doCanvasTouchMove = (e)->
doCanvasMouseMove(e)
doSetPanMode = (mode) ->
isPanMode = mode
bpan = E('btn-mode-pan')
bedit = E('btn-mode-edit')
removeClass bpan, 'button-active'
removeClass bedit, 'button-active'
addClass (if isPanMode then bpan else bedit), 'button-active'
doCanvasMouseMove = (e) ->
isPanAction = (e.shiftKey) ^ (isPanMode)
E('canvas-container').style.cursor = if isPanAction then 'move' else 'default'
if dragHandler isnt null
e.preventDefault()
dragHandler.mouseMoved e
doOpenEditor = ->
E('generic-tf-code').value = application.transitionFunc.code
E('generic-tf-editor').style.display = ''
doCloseEditor = ->
E('generic-tf-editor').style.display = 'none'
doSetRuleGeneric = ->
try
console.log "Set generic rule"
application.transitionFunc = new GenericTransitionFunc E('generic-tf-code').value
updateGenericRuleStatus 'Compiled'
application.paintStateSelector.update application.transitionFunc
application.updateRuleEditor()
E('controls-rule-simple').style.display="none"
E('controls-rule-generic').style.display=""
true
catch e
alert "Failed to parse function: #{e}"
updateGenericRuleStatus 'Error'
false
doSetGrid = ->
try
n = parseInt E('entry-n').value, 10
m = parseInt E('entry-m').value, 10
if Number.isNaN(n) or n <= 0
throw new Error "Parameter N is bad"
if Number.isNaN(m) or m <= 0
throw new Error "Parameter M is bad"
#if 1/n + 1/m <= 1/2
if 2*(n+m) >= n*m
throw new Error "Tessellation {#{n}; #{m}} is not hyperbolic and not supported."
catch e
alert ""+e
return
application.setGridImpl n, m
application.doReset()
application.animator.reset()
updatePopulation = ->
E('population').innerHTML = ""+application.cells.count
updateGeneration = ->
E('generation').innerHTML = ""+application.generation
#exportTrivial = (cells) ->
# parts = []
# cells.forItems (cell, value)->
# parts.push ""+cell
# parts.push ""+value
# return parts.join " "
doExport = ->
data = stringifyFieldData exportField application.cells
n = application.tiling.n
m = application.tiling.m
showExportDialog "#{n}$#{m}$#{data}"
doExportClose = ->
E('export-dialog').style.display = 'none'
uploadToServer = (imgname, callback)->
dataURL = canvas.toDataURL();
cb = (blob) ->
formData = new FormData()
formData.append "file", blob, imgname
ajax = getAjax()
ajax.open 'POST', '/uploads/', false
ajax.onreadystatechange = -> callback(ajax)
ajax.send(formData)
canvas.toBlob cb, "image/png"
memo = null
doMemorize = ->
memo =
cells: application.cells.copy()
viewCenter: application.observer.getViewCenter()
viewOffset: application.observer.getViewOffsetMatrix()
generation: application.generation
console.log "Position memoized"
updateMemoryButtons()
doRemember = ->
if memo is null
console.log "nothing to remember"
else
application.cells = memo.cells.copy()
application.generation = memo.generation
application.observer.navigateTo memo.viewCenter, memo.viewOffset
updatePopulation()
updateGeneration()
doClearMemory = ->
memo = null
updateMemoryButtons()
updateMemoryButtons = ->
E('btn-mem-get').disabled = E('btn-mem-clear').disabled = memo is null
encodeVisible = ->
iCenter = application.tiling.inverse application.observer.cellFromPoint(0,0)
visibleCells = new ChainMap
for [cell, state] in application.observer.visibleCells application.cells
translatedCell = application.tiling.append iCenter, cell
translatedCell = application.tiling.toCell translatedCell
visibleCells.put translatedCell, state
return exportField visibleCells
showExportDialog = (sdata) ->
E('export').value = sdata
E('export-dialog').style.display = ''
E('export').focus()
E('export').select()
doExportVisible = ->
n = application.tiling.n
m = application.tiling.m
data = stringifyFieldData encodeVisible()
showExportDialog "#{n}$#{m}$#{data}"
doShowImport = ->
E('import-dialog').style.display = ''
E('import').focus()
doImportCancel = ->
E('import-dialog').style.display = 'none'
E('import').value=''
doImport = ->
try
application.importData E('import').value
updatePopulation()
redraw()
E('import-dialog').style.display = 'none'
E('import').value=''
catch e
alert "Error parsing: #{e}"
doEditAsGeneric = ->
application.transitionFunc = application.transitionFunc.toGeneric()
updateGenericRuleStatus 'Compiled'
application.paintStateSelector.update application.transitionFunc
application.updateRuleEditor()
doOpenEditor()
doDisableGeneric = ->
application.doSetRule()
doNavigateHome = ->
application.observer.navigateTo unity
# ============ Bind Events =================
E("btn-reset").addEventListener "click", ->application.doReset()
E("btn-step").addEventListener "click", ->application.doStep()
mouseMoveReceiver = E("canvas-container")
mouseMoveReceiver.addEventListener "mousedown", (e) -> doCanvasMouseDown(e) unless ghostClickDetector.isGhost
mouseMoveReceiver.addEventListener "mouseup", (e) -> doCanvasMouseUp(e) unless ghostClickDetector.isGhost
mouseMoveReceiver.addEventListener "mousemove", doCanvasMouseMove
mouseMoveReceiver.addEventListener "mousedrag", doCanvasMouseMove
mouseMoveReceiver.addEventListener "touchstart", doCanvasTouchStart
mouseMoveReceiver.addEventListener "touchend", doCanvasTouchEnd
mouseMoveReceiver.addEventListener "touchmove", doCanvasTouchMove
mouseMoveReceiver.addEventListener "touchleave", doCanvasTouchLeave
ghostClickDetector.addListeners canvas
E("btn-set-rule").addEventListener "click", (e)->application.doSetRule()
E("btn-set-rule-generic").addEventListener "click", (e)->
doSetRuleGeneric()
doCloseEditor()
E("btn-rule-generic-close-editor").addEventListener "click", doCloseEditor
E("btn-set-grid").addEventListener "click", doSetGrid
E("btn-export").addEventListener "click", doExport
E('btn-search').addEventListener 'click', ->application.doSearch()
E('btn-random').addEventListener 'click', -> application.doRandomFill()
E('btn-rule-make-generic').addEventListener 'click', doEditAsGeneric
E('btn-edit-rule').addEventListener 'click', doOpenEditor
E('btn-disable-generic-rule').addEventListener 'click', doDisableGeneric
E('btn-export-close').addEventListener 'click', doExportClose
E('btn-import').addEventListener 'click', doShowImport
E('btn-import-cancel').addEventListener 'click', doImportCancel
E('btn-import-run').addEventListener 'click', doImport
#initialize
E('btn-mem-set').addEventListener 'click', doMemorize
E('btn-mem-get').addEventListener 'click', doRemember
E('btn-mem-clear').addEventListener 'click', doClearMemory
E('btn-exp-visible').addEventListener 'click', doExportVisible
E('btn-nav-home').addEventListener 'click', doNavigateHome
window.addEventListener 'resize', updateCanvasSize
E('btn-nav-clear').addEventListener 'click', (e) -> application.navigator.clear()
E('btn-play-start').addEventListener 'click', doTogglePlayer
E('btn-play-stop').addEventListener 'click', doTogglePlayer
E('animate-set-start').addEventListener 'click', -> application.animator.setStart application.observer
E('animate-set-end').addEventListener 'click', -> application.animator.setEnd application.observer
E('animate-view-start').addEventListener 'click', -> application.animator.viewStart application.observer
E('animate-view-end').addEventListener 'click', -> application.animator.viewEnd application.observer
E('btn-animate-derotate').addEventListener 'click', -> application.animator.derotate()
E('btn-upload-animation').addEventListener 'click', (e)->
application.animator.animate application.observer, parseIntChecked(E('animate-frame-per-generation').value), parseIntChecked(E('animate-generations').value), (-> null)
E('btn-animate-cancel').addEventListener 'click', (e)->application.animator.cancelWork()
#register once — the listener was previously added twice, straightening the view twice per click
E('view-straighten').addEventListener 'click', (e)-> application.observer.straightenView()
E('image-fix-size').addEventListener 'click', (e)-> doSetFixedSize E('image-fix-size').checked
E('image-size').addEventListener 'change', (e) ->
E('image-fix-size').checked=true
doSetFixedSize true
E('flag-origin-mark').addEventListener 'change', (e)->
application.setDrawingHomePtr E('flag-origin-mark').checked
E('flag-live-borders').addEventListener 'change', (e)->
application.setShowLiveBorders E('flag-live-borders').checked
E('btn-mode-edit').addEventListener 'click', (e) -> doSetPanMode false
E('btn-mode-pan').addEventListener 'click', (e) -> doSetPanMode true
E('btn-db-save').addEventListener 'click', (e) -> application.saveDialog.show()
E('btn-db-load').addEventListener 'click', (e) -> application.openDialog.show()
E('btn-export-svg').addEventListener 'click', (e) -> application.doExportSvg()
E('btn-svg-export-dialog-close').addEventListener 'click', (e) -> application.svgDialog.close()
E('btn-export-uri').addEventListener 'click', (e) -> application.doExportUrl()
shortcuts =
'N': -> application.doStep()
'C': -> application.doReset()
'S': -> application.doSearch()
'R': ->application.doRandomFill()
'1': (e) -> application.paintStateSelector.setState 1
'2': (e) -> application.paintStateSelector.setState 2
'3': (e) -> application.paintStateSelector.setState 3
'4': (e) -> application.paintStateSelector.setState 4
'5': (e) -> application.paintStateSelector.setState 5
'M': doMemorize
'U': doRemember
'UA': doClearMemory
'H': doNavigateHome
'G': doTogglePlayer
'SA': (e) -> application.observer.straightenView()
'#32': doTogglePlayer
'P': (e) -> doSetPanMode true
'E': (e) -> doSetPanMode false
'SC': (e) -> application.saveDialog.show()
'OC': (e) -> application.openDialog.show()
document.addEventListener "keydown", (e)->
focused = document.activeElement
if focused and focused.tagName.toLowerCase() in ['textarea', 'input']
return
keyCode = if e.keyCode > 32 and e.keyCode < 128
String.fromCharCode e.keyCode
else
'#' + e.keyCode
keyCode += "C" if e.ctrlKey
keyCode += "A" if e.altKey
keyCode += "S" if e.shiftKey
#console.log keyCode
if (handler = shortcuts[keyCode])?
e.preventDefault()
handler(e)
##Application startup
application = new Application
application.initialize new UriConfig
doSetPanMode true
updatePopulation()
updateGeneration()
updateCanvasSize()
updateMemoryButtons()
updatePlayButtons()
redrawLoop()
#application.saveDialog.show()
"use strict"
#Core hyperbolic group compuatation library
{unity} = require "../core/vondyck_chain.coffee"
{ChainMap} = require "../core/chain_map.coffee"
{RegularTiling} = require "../core/regular_tiling.coffee"
{evaluateTotalisticAutomaton} = require "../core/cellular_automata.coffee"
{stringifyFieldData, parseFieldData, importField, randomFillFixedNum, exportField, randomStateGenerator} = require "../core/field.coffee"
{GenericTransitionFunc, BinaryTransitionFunc,DayNightTransitionFunc, parseTransitionFunction} = require "../core/rule.coffee"
M = require "../core/matrix3.coffee"
#Application components
{Animator} = require "./animator.coffee"
{MouseToolCombo} = require "./mousetool.coffee"
{Navigator} = require "./navigator.coffee"
{FieldObserver} = require "./observer.coffee"
{GenerateFileList, OpenDialog, SaveDialog} = require "./indexeddb.coffee"
#{FieldObserverWithRemoreRenderer} = require "./observer_remote.coffee"
#Misc utilities
{E, getAjax, ButtonGroup, windowWidth, windowHeight, documentWidth, removeClass, addClass, ValidatingInput} = require "./htmlutil.coffee"
{DomBuilder} = require "./dom_builder.coffee"
{parseIntChecked, parseFloatChecked} = require "../core/utils.coffee"
{parseUri} = require "./parseuri.coffee"
{getCanvasCursorPosition} = require "./canvas_util.coffee"
C2S = require "../ext/canvas2svg.js"
#{lzw_encode} = require "../ext/lzw.coffee"
require "../ext/polyfills.js"
require "../core/acosh_polyfill.coffee"
{GhostClickDetector} = require "./ghost_click_detector.coffee"
MIN_WIDTH = 100
minVisibleSize = 1/100
canvasSizeUpdateBlocked = false
randomFillNum = 2000
randomFillPercent = 0.4
#Fallback configuration used when the page URI supplies no parameters:
#{7;3} tessellation, empty field, Conway-like rule "B 3 S 2 3", identity view.
class DefaultConfig
  getGrid: -> [7,3]               #[n, m]: Schlafli parameters of the tiling
  getCellData: -> ""              #no initial cells
  getGeneration: -> 0
  getFunctionCode: -> "B 3 S 2 3" #default binary transition rule
  getViewBase: -> unity           #view centered at the origin cell
  getViewOffset: -> M.eye()       #no additional view transformation
#Configuration source that reads the initial application state from the
#page URI query string. Each getter falls back to the DefaultConfig value
#when its key is absent or malformed (alerting the user on parse errors).
class UriConfig
  constructor: ->
    @keys = parseUri(""+window.location).queryKey
  #Parse "grid=N,M" (or "N;M"); returns [7,3] on absence or failure.
  getGrid: ->
    if @keys.grid?
      try
        match = @keys.grid.match /(\d+)[,;](\d+)/
        throw new Error("Syntax is bad: #{@keys.grid}") unless match
        n = parseIntChecked match[1]
        m = parseIntChecked match[2]
        return [n,m]
      catch e
        alert "Bad grid parameters: #{@keys.grid}"
    return [7,3]
  getCellData: ->@keys.cells
  #Parse "generation=<int>"; returns 0 on absence or failure.
  getGeneration: ->
    if @keys.generation?
      try
        return parseIntChecked @keys.generation
      catch e
        alert "Bad generation number: #{@keys.generation}"
    return 0
  #Rule code; underscores in the URI stand for spaces (see doExportUrl).
  getFunctionCode: ->
    if @keys.rule?
      @keys.rule.replace /_/g, ' '
    else
      "B 3 S 2 3"
  getViewBase: ->
    return unity unless @keys.viewbase?
    RegularTiling::parse @keys.viewbase
  #Parse "viewoffset=rot:dx:dy" into a view transformation matrix.
  getViewOffset: ->
    return M.eye() unless @keys.viewoffset?
    [rot, dx, dy] = (parseFloatChecked part for part in @keys.viewoffset.split ':')
    M.mul M.translationMatrix(dx, dy), M.rotationMatrix(rot)
class Application
constructor: ->
@tiling = null
@observer = null
@navigator = null
@animator = null
@cells = null
@generation = 0
@transitionFunc = null
@lastBinaryTransitionFunc = null
#@ObserverClass = FieldObserverWithRemoreRenderer
@ObserverClass = FieldObserver
@margin = 16 #margin pixels
setCanvasResize: (enable) -> canvasSizeUpdateBlocked = enable
getCanvasResize: -> canvasSizeUpdateBlocked
redraw: -> redraw()
getObserver: -> @observer
drawEverything: -> drawEverything canvas.width, canvas.height, context
uploadToServer: (name, cb) -> uploadToServer name, cb
getCanvas: -> canvas
getTransitionFunc: -> @transitionFunc
getMargin: -> if @observer.isDrawingHomePtr then @margin else 0
setShowLiveBorders: (isDrawing)->
@observer.isDrawingLiveBorders = isDrawing
redraw()
setDrawingHomePtr: (isDrawing)->
@observer.isDrawingHomePtr = isDrawing
redraw()
if localStorage?
localStorage.setItem "observer.isDrawingHomePtr", if isDrawing then "1" else "0"
console.log "store #{isDrawing}"
#Convert canvas X,Y coordinates to relative X,Y in (0..1) range
canvas2relative: (x,y) ->
s = Math.min(canvas.width, canvas.height) - 2*@getMargin()
isize = 2.0/s
[(x - canvas.width*0.5)*isize, (y - canvas.height*0.5)*isize]
initialize: (config = new DefaultConfig)->
[n,m] = config.getGrid()
@tiling = new RegularTiling n, m
cellData = config.getCellData()
if cellData
console.log "import: #{cellData}"
@importData cellData
else
@cells = new ChainMap
@cells.put unity, 1
@observer = new @ObserverClass @tiling, minVisibleSize, config.getViewBase(), config.getViewOffset()
if (isDrawing=localStorage?.getItem('observer.isDrawingHomePtr'))?
isDrawing = isDrawing is '1'
E('flag-origin-mark').checked = isDrawing
@observer.isDrawingHomePtr = isDrawing
console.log "restore #{isDrawing}"
else
@setDrawingHomePtr E('flag-origin-mark').checked
@setShowLiveBorders E('flag-live-borders').checked
@observer.onFinish = -> redraw()
@navigator = new Navigator this
@animator = new Animator this
@paintStateSelector = new PaintStateSelector this, E("state-selector"), E("state-selector-buttons")
@transitionFunc = parseTransitionFunction config.getFunctionCode(), application.tiling.n, application.tiling.m
@lastBinaryTransitionFunc = @transitionFunc
@openDialog = new OpenDialog this
@saveDialog = new SaveDialog this
@svgDialog = new SvgDialog this
@ruleEntry = new ValidatingInput E('rule-entry'),
((ruleStr) =>
console.log "Parsing TF {@tiling.n} {@tiling.m}"
parseTransitionFunction ruleStr, @tiling.n, @tiling.m),
((rule)->""+rule),
@transitionFunc
@ruleEntry.onparsed = (rule) => @doSetRule()
@updateRuleEditor()
@updateGridUI()
updateRuleEditor: ->
switch @transitionFunc.getType()
when "binary"
E('controls-rule-simple').style.display=""
E('controls-rule-generic').style.display="none"
when "custom"
E('controls-rule-simple').style.display="none"
E('controls-rule-generic').style.display=""
else
console.dir @transitionFunc
throw new Error "Bad transition func"
doSetRule: ->
if @ruleEntry.message?
alert "Failed to parse function: #{@ruleEntry.message}"
@transitionFunc = @lastBinaryTransitionFunc ? @transitionFunc
else
console.log "revalidate"
@ruleEntry.revalidate()
@transitionFunc = @ruleEntry.value
@lastBinaryTransitionFunc = @transitionFunc
@paintStateSelector.update @transitionFunc
console.log @transitionFunc
E('controls-rule-simple').style.display=""
E('controls-rule-generic').style.display="none"
setGridImpl: (n, m)->
@tiling = new RegularTiling n, m
#transition function should be changed too.
if @transitionFunc?
@transitionFunc = @transitionFunc.changeGrid @tiling.n, @tiling.m
@observer?.shutdown()
oldObserver = @observer
@observer = new @ObserverClass @tiling, minVisibleSize
@observer.isDrawingHomePtr = oldObserver.isDrawingHomePtr
@observer.onFinish = -> redraw()
@navigator?.clear()
doClearMemory()
doStopPlayer()
@updateGridUI()
updateGridUI: ->
E('entry-n').value = "" + application.tiling.n
E('entry-m').value = "" + application.tiling.m
E('grid-num-neighbors').innerHTML = (@tiling.m-2)*@tiling.n
#Actions
doRandomFill: ->
randomFillFixedNum @cells, randomFillPercent, unity, randomFillNum, @tiling, randomStateGenerator(@transitionFunc.numStates)
updatePopulation()
redraw()
doStep: (onFinish)->
#Set generation for thse rules who depend on it
@transitionFunc.setGeneration @generation
@cells = evaluateTotalisticAutomaton @cells, @tiling, @transitionFunc.evaluate.bind(@transitionFunc), @transitionFunc.plus, @transitionFunc.plusInitial
@generation += 1
redraw()
updatePopulation()
updateGeneration()
onFinish?()
doReset: ->
@cells = new ChainMap
@generation = 0
@cells.put unity, 1
updatePopulation()
updateGeneration()
redraw()
doSearch: ->
found = @navigator.search @cells
updateCanvasSize()
if found > 0
@navigator.navigateToResult 0
importData: (data)->
try
console.log "importing #{data}"
match = data.match /^(\d+)\$(\d+)\$(.*)$/
throw new Error("Data format unrecognized") unless match?
n = parseIntChecked match[1]
m = parseIntChecked match[2]
if n isnt @tiling.n or m isnt @tiling.m
console.log "Need to change grid"
@setGridImpl n, m
#normzlize chain coordinates, so that importing of user-generated data could be possible
normalizeChain = (chain) => @tiling.toCell @tiling.rewrite chain
@cells = importField parseFieldData(match[3]), null, normalizeChain
console.log "Imported #{@cells.count} cells"
catch e
alert "Faield to import data: #{e}"
@cells = new ChainMap
loadData: (record, cellData) ->
assert = (x) ->
throw new Error("Assertion failure") unless x?
x
@setGridImpl assert(record.gridN), assert(record.gridM)
@animator.reset()
@cells = importField parseFieldData assert(cellData)
@generation = assert record.generation
@observer.navigateTo @tiling.parse(assert(record.base)), assert(record.offset)
console.log "LOading func type= #{record.funcType}"
switch record.funcType
when "binary"
@transitionFunc = parseTransitionFunction record.funcId, record.gridN, record.gridM
@ruleEntry.setValue @transitionFunc
when "custom"
@transitionFunc = new GenericTransitionFunc record.funcId
@paintStateSelector.update @transitionFunc
else
throw new Error "unknown TF type #{record.funcType}"
updatePopulation()
updateGeneration()
@updateRuleEditor()
redraw()
getSaveData: (fname)->
#[data, catalogRecord]
fieldData = stringifyFieldData exportField @cells
funcId = ""+@getTransitionFunc()
funcType = @getTransitionFunc().getType()
catalogRecord =
gridN: @tiling.n
gridM: @tiling.m
name: fname
funcId: funcId
funcType: funcType
base: @getObserver().getViewCenter().toString()
offset: @getObserver().getViewOffsetMatrix()
size: fieldData.length
time: Date.now()
field: null
generation: @generation
return [fieldData, catalogRecord]
toggleCellAt: (x,y) ->
[xp, yp] = @canvas2relative x, y
try
cell = @observer.cellFromPoint xp, yp
catch e
return
if @cells.get(cell) is @paintStateSelector.state
@cells.remove cell
else
@cells.put cell, @paintStateSelector.state
redraw()
doExportSvg: ->
sz = 512
svgContext = new C2S sz, sz
drawEverything sz, sz, svgContext
# Show the generated SVG image
@svgDialog.show svgContext.getSerializedSvg()
doExportUrl: ->
#Export field state as URL
keys = []
keys.push "grid=#{@tiling.n},#{@tiling.m}"
if @cells.count != 0
keys.push "cells=#{@tiling.n}$#{@tiling.m}$#{stringifyFieldData exportField @cells}"
keys.push "generation=#{@generation}"
if @transitionFunc.getType() is "binary"
ruleStr = ""+@transitionFunc
ruleStr = ruleStr.replace /\s/g, '_'
keys.push "rule=<KEY>ruleStr}"
keys.push "viewbase=#{@getObserver().getViewCenter()}"
[rot, dx, dy] = M.hyperbolicDecompose @getObserver().getViewOffsetMatrix()
keys.push "viewoffset=#{rot<KEY>}:#{<KEY>
basePath = location.href.replace(location.search, '')
uri = basePath + "?" + keys.join("&")
showExportDialog uri
#Modal dialog that displays an exported SVG image of the field.
class SvgDialog
  constructor: (@application) ->
    @dialog = E('svg-export-dialog')
    @imgContainer = E('svg-image-container')
  #Hide the dialog and drop the rendered image.
  close: ->
    @imgContainer.innerHTML = ""
    @dialog.style.display="none"
  #Render the given SVG markup as an <img> (via data URI) and show the dialog.
  show: (svg) ->
    dataUri = "data:image/svg+xml;utf8," + encodeURIComponent(svg)
    dom = new DomBuilder()
    dom.tag('img').a('src', dataUri).a('alt', 'SVG image').a('title', 'Use right click to save SVG image').end()
    @imgContainer.innerHTML = ""
    @imgContainer.appendChild dom.finalize()
    #@imgContainer.innerHTML = svg
    @dialog.style.display=""
#Recompute the canvas size from the window/document dimensions, unless the
#user pinned a fixed size (canvasSizeUpdateBlocked). Keeps the canvas square,
#a multiple of 16 pixels, and at least MIN_WIDTH wide.
updateCanvasSize = ->
  return if canvasSizeUpdateBlocked
  docW = documentWidth()
  winW = windowWidth()
  if docW > winW
    console.log "overflow"
    usedWidth = docW - canvas.width
    #console.log "#Win: #{windowWidth()}, doc: #{documentWidth()}, used: #{usedWidth}"
    w = winW - usedWidth
  else
    #console.log "underflow"
    containerAvail=E('canvas-container').clientWidth
    #console.log "avail width: #{containerAvail}"
    w = containerAvail
  #now calculate available height
  canvasRect = canvas.getBoundingClientRect()
  winH = windowHeight()
  h = winH - canvasRect.top
  navWrap = E('navigator-wrap')
  navWrap.style.height = "#{winH - navWrap.getBoundingClientRect().top - 16}px"
  #get the smaller of both
  w = Math.min(w,h)
  #reduce it a bit
  w -= 16
  #make width multiple of 16
  w = w & ~ 15
  #console.log "New w is #{w}"
  if w <= MIN_WIDTH
    w = MIN_WIDTH
  if canvas.width isnt w
    canvas.width = canvas.height = w
    redraw()
  E('image-size').value = ""+w
  return
#Pin the canvas to the pixel size entered in the 'image-size' field, or
#re-enable automatic resizing when isFixed is false.
doSetFixedSize = (isFixed) ->
  unless isFixed
    #back to automatic sizing
    canvasSizeUpdateBlocked = false
    updateCanvasSize()
    return
  size = parseIntChecked E('image-size').value
  throw new Error "Bad size: #{size}" if size <= 0 or size >= 65536
  canvasSizeUpdateBlocked = true
  canvas.width = canvas.height = size
  redraw()
#UI widget for choosing which cell state the paint tool writes.
#Shows one colored button per nonzero state when the rule has more than
#two states; hidden entirely for binary rules.
class PaintStateSelector
  constructor: (@application, @container, @buttonContainer)->
    #currently selected state to paint with
    @state = 1
    @numStates = 2
  #Rebuild the buttons when the transition function's state count changes.
  update: ->
    numStates = @application.getTransitionFunc().numStates
    #only do something if number of states changed
    return if numStates == @numStates
    @numStates = numStates
    console.log "Num states changed to #{numStates}"
    if @state >= numStates
      @state = 1
    @buttonContainer.innerHTML = ''
    if numStates <= 2
      #binary rule: selector not needed
      @container.style.display = 'none'
      @buttons = null
      @state2id = null
    else
      @container.style.display = ''
      dom = new DomBuilder()
      id2state = {}
      @state2id = {}
      for state in [1...numStates]
        color = @application.observer.getColorForState state
        btnId = "select-state-#{state}"
        @state2id[state] = btnId
        id2state[btnId] = state
        dom.tag('button').store('btn')\
           .CLASS(if state is @state then 'btn-selected' else '')\
           .ID(btnId)\
           .a('style', "background-color:#{color}")\
           .text(''+state)\
           .end()
        #dom.vars.btn.onclick = (e)->
      @buttonContainer.appendChild dom.finalize()
      @buttons = new ButtonGroup @buttonContainer, 'button'
      @buttons.addEventListener 'change', (e, btnId, oldBtn)=>
        if (state = id2state[btnId])?
          @state = state
  #Select a state programmatically (e.g. from a keyboard shortcut).
  setState: (newState) ->
    return if newState is @state
    #@state2id is null for binary rules; soak the access to avoid a TypeError
    return unless @state2id?[newState]?
    @state = newState
    if @buttons
      @buttons.setButton @state2id[newState]
serverSupportsUpload = -> ((""+window.location).match /:8000\//) and true
# ============================================ app code ===============
#
if serverSupportsUpload()
console.log "Enable upload"
E('animate-controls').style.display=''
canvas = E "canvas"
context = canvas.getContext "2d"
dragHandler = null
ghostClickDetector = new GhostClickDetector
player = null
playerTimeout = 500
autoplayCriticalPopulation = 90000
#Start auto-play: step the automaton every playerTimeout ms, stopping
#automatically when the population exceeds autoplayCriticalPopulation.
doStartPlayer = ->
  return if player?
  runPlayerStep = ->
    if application.cells.count >= autoplayCriticalPopulation
      alert "Population reached #{application.cells.count}, stopping auto-play"
      player = null
      #reflect the stopped state in the toolbar
      updatePlayButtons()
    else
      player = setTimeout( (-> application.doStep(runPlayerStep)), playerTimeout )
  runPlayerStep()
  #update buttons after the first step is scheduled, so `player` is already set
  updatePlayButtons()
doStopPlayer = ->
if player
clearTimeout player
player = null
updatePlayButtons()
doTogglePlayer = ->
if player
doStopPlayer()
else
doStartPlayer()
#Show the compile status ("Compiled"/"Error") of the generic transition
#function, and style the label according to the status.
updateGenericRuleStatus = (status)->
  span = E 'generic-tf-status'
  span.innerHTML = status
  #single-quoted strings do not interpolate in CoffeeScript; double quotes needed
  span.setAttribute('class', "generic-tf-status-#{status.toLowerCase()}")
updatePlayButtons = ->
E('btn-play-start').style.display = if player then "none" else ''
E('btn-play-stop').style.display = unless player then "none" else ''
#Deferred redraw machinery: redraw() only sets a flag; the actual drawing
#happens in redrawLoop, throttled to an adaptive frame rate.
dirty = true
redraw = -> dirty = true
#Paint the whole field onto the given 2D context, w x h pixels.
#Returns false when the observer is not yet ready to draw.
drawEverything = (w, h, context) ->
  return false unless application.observer.canDraw()
  context.fillStyle = "white"
  #context.clearRect 0, 0, canvas.width, canvas.height
  context.fillRect 0, 0, w, h
  context.save()
  s = Math.min( w, h ) / 2 #
  s1 = s-application.getMargin()
  context.translate s, s
  application.observer.draw application.cells, context, s1
  context.restore()
  return true
fpsLimiting = true
lastTime = Date.now()
fpsDefault = 30
dtMax = 1000.0/fpsDefault #
#Animation loop: redraw when dirty, skipping frames that would exceed dtMax.
redrawLoop = ->
  if dirty
    if not fpsLimiting or ((t=Date.now()) - lastTime > dtMax)
      if drawEverything canvas.width, canvas.height, context
        tDraw = Date.now() - t
        #adaptively update FPS
        dtMax = dtMax*0.9 + tDraw*2*0.1
        dirty = false
        lastTime = t
  requestAnimationFrame redrawLoop
isPanMode = true
doCanvasMouseDown = (e) ->
#Allow normal right-click to support image sacing
E('canvas-container').focus()
return if e.button is 2
#Only in mozilla?
canvas.setCapture? true
e.preventDefault()
[x,y] = getCanvasCursorPosition e, canvas
isPanAction = (e.button is 1) ^ (e.shiftKey) ^ (isPanMode)
unless isPanAction
application.toggleCellAt x, y
updatePopulation()
else
dragHandler = new MouseToolCombo application, x, y
doCanvasMouseUp = (e) ->
e.preventDefault()
if dragHandler isnt null
dragHandler?.mouseUp e
dragHandler = null
doCanvasTouchStart = (e)->
if e.touches.length is 1
doCanvasMouseDown(e)
e.preventDefault()
#Touch left the canvas: finish any active drag, same as releasing the mouse.
#(The original called doCanvasMouseOut, which is not defined anywhere in
#this file and would raise a ReferenceError.)
doCanvasTouchLeave = (e)->
  doCanvasMouseUp(e)
doCanvasTouchEnd = (e)->
e.preventDefault()
doCanvasMouseUp(e)
doCanvasTouchMove = (e)->
doCanvasMouseMove(e)
doSetPanMode = (mode) ->
isPanMode = mode
bpan = E('btn-mode-pan')
bedit = E('btn-mode-edit')
removeClass bpan, 'button-active'
removeClass bedit, 'button-active'
addClass (if isPanMode then bpan else bedit), 'button-active'
doCanvasMouseMove = (e) ->
isPanAction = (e.shiftKey) ^ (isPanMode)
E('canvas-container').style.cursor = if isPanAction then 'move' else 'default'
if dragHandler isnt null
e.preventDefault()
dragHandler.mouseMoved e
doOpenEditor = ->
E('generic-tf-code').value = application.transitionFunc.code
E('generic-tf-editor').style.display = ''
doCloseEditor = ->
E('generic-tf-editor').style.display = 'none'
doSetRuleGeneric = ->
try
console.log "Set generic rule"
application.transitionFunc = new GenericTransitionFunc E('generic-tf-code').value
updateGenericRuleStatus 'Compiled'
application.paintStateSelector.update application.transitionFunc
application.updateRuleEditor()
E('controls-rule-simple').style.display="none"
E('controls-rule-generic').style.display=""
true
catch e
alert "Failed to parse function: #{e}"
updateGenericRuleStatus 'Error'
false
doSetGrid = ->
try
n = parseInt E('entry-n').value, 10
m = parseInt E('entry-m').value, 10
if Number.isNaN(n) or n <= 0
throw new Error "Parameter N is bad"
if Number.isNaN(m) or m <= 0
throw new Error "Parameter M is bad"
#if 1/n + 1/m <= 1/2
if 2*(n+m) >= n*m
throw new Error "Tessellation {#{n}; #{m}} is not hyperbolic and not supported."
catch e
alert ""+e
return
application.setGridImpl n, m
application.doReset()
application.animator.reset()
updatePopulation = ->
E('population').innerHTML = ""+application.cells.count
updateGeneration = ->
E('generation').innerHTML = ""+application.generation
#exportTrivial = (cells) ->
# parts = []
# cells.forItems (cell, value)->
# parts.push ""+cell
# parts.push ""+value
# return parts.join " "
doExport = ->
data = stringifyFieldData exportField application.cells
n = application.tiling.n
m = application.tiling.m
showExportDialog "#{n}$#{m}$#{data}"
doExportClose = ->
E('export-dialog').style.display = 'none'
uploadToServer = (imgname, callback)->
dataURL = canvas.toDataURL();
cb = (blob) ->
formData = new FormData()
formData.append "file", blob, imgname
ajax = getAjax()
ajax.open 'POST', '/uploads/', false
ajax.onreadystatechange = -> callback(ajax)
ajax.send(formData)
canvas.toBlob cb, "image/png"
memo = null
doMemorize = ->
memo =
cells: application.cells.copy()
viewCenter: application.observer.getViewCenter()
viewOffset: application.observer.getViewOffsetMatrix()
generation: application.generation
console.log "Position memoized"
updateMemoryButtons()
doRemember = ->
if memo is null
console.log "nothing to remember"
else
application.cells = memo.cells.copy()
application.generation = memo.generation
application.observer.navigateTo memo.viewCenter, memo.viewOffset
updatePopulation()
updateGeneration()
doClearMemory = ->
memo = null
updateMemoryButtons()
updateMemoryButtons = ->
E('btn-mem-get').disabled = E('btn-mem-clear').disabled = memo is null
encodeVisible = ->
iCenter = application.tiling.inverse application.observer.cellFromPoint(0,0)
visibleCells = new ChainMap
for [cell, state] in application.observer.visibleCells application.cells
translatedCell = application.tiling.append iCenter, cell
translatedCell = application.tiling.toCell translatedCell
visibleCells.put translatedCell, state
return exportField visibleCells
showExportDialog = (sdata) ->
E('export').value = sdata
E('export-dialog').style.display = ''
E('export').focus()
E('export').select()
doExportVisible = ->
n = application.tiling.n
m = application.tiling.m
data = stringifyFieldData encodeVisible()
showExportDialog "#{n}$#{m}$#{data}"
doShowImport = ->
E('import-dialog').style.display = ''
E('import').focus()
doImportCancel = ->
E('import-dialog').style.display = 'none'
E('import').value=''
doImport = ->
try
application.importData E('import').value
updatePopulation()
redraw()
E('import-dialog').style.display = 'none'
E('import').value=''
catch e
alert "Error parsing: #{e}"
doEditAsGeneric = ->
application.transitionFunc = application.transitionFunc.toGeneric()
updateGenericRuleStatus 'Compiled'
application.paintStateSelector.update application.transitionFunc
application.updateRuleEditor()
doOpenEditor()
doDisableGeneric = ->
application.doSetRule()
doNavigateHome = ->
application.observer.navigateTo unity
# ============ Bind Events =================
E("btn-reset").addEventListener "click", ->application.doReset()
E("btn-step").addEventListener "click", ->application.doStep()
mouseMoveReceiver = E("canvas-container")
mouseMoveReceiver.addEventListener "mousedown", (e) -> doCanvasMouseDown(e) unless ghostClickDetector.isGhost
mouseMoveReceiver.addEventListener "mouseup", (e) -> doCanvasMouseUp(e) unless ghostClickDetector.isGhost
mouseMoveReceiver.addEventListener "mousemove", doCanvasMouseMove
mouseMoveReceiver.addEventListener "mousedrag", doCanvasMouseMove
mouseMoveReceiver.addEventListener "touchstart", doCanvasTouchStart
mouseMoveReceiver.addEventListener "touchend", doCanvasTouchEnd
mouseMoveReceiver.addEventListener "touchmove", doCanvasTouchMove
mouseMoveReceiver.addEventListener "touchleave", doCanvasTouchLeave
ghostClickDetector.addListeners canvas
E("btn-set-rule").addEventListener "click", (e)->application.doSetRule()
E("btn-set-rule-generic").addEventListener "click", (e)->
doSetRuleGeneric()
doCloseEditor()
E("btn-rule-generic-close-editor").addEventListener "click", doCloseEditor
E("btn-set-grid").addEventListener "click", doSetGrid
E("btn-export").addEventListener "click", doExport
E('btn-search').addEventListener 'click', ->application.doSearch()
E('btn-random').addEventListener 'click', -> application.doRandomFill()
E('btn-rule-make-generic').addEventListener 'click', doEditAsGeneric
E('btn-edit-rule').addEventListener 'click', doOpenEditor
E('btn-disable-generic-rule').addEventListener 'click', doDisableGeneric
E('btn-export-close').addEventListener 'click', doExportClose
E('btn-import').addEventListener 'click', doShowImport
E('btn-import-cancel').addEventListener 'click', doImportCancel
E('btn-import-run').addEventListener 'click', doImport
#initialize
E('btn-mem-set').addEventListener 'click', doMemorize
E('btn-mem-get').addEventListener 'click', doRemember
E('btn-mem-clear').addEventListener 'click', doClearMemory
E('btn-exp-visible').addEventListener 'click', doExportVisible
E('btn-nav-home').addEventListener 'click', doNavigateHome
window.addEventListener 'resize', updateCanvasSize
E('btn-nav-clear').addEventListener 'click', (e) -> application.navigator.clear()
E('btn-play-start').addEventListener 'click', doTogglePlayer
E('btn-play-stop').addEventListener 'click', doTogglePlayer
E('animate-set-start').addEventListener 'click', -> application.animator.setStart application.observer
E('animate-set-end').addEventListener 'click', -> application.animator.setEnd application.observer
E('animate-view-start').addEventListener 'click', -> application.animator.viewStart application.observer
E('animate-view-end').addEventListener 'click', -> application.animator.viewEnd application.observer
E('btn-animate-derotate').addEventListener 'click', -> application.animator.derotate()
E('btn-upload-animation').addEventListener 'click', (e)->
application.animator.animate application.observer, parseIntChecked(E('animate-frame-per-generation').value), parseIntChecked(E('animate-generations').value), (-> null)
E('btn-animate-cancel').addEventListener 'click', (e)->application.animator.cancelWork()
#register once — the listener was previously added twice, straightening the view twice per click
E('view-straighten').addEventListener 'click', (e)-> application.observer.straightenView()
E('image-fix-size').addEventListener 'click', (e)-> doSetFixedSize E('image-fix-size').checked
E('image-size').addEventListener 'change', (e) ->
E('image-fix-size').checked=true
doSetFixedSize true
E('flag-origin-mark').addEventListener 'change', (e)->
application.setDrawingHomePtr E('flag-origin-mark').checked
E('flag-live-borders').addEventListener 'change', (e)->
application.setShowLiveBorders E('flag-live-borders').checked
E('btn-mode-edit').addEventListener 'click', (e) -> doSetPanMode false
E('btn-mode-pan').addEventListener 'click', (e) -> doSetPanMode true
E('btn-db-save').addEventListener 'click', (e) -> application.saveDialog.show()
E('btn-db-load').addEventListener 'click', (e) -> application.openDialog.show()
E('btn-export-svg').addEventListener 'click', (e) -> application.doExportSvg()
E('btn-svg-export-dialog-close').addEventListener 'click', (e) -> application.svgDialog.close()
E('btn-export-uri').addEventListener 'click', (e) -> application.doExportUrl()
#Keyboard shortcut table. Keys are built from the event keyCode plus
#modifier suffixes: C=Ctrl, A=Alt, S=Shift; '#NN' is a raw keyCode
#(e.g. '#32' is Space, 'UA' is Alt+U, 'SC' is Ctrl+S).
shortcuts =
  'N': -> application.doStep()
  'C': -> application.doReset()
  'S': -> application.doSearch()
  'R': ->application.doRandomFill()
  '1': (e) -> application.paintStateSelector.setState 1
  '2': (e) -> application.paintStateSelector.setState 2
  '3': (e) -> application.paintStateSelector.setState 3
  '4': (e) -> application.paintStateSelector.setState 4
  '5': (e) -> application.paintStateSelector.setState 5
  'M': doMemorize
  'U': doRemember
  'UA': doClearMemory
  'H': doNavigateHome
  'G': doTogglePlayer
  'SA': (e) -> application.observer.straightenView()
  '#32': doTogglePlayer
  'P': (e) -> doSetPanMode true
  'E': (e) -> doSetPanMode false
  'SC': (e) -> application.saveDialog.show()
  'OC': (e) -> application.openDialog.show()
document.addEventListener "keydown", (e)->
  #ignore keystrokes while a text field is focused
  focused = document.activeElement
  if focused and focused.tagName.toLowerCase() in ['textarea', 'input']
    return
  #printable ASCII keys are mapped to their character, others to '#<code>'
  keyCode = if e.keyCode > 32 and e.keyCode < 128
    String.fromCharCode e.keyCode
  else
    '#' + e.keyCode
  keyCode += "C" if e.ctrlKey
  keyCode += "A" if e.altKey
  keyCode += "S" if e.shiftKey
  #console.log keyCode
  if (handler = shortcuts[keyCode])?
    e.preventDefault()
    handler(e)
##Application startup
application = new Application
application.initialize new UriConfig
doSetPanMode true
updatePopulation()
updateGeneration()
updateCanvasSize()
updateMemoryButtons()
updatePlayButtons()
redrawLoop()
#application.saveDialog.show()
"use strict"
#Core hyperbolic group compuatation library
{unity} = require "../core/vondyck_chain.coffee"
{ChainMap} = require "../core/chain_map.coffee"
{RegularTiling} = require "../core/regular_tiling.coffee"
{evaluateTotalisticAutomaton} = require "../core/cellular_automata.coffee"
{stringifyFieldData, parseFieldData, importField, randomFillFixedNum, exportField, randomStateGenerator} = require "../core/field.coffee"
{GenericTransitionFunc, BinaryTransitionFunc,DayNightTransitionFunc, parseTransitionFunction} = require "../core/rule.coffee"
M = require "../core/matrix3.coffee"
#Application components
{Animator} = require "./animator.coffee"
{MouseToolCombo} = require "./mousetool.coffee"
{Navigator} = require "./navigator.coffee"
{FieldObserver} = require "./observer.coffee"
{GenerateFileList, OpenDialog, SaveDialog} = require "./indexeddb.coffee"
#{FieldObserverWithRemoreRenderer} = require "./observer_remote.coffee"
#Misc utilities
{E, getAjax, ButtonGroup, windowWidth, windowHeight, documentWidth, removeClass, addClass, ValidatingInput} = require "./htmlutil.coffee"
{DomBuilder} = require "./dom_builder.coffee"
{parseIntChecked, parseFloatChecked} = require "../core/utils.coffee"
{parseUri} = require "./parseuri.coffee"
{getCanvasCursorPosition} = require "./canvas_util.coffee"
C2S = require "../ext/canvas2svg.js"
#{lzw_encode} = require "../ext/lzw.coffee"
require "../ext/polyfills.js"
require "../core/acosh_polyfill.coffee"
{GhostClickDetector} = require "./ghost_click_detector.coffee"
MIN_WIDTH = 100
minVisibleSize = 1/100
canvasSizeUpdateBlocked = false
randomFillNum = 2000
randomFillPercent = 0.4
class DefaultConfig
getGrid: -> [7,3]
getCellData: -> ""
getGeneration: -> 0
getFunctionCode: -> "B 3 S 2 3"
getViewBase: -> unity
getViewOffset: -> M.eye()
#Configuration source that reads the initial application state from the
#page URL query string (see Application.doExportUrl for the producer side).
#Falls back to the same defaults as DefaultConfig for absent or bad keys.
class UriConfig
  constructor: ->
    @keys = parseUri(""+window.location).queryKey
  #Parse "grid=n,m" (or "n;m"); alert and fall back to {7;3} on bad input.
  getGrid: ->
    if @keys.grid?
      try
        match = @keys.grid.match /(\d+)[,;](\d+)/
        throw new Error("Syntax is bad: #{@keys.grid}") unless match
        n = parseIntChecked match[1]
        m = parseIntChecked match[2]
        return [n,m]
      catch e
        #fix: message typo was "Bad grid paramters"
        alert "Bad grid parameters: #{@keys.grid}"
    return [7,3]
  getCellData: ->@keys.cells
  #Parse "generation=<int>"; alert and fall back to 0 on bad input.
  getGeneration: ->
    if @keys.generation?
      try
        return parseIntChecked @keys.generation
      catch e
        #fix: message typo was "Bad generationn umber"
        alert "Bad generation number: #{@keys.generation}"
    return 0
  #Rule code is URL-encoded with underscores instead of spaces.
  getFunctionCode: ->
    if @keys.rule?
      @keys.rule.replace /_/g, ' '
    else
      "B 3 S 2 3"
  getViewBase: ->
    return unity unless @keys.viewbase?
    RegularTiling::parse @keys.viewbase
  #View offset is serialized as "rot:dx:dy" (rotation angle, translation).
  getViewOffset: ->
    return M.eye() unless @keys.viewoffset?
    [rot, dx, dy] = (parseFloatChecked part for part in @keys.viewoffset.split ':')
    M.mul M.translationMatrix(dx, dy), M.rotationMatrix(rot)
class Application
constructor: ->
@tiling = null
@observer = null
@navigator = null
@animator = null
@cells = null
@generation = 0
@transitionFunc = null
@lastBinaryTransitionFunc = null
#@ObserverClass = FieldObserverWithRemoreRenderer
@ObserverClass = FieldObserver
@margin = 16 #margin pixels
setCanvasResize: (enable) -> canvasSizeUpdateBlocked = enable
getCanvasResize: -> canvasSizeUpdateBlocked
redraw: -> redraw()
getObserver: -> @observer
drawEverything: -> drawEverything canvas.width, canvas.height, context
uploadToServer: (name, cb) -> uploadToServer name, cb
getCanvas: -> canvas
getTransitionFunc: -> @transitionFunc
getMargin: -> if @observer.isDrawingHomePtr then @margin else 0
setShowLiveBorders: (isDrawing)->
@observer.isDrawingLiveBorders = isDrawing
redraw()
setDrawingHomePtr: (isDrawing)->
@observer.isDrawingHomePtr = isDrawing
redraw()
if localStorage?
localStorage.setItem "observer.isDrawingHomePtr", if isDrawing then "1" else "0"
console.log "store #{isDrawing}"
  #Convert canvas X,Y coordinates to relative X,Y in (-1..1) range,
  #with the origin at the canvas center. The unit is half of the smaller
  #canvas dimension minus the drawing margin, so (±1, ±1) touches the
  #edge of the Poincare disk.
  canvas2relative: (x,y) ->
    s = Math.min(canvas.width, canvas.height) - 2*@getMargin()
    isize = 2.0/s
    [(x - canvas.width*0.5)*isize, (y - canvas.height*0.5)*isize]
initialize: (config = new DefaultConfig)->
[n,m] = config.getGrid()
@tiling = new RegularTiling n, m
cellData = config.getCellData()
if cellData
console.log "import: #{cellData}"
@importData cellData
else
@cells = new ChainMap
@cells.put unity, 1
@observer = new @ObserverClass @tiling, minVisibleSize, config.getViewBase(), config.getViewOffset()
if (isDrawing=localStorage?.getItem('observer.isDrawingHomePtr'))?
isDrawing = isDrawing is '1'
E('flag-origin-mark').checked = isDrawing
@observer.isDrawingHomePtr = isDrawing
console.log "restore #{isDrawing}"
else
@setDrawingHomePtr E('flag-origin-mark').checked
@setShowLiveBorders E('flag-live-borders').checked
@observer.onFinish = -> redraw()
@navigator = new Navigator this
@animator = new Animator this
@paintStateSelector = new PaintStateSelector this, E("state-selector"), E("state-selector-buttons")
@transitionFunc = parseTransitionFunction config.getFunctionCode(), application.tiling.n, application.tiling.m
@lastBinaryTransitionFunc = @transitionFunc
@openDialog = new OpenDialog this
@saveDialog = new SaveDialog this
@svgDialog = new SvgDialog this
@ruleEntry = new ValidatingInput E('rule-entry'),
((ruleStr) =>
console.log "Parsing TF {@tiling.n} {@tiling.m}"
parseTransitionFunction ruleStr, @tiling.n, @tiling.m),
((rule)->""+rule),
@transitionFunc
@ruleEntry.onparsed = (rule) => @doSetRule()
@updateRuleEditor()
@updateGridUI()
updateRuleEditor: ->
switch @transitionFunc.getType()
when "binary"
E('controls-rule-simple').style.display=""
E('controls-rule-generic').style.display="none"
when "custom"
E('controls-rule-simple').style.display="none"
E('controls-rule-generic').style.display=""
else
console.dir @transitionFunc
throw new Error "Bad transition func"
doSetRule: ->
if @ruleEntry.message?
alert "Failed to parse function: #{@ruleEntry.message}"
@transitionFunc = @lastBinaryTransitionFunc ? @transitionFunc
else
console.log "revalidate"
@ruleEntry.revalidate()
@transitionFunc = @ruleEntry.value
@lastBinaryTransitionFunc = @transitionFunc
@paintStateSelector.update @transitionFunc
console.log @transitionFunc
E('controls-rule-simple').style.display=""
E('controls-rule-generic').style.display="none"
setGridImpl: (n, m)->
@tiling = new RegularTiling n, m
#transition function should be changed too.
if @transitionFunc?
@transitionFunc = @transitionFunc.changeGrid @tiling.n, @tiling.m
@observer?.shutdown()
oldObserver = @observer
@observer = new @ObserverClass @tiling, minVisibleSize
@observer.isDrawingHomePtr = oldObserver.isDrawingHomePtr
@observer.onFinish = -> redraw()
@navigator?.clear()
doClearMemory()
doStopPlayer()
@updateGridUI()
updateGridUI: ->
E('entry-n').value = "" + application.tiling.n
E('entry-m').value = "" + application.tiling.m
E('grid-num-neighbors').innerHTML = (@tiling.m-2)*@tiling.n
#Actions
doRandomFill: ->
randomFillFixedNum @cells, randomFillPercent, unity, randomFillNum, @tiling, randomStateGenerator(@transitionFunc.numStates)
updatePopulation()
redraw()
  #Advance the automaton by one generation, then refresh counters and redraw.
  #onFinish is an optional continuation (used by the auto-player to chain steps).
  doStep: (onFinish)->
    #Set generation for those rules that depend on it
    @transitionFunc.setGeneration @generation
    @cells = evaluateTotalisticAutomaton @cells, @tiling, @transitionFunc.evaluate.bind(@transitionFunc), @transitionFunc.plus, @transitionFunc.plusInitial
    @generation += 1
    redraw()
    updatePopulation()
    updateGeneration()
    onFinish?()
doReset: ->
@cells = new ChainMap
@generation = 0
@cells.put unity, 1
updatePopulation()
updateGeneration()
redraw()
doSearch: ->
found = @navigator.search @cells
updateCanvasSize()
if found > 0
@navigator.navigateToResult 0
importData: (data)->
try
console.log "importing #{data}"
match = data.match /^(\d+)\$(\d+)\$(.*)$/
throw new Error("Data format unrecognized") unless match?
n = parseIntChecked match[1]
m = parseIntChecked match[2]
if n isnt @tiling.n or m isnt @tiling.m
console.log "Need to change grid"
@setGridImpl n, m
#normzlize chain coordinates, so that importing of user-generated data could be possible
normalizeChain = (chain) => @tiling.toCell @tiling.rewrite chain
@cells = importField parseFieldData(match[3]), null, normalizeChain
console.log "Imported #{@cells.count} cells"
catch e
alert "Faield to import data: #{e}"
@cells = new ChainMap
loadData: (record, cellData) ->
assert = (x) ->
throw new Error("Assertion failure") unless x?
x
@setGridImpl assert(record.gridN), assert(record.gridM)
@animator.reset()
@cells = importField parseFieldData assert(cellData)
@generation = assert record.generation
@observer.navigateTo @tiling.parse(assert(record.base)), assert(record.offset)
console.log "LOading func type= #{record.funcType}"
switch record.funcType
when "binary"
@transitionFunc = parseTransitionFunction record.funcId, record.gridN, record.gridM
@ruleEntry.setValue @transitionFunc
when "custom"
@transitionFunc = new GenericTransitionFunc record.funcId
@paintStateSelector.update @transitionFunc
else
throw new Error "unknown TF type #{record.funcType}"
updatePopulation()
updateGeneration()
@updateRuleEditor()
redraw()
getSaveData: (fname)->
#[data, catalogRecord]
fieldData = stringifyFieldData exportField @cells
funcId = ""+@getTransitionFunc()
funcType = @getTransitionFunc().getType()
catalogRecord =
gridN: @tiling.n
gridM: @tiling.m
name: fname
funcId: funcId
funcType: funcType
base: @getObserver().getViewCenter().toString()
offset: @getObserver().getViewOffsetMatrix()
size: fieldData.length
time: Date.now()
field: null
generation: @generation
return [fieldData, catalogRecord]
toggleCellAt: (x,y) ->
[xp, yp] = @canvas2relative x, y
try
cell = @observer.cellFromPoint xp, yp
catch e
return
if @cells.get(cell) is @paintStateSelector.state
@cells.remove cell
else
@cells.put cell, @paintStateSelector.state
redraw()
doExportSvg: ->
sz = 512
svgContext = new C2S sz, sz
drawEverything sz, sz, svgContext
# Show the generated SVG image
@svgDialog.show svgContext.getSerializedSvg()
doExportUrl: ->
#Export field state as URL
keys = []
keys.push "grid=#{@tiling.n},#{@tiling.m}"
if @cells.count != 0
keys.push "cells=#{@tiling.n}$#{@tiling.m}$#{stringifyFieldData exportField @cells}"
keys.push "generation=#{@generation}"
if @transitionFunc.getType() is "binary"
ruleStr = ""+@transitionFunc
ruleStr = ruleStr.replace /\s/g, '_'
keys.push "rule=PI:KEY:<KEY>END_PIruleStr}"
keys.push "viewbase=#{@getObserver().getViewCenter()}"
[rot, dx, dy] = M.hyperbolicDecompose @getObserver().getViewOffsetMatrix()
keys.push "viewoffset=#{rotPI:KEY:<KEY>END_PI}:#{PI:KEY:<KEY>END_PI
basePath = location.href.replace(location.search, '')
uri = basePath + "?" + keys.join("&")
showExportDialog uri
#Modal dialog that displays an exported SVG image of the field.
class SvgDialog
  constructor: (@application) ->
    @dialog = E('svg-export-dialog')
    @imgContainer = E('svg-image-container')
  #Hide the dialog and drop the (possibly large) image markup.
  close: ->
    @imgContainer.innerHTML = ""
    @dialog.style.display="none"
  #Show the given SVG markup as an <img> with a data: URI, so the user
  #can save it via the browser's context menu.
  show: (svg) ->
    dataUri = "data:image/svg+xml;utf8," + encodeURIComponent(svg)
    dom = new DomBuilder()
    dom.tag('img').a('src', dataUri).a('alt', 'SVG image').a('title', 'Use right click to save SVG image').end()
    @imgContainer.innerHTML = ""
    @imgContainer.appendChild dom.finalize()
    #@imgContainer.innerHTML = svg
    @dialog.style.display=""
#Recompute the square canvas size from the window / container geometry.
#No-op while the user has fixed the size manually (canvasSizeUpdateBlocked).
updateCanvasSize = ->
  return if canvasSizeUpdateBlocked
  docW = documentWidth()
  winW = windowWidth()
  if docW > winW
    #Document overflows the window: shrink the canvas by the overflow amount.
    console.log "overflow"
    usedWidth = docW - canvas.width
    #console.log "#Win: #{windowWidth()}, doc: #{documentWidth()}, used: #{usedWidth}"
    w = winW - usedWidth
  else
    #console.log "underflow"
    containerAvail=E('canvas-container').clientWidth
    #console.log "avail width: #{containerAvail}"
    w = containerAvail
  #now calculate available height
  canvasRect = canvas.getBoundingClientRect()
  winH = windowHeight()
  h = winH - canvasRect.top
  #stretch the navigator sidebar to the bottom of the window as well
  navWrap = E('navigator-wrap')
  navWrap.style.height = "#{winH - navWrap.getBoundingClientRect().top - 16}px"
  #get the smaller of both
  w = Math.min(w,h)
  #reduce it a bit
  w -= 16
  #make width multiple of 16
  w = w & ~ 15
  #console.log "New w is #{w}"
  if w <= MIN_WIDTH
    w = MIN_WIDTH
  if canvas.width isnt w
    canvas.width = canvas.height = w
    redraw()
  E('image-size').value = ""+w
  return
doSetFixedSize = (isFixed) ->
if isFixed
size = parseIntChecked E('image-size').value
if size <= 0 or size >=65536
throw new Error "Bad size: #{size}"
canvasSizeUpdateBlocked = true
canvas.width = canvas.height = size
redraw()
else
canvasSizeUpdateBlocked = false
updateCanvasSize()
#UI widget for choosing which cell state the mouse "paints".
#Hidden for binary rules (2 states); otherwise shows one colored button
#per nonzero state.
class PaintStateSelector
  constructor: (@application, @container, @buttonContainer)->
    #currently selected paint state; 1 is the default "alive" state
    @state = 1
    @numStates = 2
  #Rebuild the buttons after the transition function (and thus the
  #number of states) may have changed.
  update: ->
    numStates = @application.getTransitionFunc().numStates
    #only do something if number of states changed
    return if numStates == @numStates
    @numStates = numStates
    console.log "Num states changed to #{numStates}"
    if @state >= numStates
      @state = 1
    @buttonContainer.innerHTML = ''
    if numStates <= 2
      @container.style.display = 'none'
      @buttons = null
      @state2id = null
    else
      @container.style.display = ''
      dom = new DomBuilder()
      id2state = {}
      @state2id = {}
      for state in [1...numStates]
        color = @application.observer.getColorForState state
        btnId = "select-state-#{state}"
        @state2id[state] = btnId
        id2state[btnId] = state
        dom.tag('button').store('btn')\
           .CLASS(if state is @state then 'btn-selected' else '')\
           .ID(btnId)\
           .a('style', "background-color:#{color}")\
           .text(''+state)\
           .end()
        #dom.vars.btn.onclick = (e)->
      @buttonContainer.appendChild dom.finalize()
      @buttons = new ButtonGroup @buttonContainer, 'button'
      @buttons.addEventListener 'change', (e, btnId, oldBtn)=>
        if (state = id2state[btnId])?
          @state = state
  #Programmatically select a state (used by keyboard shortcuts 1-5).
  #NOTE(review): @state2id is null/undefined for binary rules, so calling
  #setState with a state other than the current one would then throw on
  #the property access — confirm whether callers guard against this.
  setState: (newState) ->
    return if newState is @state
    return unless @state2id[newState]?
    @state = newState
    if @buttons
      @buttons.setButton @state2id[newState]
serverSupportsUpload = -> ((""+window.location).match /:8000\//) and true
# ============================================ app code ===============
#
if serverSupportsUpload()
console.log "Enable upload"
E('animate-controls').style.display=''
canvas = E "canvas"
context = canvas.getContext "2d"
dragHandler = null
ghostClickDetector = new GhostClickDetector
player = null
playerTimeout = 500
autoplayCriticalPopulation = 90000
#Start auto-play: repeatedly schedule application.doStep with playerTimeout
#milliseconds between steps; does nothing if already playing.
doStartPlayer = ->
  return if player?
  runPlayerStep = ->
    #Safety valve: stop when the field gets too big for interactive stepping.
    if application.cells.count >= autoplayCriticalPopulation
      alert "Population reached #{application.cells.count}, stopping auto-play"
      player = null
    else
      #schedule the next step; doStep invokes runPlayerStep as its onFinish
      player = setTimeout( (-> application.doStep(runPlayerStep)), playerTimeout )
    updatePlayButtons()
  runPlayerStep()
#Stop auto-play if it is running, cancelling the pending step and
#syncing the play/stop buttons.
doStopPlayer = ->
  return unless player
  clearTimeout player
  player = null
  updatePlayButtons()
#Flip the auto-play state: running -> stopped, stopped -> running.
doTogglePlayer = ->
  if player then doStopPlayer() else doStartPlayer()
#Show the compilation status ("Compiled"/"Error") of the generic
#transition function, and set a status-specific CSS class on the badge.
updateGenericRuleStatus = (status)->
  span = E 'generic-tf-status'
  span.innerHTML = status
  #fix: CoffeeScript only interpolates inside double-quoted strings; with
  #single quotes the literal text 'generic-tf-status-#{status.toLowerCase()}'
  #was being set as the class name.
  span.setAttribute('class', "generic-tf-status-#{status.toLowerCase()}")
#Show exactly one of the start/stop buttons, matching the player state.
updatePlayButtons = ->
  running = !!player
  E('btn-play-start').style.display = if running then "none" else ''
  E('btn-play-stop').style.display = if running then '' else "none"
dirty = true
redraw = -> dirty = true
#Render the whole field into the given 2D context (real canvas or the
#canvas2svg adapter). Returns true when drawing happened, false when the
#observer is not ready to draw yet.
drawEverything = (w, h, context) ->
  return false unless application.observer.canDraw()
  context.fillStyle = "white"
  #context.clearRect 0, 0, canvas.width, canvas.height
  context.fillRect 0, 0, w, h
  context.save()
  s = Math.min( w, h ) / 2 #half of the smaller dimension = disk radius
  s1 = s-application.getMargin()
  #move the origin to the viewport center before drawing the disk
  context.translate s, s
  application.observer.draw application.cells, context, s1
  context.restore()
  return true
fpsLimiting = true
lastTime = Date.now()
fpsDefault = 30
dtMax = 1000.0/fpsDefault #minimal time between frames, ms; adapted at runtime
#Main animation loop: redraw at most once per dtMax ms, and only when the
#"dirty" flag was raised by redraw(). dtMax grows when drawing is slow
#(exponential moving average of twice the draw time), throttling the FPS.
redrawLoop = ->
  if dirty
    if not fpsLimiting or ((t=Date.now()) - lastTime > dtMax)
      if drawEverything canvas.width, canvas.height, context
        tDraw = Date.now() - t
        #adaptively update FPS
        dtMax = dtMax*0.9 + tDraw*2*0.1
        #drawn successfully: clear the flag; otherwise keep retrying
        dirty = false
        lastTime = t
  requestAnimationFrame redrawLoop
isPanMode = true
#Mouse-down on the canvas: either toggle a cell (edit) or start a drag
#tool (pan/rotate), depending on the pan mode, Shift key and mouse button.
doCanvasMouseDown = (e) ->
  #Allow normal right-click to support image saving
  E('canvas-container').focus()
  return if e.button is 2
  #Only in mozilla?
  canvas.setCapture? true
  e.preventDefault()
  [x,y] = getCanvasCursorPosition e, canvas
  #XOR: middle button, Shift key and the global pan mode each flip the action
  isPanAction = (e.button is 1) ^ (e.shiftKey) ^ (isPanMode)
  unless isPanAction
    application.toggleCellAt x, y
    updatePopulation()
  else
    dragHandler = new MouseToolCombo application, x, y
doCanvasMouseUp = (e) ->
e.preventDefault()
if dragHandler isnt null
dragHandler?.mouseUp e
dragHandler = null
doCanvasTouchStart = (e)->
if e.touches.length is 1
doCanvasMouseDown(e)
e.preventDefault()
doCanvasTouchLeave = (e)->
doCanvasMouseOut(e)
doCanvasTouchEnd = (e)->
e.preventDefault()
doCanvasMouseUp(e)
doCanvasTouchMove = (e)->
doCanvasMouseMove(e)
#Switch between "pan" and "edit" mouse modes and highlight the active
#mode button in the toolbar.
doSetPanMode = (mode) ->
  isPanMode = mode
  panButton = E('btn-mode-pan')
  editButton = E('btn-mode-edit')
  removeClass panButton, 'button-active'
  removeClass editButton, 'button-active'
  activeButton = if isPanMode then panButton else editButton
  addClass activeButton, 'button-active'
doCanvasMouseMove = (e) ->
isPanAction = (e.shiftKey) ^ (isPanMode)
E('canvas-container').style.cursor = if isPanAction then 'move' else 'default'
if dragHandler isnt null
e.preventDefault()
dragHandler.mouseMoved e
doOpenEditor = ->
E('generic-tf-code').value = application.transitionFunc.code
E('generic-tf-editor').style.display = ''
doCloseEditor = ->
E('generic-tf-editor').style.display = 'none'
doSetRuleGeneric = ->
try
console.log "Set generic rule"
application.transitionFunc = new GenericTransitionFunc E('generic-tf-code').value
updateGenericRuleStatus 'Compiled'
application.paintStateSelector.update application.transitionFunc
application.updateRuleEditor()
E('controls-rule-simple').style.display="none"
E('controls-rule-generic').style.display=""
true
catch e
alert "Failed to parse function: #{e}"
updateGenericRuleStatus 'Error'
false
doSetGrid = ->
try
n = parseInt E('entry-n').value, 10
m = parseInt E('entry-m').value, 10
if Number.isNaN(n) or n <= 0
throw new Error "Parameter N is bad"
if Number.isNaN(m) or m <= 0
throw new Error "Parameter M is bad"
#if 1/n + 1/m <= 1/2
if 2*(n+m) >= n*m
throw new Error "Tessellation {#{n}; #{m}} is not hyperbolic and not supported."
catch e
alert ""+e
return
application.setGridImpl n, m
application.doReset()
application.animator.reset()
#Refresh the live-cell counter in the status bar.
updatePopulation = ->
  E('population').innerHTML = "#{application.cells.count}"
#Refresh the generation counter in the status bar.
updateGeneration = ->
  E('generation').innerHTML = "#{application.generation}"
#exportTrivial = (cells) ->
# parts = []
# cells.forItems (cell, value)->
# parts.push ""+cell
# parts.push ""+value
# return parts.join " "
doExport = ->
data = stringifyFieldData exportField application.cells
n = application.tiling.n
m = application.tiling.m
showExportDialog "#{n}$#{m}$#{data}"
doExportClose = ->
E('export-dialog').style.display = 'none'
#POST the current canvas content as a PNG file to /uploads/.
#callback receives the XMLHttpRequest object on state changes.
uploadToServer = (imgname, callback)->
  dataURL = canvas.toDataURL();
  cb = (blob) ->
    formData = new FormData()
    formData.append "file", blob, imgname
    ajax = getAjax()
    #NOTE(review): the third argument `false` makes this a synchronous
    #request, which blocks the UI thread and is deprecated in browsers —
    #confirm whether callers would tolerate an async upload instead.
    ajax.open 'POST', '/uploads/', false
    ajax.onreadystatechange = -> callback(ajax)
    ajax.send(formData)
  canvas.toBlob cb, "image/png"
memo = null
doMemorize = ->
memo =
cells: application.cells.copy()
viewCenter: application.observer.getViewCenter()
viewOffset: application.observer.getViewOffsetMatrix()
generation: application.generation
console.log "Position memoized"
updateMemoryButtons()
#Restore field, generation and viewpoint from the memorized snapshot,
#if one exists; refresh the counters afterwards.
doRemember = ->
  unless memo?
    console.log "nothing to remember"
    return
  application.cells = memo.cells.copy()
  application.generation = memo.generation
  application.observer.navigateTo memo.viewCenter, memo.viewOffset
  updatePopulation()
  updateGeneration()
doClearMemory = ->
memo = null
updateMemoryButtons()
updateMemoryButtons = ->
E('btn-mem-get').disabled = E('btn-mem-clear').disabled = memo is null
#Export only the currently visible cells, re-centered so that the cell
#under the view center becomes the origin of the exported field.
encodeVisible = ->
  iCenter = application.tiling.inverse application.observer.cellFromPoint(0,0)
  visibleCells = new ChainMap
  for [cell, state] in application.observer.visibleCells application.cells
    #translate each cell by the inverse of the center cell, then normalize
    translatedCell = application.tiling.append iCenter, cell
    translatedCell = application.tiling.toCell translatedCell
    visibleCells.put translatedCell, state
  return exportField visibleCells
#Put the serialized data into the export dialog, open it, and select
#the text so the user can copy it immediately.
showExportDialog = (sdata) ->
  exportBox = E('export')
  exportBox.value = sdata
  E('export-dialog').style.display = ''
  exportBox.focus()
  exportBox.select()
doExportVisible = ->
n = application.tiling.n
m = application.tiling.m
data = stringifyFieldData encodeVisible()
showExportDialog "#{n}$#{m}$#{data}"
doShowImport = ->
E('import-dialog').style.display = ''
E('import').focus()
doImportCancel = ->
E('import-dialog').style.display = 'none'
E('import').value=''
doImport = ->
try
application.importData E('import').value
updatePopulation()
redraw()
E('import-dialog').style.display = 'none'
E('import').value=''
catch e
alert "Error parsing: #{e}"
doEditAsGeneric = ->
application.transitionFunc = application.transitionFunc.toGeneric()
updateGenericRuleStatus 'Compiled'
application.paintStateSelector.update application.transitionFunc
application.updateRuleEditor()
doOpenEditor()
doDisableGeneric = ->
application.doSetRule()
doNavigateHome = ->
application.observer.navigateTo unity
# ============ Bind Events =================
E("btn-reset").addEventListener "click", ->application.doReset()
E("btn-step").addEventListener "click", ->application.doStep()
mouseMoveReceiver = E("canvas-container")
mouseMoveReceiver.addEventListener "mousedown", (e) -> doCanvasMouseDown(e) unless ghostClickDetector.isGhost
mouseMoveReceiver.addEventListener "mouseup", (e) -> doCanvasMouseUp(e) unless ghostClickDetector.isGhost
mouseMoveReceiver.addEventListener "mousemove", doCanvasMouseMove
mouseMoveReceiver.addEventListener "mousedrag", doCanvasMouseMove
mouseMoveReceiver.addEventListener "touchstart", doCanvasTouchStart
mouseMoveReceiver.addEventListener "touchend", doCanvasTouchEnd
mouseMoveReceiver.addEventListener "touchmove", doCanvasTouchMove
mouseMoveReceiver.addEventListener "touchleave", doCanvasTouchLeave
ghostClickDetector.addListeners canvas
E("btn-set-rule").addEventListener "click", (e)->application.doSetRule()
E("btn-set-rule-generic").addEventListener "click", (e)->
doSetRuleGeneric()
doCloseEditor()
E("btn-rule-generic-close-editor").addEventListener "click", doCloseEditor
E("btn-set-grid").addEventListener "click", doSetGrid
E("btn-export").addEventListener "click", doExport
E('btn-search').addEventListener 'click', ->application.doSearch()
E('btn-random').addEventListener 'click', -> application.doRandomFill()
E('btn-rule-make-generic').addEventListener 'click', doEditAsGeneric
E('btn-edit-rule').addEventListener 'click', doOpenEditor
E('btn-disable-generic-rule').addEventListener 'click', doDisableGeneric
E('btn-export-close').addEventListener 'click', doExportClose
E('btn-import').addEventListener 'click', doShowImport
E('btn-import-cancel').addEventListener 'click', doImportCancel
E('btn-import-run').addEventListener 'click', doImport
#initialize
E('btn-mem-set').addEventListener 'click', doMemorize
E('btn-mem-get').addEventListener 'click', doRemember
E('btn-mem-clear').addEventListener 'click', doClearMemory
E('btn-exp-visible').addEventListener 'click', doExportVisible
E('btn-nav-home').addEventListener 'click', doNavigateHome
window.addEventListener 'resize', updateCanvasSize
E('btn-nav-clear').addEventListener 'click', (e) -> application.navigator.clear()
E('btn-play-start').addEventListener 'click', doTogglePlayer
E('btn-play-stop').addEventListener 'click', doTogglePlayer
E('animate-set-start').addEventListener 'click', -> application.animator.setStart application.observer
E('animate-set-end').addEventListener 'click', -> application.animator.setEnd application.observer
E('animate-view-start').addEventListener 'click', -> application.animator.viewStart application.observer
E('animate-view-end').addEventListener 'click', -> application.animator.viewEnd application.observer
E('btn-animate-derotate').addEventListener 'click', -> application.animator.derotate()
E('btn-upload-animation').addEventListener 'click', (e)->
application.animator.animate application.observer, parseIntChecked(E('animate-frame-per-generation').value), parseIntChecked(E('animate-generations').value), (-> null)
E('btn-animate-cancel').addEventListener 'click', (e)->application.animator.cancelWork()
#fix: this click handler was registered twice on the same element, so a
#single click invoked straightenView() twice; register it once.
E('view-straighten').addEventListener 'click', (e)-> application.observer.straightenView()
E('image-fix-size').addEventListener 'click', (e)-> doSetFixedSize E('image-fix-size').checked
E('image-size').addEventListener 'change', (e) ->
E('image-fix-size').checked=true
doSetFixedSize true
E('flag-origin-mark').addEventListener 'change', (e)->
application.setDrawingHomePtr E('flag-origin-mark').checked
E('flag-live-borders').addEventListener 'change', (e)->
application.setShowLiveBorders E('flag-live-borders').checked
E('btn-mode-edit').addEventListener 'click', (e) -> doSetPanMode false
E('btn-mode-pan').addEventListener 'click', (e) -> doSetPanMode true
E('btn-db-save').addEventListener 'click', (e) -> application.saveDialog.show()
E('btn-db-load').addEventListener 'click', (e) -> application.openDialog.show()
E('btn-export-svg').addEventListener 'click', (e) -> application.doExportSvg()
E('btn-svg-export-dialog-close').addEventListener 'click', (e) -> application.svgDialog.close()
E('btn-export-uri').addEventListener 'click', (e) -> application.doExportUrl()
shortcuts =
'N': -> application.doStep()
'C': -> application.doReset()
'S': -> application.doSearch()
'R': ->application.doRandomFill()
'1': (e) -> application.paintStateSelector.setState 1
'2': (e) -> application.paintStateSelector.setState 2
'3': (e) -> application.paintStateSelector.setState 3
'4': (e) -> application.paintStateSelector.setState 4
'5': (e) -> application.paintStateSelector.setState 5
'M': doMemorize
'U': doRemember
'UA': doClearMemory
'H': doNavigateHome
'G': doTogglePlayer
'SA': (e) -> application.observer.straightenView()
'#32': doTogglePlayer
'P': (e) -> doSetPanMode true
'E': (e) -> doSetPanMode false
'SC': (e) -> application.saveDialog.show()
'OC': (e) -> application.openDialog.show()
document.addEventListener "keydown", (e)->
focused = document.activeElement
if focused and focused.tagName.toLowerCase() in ['textarea', 'input']
return
keyCode = if e.keyCode > 32 and e.keyCode < 128
String.fromCharCode e.keyCode
else
'#' + e.keyCode
keyCode += "C" if e.ctrlKey
keyCode += "A" if e.altKey
keyCode += "S" if e.shiftKey
#console.log keyCode
if (handler = shortcuts[keyCode])?
e.preventDefault()
handler(e)
##Application startup
application = new Application
application.initialize new UriConfig
doSetPanMode true
updatePopulation()
updateGeneration()
updateCanvasSize()
updateMemoryButtons()
updatePlayButtons()
redrawLoop()
#application.saveDialog.show()
|
[
{
"context": "ader = \"OUT, DAMN'D SPOT!\"\n message = \"Help Lady Macbeth clean the blood\\n\"+\n ",
"end": 1051,
"score": 0.5605266094207764,
"start": 1050,
"tag": "NAME",
"value": "L"
}
] | src/LobbyState.coffee | rocket-hands/MusicalSacrifice | 0 | MS = window.MusicalSacrifice
class LobbyState extends Phaser.State
create:->
pitch = @game.add.sprite(@game.world.centerX, @game.world.centerY, 'william')
pitch.anchor.setTo(0.5, 0.5)
pitch.width = 800
pitch.height = 450
@backgroundGroup = @game.add.group()
@playerGroup = @game.add.group()
@header ="=== MUSICAL SACRIFICE ==="
message ="#{@header}\n\nConnecting..."
style =
font: "30px Courier"
fill: "#00ff44"
align: "center"
@text = @game.add.text(@game.world.centerX, @game.world.centerY, message, style)
@text.anchor.setTo(0.5, 0.5)
update:->
gm = @game.entityManager.getGM()
nextLevel = gm?.nextLevel
if nextLevel?
message = "..."
if nextLevel == "Yorick"
@header = "ALAS, POOR YORICK!"
message = "Pass the ballskull around and make\n"+
"sure everyone gets a touch. Deliver\n"+
"a monologue if you dare!"
if nextLevel == "Cleaning"
@header = "OUT, DAMN'D SPOT!"
message = "Help Lady Macbeth clean the blood\n"+
"from the floorboards... get every\n"+
"drop to win!"
if nextLevel == "Acting"
@header = "WHAT LIGHT THROUGH YONDER WINDOW!"
message = "Woo your fellow players..."
if nextLevel == "Killing"
@header = "ET TU, BRUTE!"
message = "Grab the knives... whoever is last gets\n"+
"to be killed :("
if gm.intermissionMessage
message = "#{gm.intermissionMessage}\n\n#{message}"
@text.setText("#{@header}\n\n#{message}")
@text.anchor.setTo(0.5, 0)
@text.position.y = 100
@game.entityManager.update()
MS.LobbyState = LobbyState
| 174099 | MS = window.MusicalSacrifice
class LobbyState extends Phaser.State
create:->
pitch = @game.add.sprite(@game.world.centerX, @game.world.centerY, 'william')
pitch.anchor.setTo(0.5, 0.5)
pitch.width = 800
pitch.height = 450
@backgroundGroup = @game.add.group()
@playerGroup = @game.add.group()
@header ="=== MUSICAL SACRIFICE ==="
message ="#{@header}\n\nConnecting..."
style =
font: "30px Courier"
fill: "#00ff44"
align: "center"
@text = @game.add.text(@game.world.centerX, @game.world.centerY, message, style)
@text.anchor.setTo(0.5, 0.5)
update:->
gm = @game.entityManager.getGM()
nextLevel = gm?.nextLevel
if nextLevel?
message = "..."
if nextLevel == "Yorick"
@header = "ALAS, POOR YORICK!"
message = "Pass the ballskull around and make\n"+
"sure everyone gets a touch. Deliver\n"+
"a monologue if you dare!"
if nextLevel == "Cleaning"
@header = "OUT, DAMN'D SPOT!"
message = "Help <NAME>ady Macbeth clean the blood\n"+
"from the floorboards... get every\n"+
"drop to win!"
if nextLevel == "Acting"
@header = "WHAT LIGHT THROUGH YONDER WINDOW!"
message = "Woo your fellow players..."
if nextLevel == "Killing"
@header = "ET TU, BRUTE!"
message = "Grab the knives... whoever is last gets\n"+
"to be killed :("
if gm.intermissionMessage
message = "#{gm.intermissionMessage}\n\n#{message}"
@text.setText("#{@header}\n\n#{message}")
@text.anchor.setTo(0.5, 0)
@text.position.y = 100
@game.entityManager.update()
MS.LobbyState = LobbyState
| true | MS = window.MusicalSacrifice
class LobbyState extends Phaser.State
create:->
pitch = @game.add.sprite(@game.world.centerX, @game.world.centerY, 'william')
pitch.anchor.setTo(0.5, 0.5)
pitch.width = 800
pitch.height = 450
@backgroundGroup = @game.add.group()
@playerGroup = @game.add.group()
@header ="=== MUSICAL SACRIFICE ==="
message ="#{@header}\n\nConnecting..."
style =
font: "30px Courier"
fill: "#00ff44"
align: "center"
@text = @game.add.text(@game.world.centerX, @game.world.centerY, message, style)
@text.anchor.setTo(0.5, 0.5)
update:->
gm = @game.entityManager.getGM()
nextLevel = gm?.nextLevel
if nextLevel?
message = "..."
if nextLevel == "Yorick"
@header = "ALAS, POOR YORICK!"
message = "Pass the ballskull around and make\n"+
"sure everyone gets a touch. Deliver\n"+
"a monologue if you dare!"
if nextLevel == "Cleaning"
@header = "OUT, DAMN'D SPOT!"
message = "Help PI:NAME:<NAME>END_PIady Macbeth clean the blood\n"+
"from the floorboards... get every\n"+
"drop to win!"
if nextLevel == "Acting"
@header = "WHAT LIGHT THROUGH YONDER WINDOW!"
message = "Woo your fellow players..."
if nextLevel == "Killing"
@header = "ET TU, BRUTE!"
message = "Grab the knives... whoever is last gets\n"+
"to be killed :("
if gm.intermissionMessage
message = "#{gm.intermissionMessage}\n\n#{message}"
@text.setText("#{@header}\n\n#{message}")
@text.anchor.setTo(0.5, 0)
@text.position.y = 100
@game.entityManager.update()
MS.LobbyState = LobbyState
|
[
{
"context": "module.exports =\n\n name: 'Highlight All'\n\n edit:\n class AllSaver\n get: (command,",
"end": 40,
"score": 0.9586345553398132,
"start": 27,
"tag": "NAME",
"value": "Highlight All"
}
] | lib/stream-modifiers/all.coffee | fstiewitz/build-tools-cpp | 3 | module.exports =
name: 'Highlight All'
edit:
class AllSaver
get: (command, stream) ->
command[stream].pipeline.push name: 'all'
return null
modifier:
class AllModifier
modify: ({temp}) ->
temp.type = 'warning' unless temp.type? and temp.type isnt ''
return null
| 86871 | module.exports =
name: '<NAME>'
edit:
class AllSaver
get: (command, stream) ->
command[stream].pipeline.push name: 'all'
return null
modifier:
class AllModifier
modify: ({temp}) ->
temp.type = 'warning' unless temp.type? and temp.type isnt ''
return null
| true | module.exports =
name: 'PI:NAME:<NAME>END_PI'
edit:
class AllSaver
get: (command, stream) ->
command[stream].pipeline.push name: 'all'
return null
modifier:
class AllModifier
modify: ({temp}) ->
temp.type = 'warning' unless temp.type? and temp.type isnt ''
return null
|
[
{
"context": "# tinytest-fixture-account\n# MIT License ben@latenightsketches.com\n# test/fixtureAccount.coffee\n\nif Meteor.isClient\n",
"end": 66,
"score": 0.9999234080314636,
"start": 41,
"tag": "EMAIL",
"value": "ben@latenightsketches.com"
},
{
"context": " Meteor.user()\n ... | test/fixtureAccount.coffee | numtel/tinytest-fixture-account | 2 | # tinytest-fixture-account
# MIT License ben@latenightsketches.com
# test/fixtureAccount.coffee
if Meteor.isClient
Tinytest.addAsync 'account created + deleted', (test, done) ->
# Create an account then cleanup, then do it with same username again to
# make sure account is deleted.
secondAccount = ->
fixtureAccount done, {}, (error, cleanup) ->
throw error if error
user = Meteor.user()
test.equal user.username, 'testUser5'
cleanup()
fixtureAccount secondAccount, {}, (error, cleanup) ->
throw error if error
user = Meteor.user()
test.equal user.username, 'testUser5'
cleanup()
| 188069 | # tinytest-fixture-account
# MIT License <EMAIL>
# test/fixtureAccount.coffee
if Meteor.isClient
Tinytest.addAsync 'account created + deleted', (test, done) ->
# Create an account then cleanup, then do it with same username again to
# make sure account is deleted.
secondAccount = ->
fixtureAccount done, {}, (error, cleanup) ->
throw error if error
user = Meteor.user()
test.equal user.username, 'testUser5'
cleanup()
fixtureAccount secondAccount, {}, (error, cleanup) ->
throw error if error
user = Meteor.user()
test.equal user.username, 'testUser5'
cleanup()
| true | # tinytest-fixture-account
# MIT License PI:EMAIL:<EMAIL>END_PI
# test/fixtureAccount.coffee
if Meteor.isClient
Tinytest.addAsync 'account created + deleted', (test, done) ->
# Create an account then cleanup, then do it with same username again to
# make sure account is deleted.
secondAccount = ->
fixtureAccount done, {}, (error, cleanup) ->
throw error if error
user = Meteor.user()
test.equal user.username, 'testUser5'
cleanup()
fixtureAccount secondAccount, {}, (error, cleanup) ->
throw error if error
user = Meteor.user()
test.equal user.username, 'testUser5'
cleanup()
|
[
{
"context": "s\",\"Prov\",\"Eccl\",\"Song\",\"Isa\",\"Jer\",\"Lam\",\"Ezek\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"",
"end": 505,
"score": 0.7974417209625244,
"start": 502,
"tag": "NAME",
"value": "Dan"
},
{
"context": "ov\",\"Eccl\",\"Song\",\"Isa\"... | src/sq/spec.coffee | phillipb/Bible-Passage-Reference-Parser | 149 | bcv_parser = require("../../js/sq_bcv_parser.js").bcv_parser
describe "Parsing", ->
p = {}
beforeEach ->
p = new bcv_parser
p.options.osis_compaction_strategy = "b"
p.options.sequence_combination_strategy = "combine"
it "should round-trip OSIS references", ->
p.set_options osis_compaction_strategy: "bc"
books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
for book in books
bc = book + ".1"
bcv = bc + ".1"
bcv_range = bcv + "-" + bc + ".2"
expect(p.parse(bc).osis()).toEqual bc
expect(p.parse(bcv).osis()).toEqual bcv
expect(p.parse(bcv_range).osis()).toEqual bcv_range
it "should round-trip OSIS Apocrypha references", ->
p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
p.include_apocrypha true
books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
for book in books
bc = book + ".1"
bcv = bc + ".1"
bcv_range = bcv + "-" + bc + ".2"
expect(p.parse(bc).osis()).toEqual bc
expect(p.parse(bcv).osis()).toEqual bcv
expect(p.parse(bcv_range).osis()).toEqual bcv_range
p.set_options ps151_strategy: "bc"
expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
p.include_apocrypha false
for book in books
bc = book + ".1"
expect(p.parse(bc).osis()).toEqual ""
it "should handle a preceding character", ->
expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
expect(p.parse("1Ps 1").osis()).toEqual ""
expect(p.parse("11Sam 1").osis()).toEqual ""
describe "Localized book Gen (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gen (sq)", ->
`
expect(p.parse("Zanafilla 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Zan 1:1").osis()).toEqual("Gen.1.1")
p.include_apocrypha(false)
expect(p.parse("ZANAFILLA 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("ZAN 1:1").osis()).toEqual("Gen.1.1")
`
true
describe "Localized book Exod (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Exod (sq)", ->
`
expect(p.parse("Eksodi 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Dalja 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Dal 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Eks 1:1").osis()).toEqual("Exod.1.1")
p.include_apocrypha(false)
expect(p.parse("EKSODI 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("DALJA 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("DAL 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EKS 1:1").osis()).toEqual("Exod.1.1")
`
true
describe "Localized book Bel (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bel (sq)", ->
`
expect(p.parse("Bel 1:1").osis()).toEqual("Bel.1.1")
`
true
describe "Localized book Lev (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lev (sq)", ->
`
expect(p.parse("Levitiket 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Levitikët 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Levitiku 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
p.include_apocrypha(false)
expect(p.parse("LEVITIKET 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEVITIKËT 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEVITIKU 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
`
true
describe "Localized book Num (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Num (sq)", ->
`
expect(p.parse("Numrat 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
p.include_apocrypha(false)
expect(p.parse("NUMRAT 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
`
true
describe "Localized book Sir (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sir (sq)", ->
`
expect(p.parse("Sir 1:1").osis()).toEqual("Sir.1.1")
`
true
describe "Localized book Wis (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Wis (sq)", ->
`
expect(p.parse("Wis 1:1").osis()).toEqual("Wis.1.1")
`
true
describe "Localized book Lam (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lam (sq)", ->
`
expect(p.parse("Vajtimet 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Vaj 1:1").osis()).toEqual("Lam.1.1")
p.include_apocrypha(false)
expect(p.parse("VAJTIMET 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("VAJ 1:1").osis()).toEqual("Lam.1.1")
`
true
describe "Localized book EpJer (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: EpJer (sq)", ->
`
expect(p.parse("EpJer 1:1").osis()).toEqual("EpJer.1.1")
`
true
describe "Localized book Rev (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Rev (sq)", ->
`
expect(p.parse("Zbulesa 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Zbu 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Zb 1:1").osis()).toEqual("Rev.1.1")
p.include_apocrypha(false)
expect(p.parse("ZBULESA 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("ZBU 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("ZB 1:1").osis()).toEqual("Rev.1.1")
`
true
describe "Localized book PrMan (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PrMan (sq)", ->
`
expect(p.parse("PrMan 1:1").osis()).toEqual("PrMan.1.1")
`
true
describe "Localized book Deut (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Deut (sq)", ->
`
expect(p.parse("Ligji i Perterire 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Ligji i Perterirë 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Ligji i Pertërire 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Ligji i Pertërirë 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Ligji i Përterire 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Ligji i Përterirë 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Ligji i Përtërire 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Ligji i Përtërirë 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Ligji i përtërirë 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("LiP 1:1").osis()).toEqual("Deut.1.1")
p.include_apocrypha(false)
expect(p.parse("LIGJI I PERTERIRE 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("LIGJI I PERTERIRË 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("LIGJI I PERTËRIRE 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("LIGJI I PERTËRIRË 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("LIGJI I PËRTERIRE 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("LIGJI I PËRTERIRË 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("LIGJI I PËRTËRIRE 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("LIGJI I PËRTËRIRË 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("LIGJI I PËRTËRIRË 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("LIP 1:1").osis()).toEqual("Deut.1.1")
`
true
describe "Localized book Josh (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Josh (sq)", ->
`
expect(p.parse("Jozueu 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("Josh 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("Joz 1:1").osis()).toEqual("Josh.1.1")
p.include_apocrypha(false)
expect(p.parse("JOZUEU 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOZ 1:1").osis()).toEqual("Josh.1.1")
`
true
describe "Localized book Judg (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Judg (sq)", ->
`
expect(p.parse("Gjyqtaret 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Gjyqtarët 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Gjy 1:1").osis()).toEqual("Judg.1.1")
p.include_apocrypha(false)
expect(p.parse("GJYQTARET 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("GJYQTARËT 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("GJY 1:1").osis()).toEqual("Judg.1.1")
`
true
describe "Localized book Ruth (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ruth (sq)", ->
`
expect(p.parse("Ruthi 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
p.include_apocrypha(false)
expect(p.parse("RUTHI 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
`
true
describe "Localized book 1Esd (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Esd (sq)", ->
`
expect(p.parse("1Esd 1:1").osis()).toEqual("1Esd.1.1")
`
true
describe "Localized book 2Esd (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Esd (sq)", ->
`
expect(p.parse("2Esd 1:1").osis()).toEqual("2Esd.1.1")
`
true
describe "Localized book Isa (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Isa (sq)", ->
`
expect(p.parse("Jesaja 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Isaia 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
p.include_apocrypha(false)
expect(p.parse("JESAJA 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("ISAIA 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
`
true
describe "Localized book 2Sam (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Sam (sq)", ->
`
expect(p.parse("2 e. Samuelit 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 i. Samuelit 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 e Samuelit 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 i Samuelit 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. Samuelit 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 Samuelit 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 Sam 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2Sam 1:1").osis()).toEqual("2Sam.1.1")
p.include_apocrypha(false)
expect(p.parse("2 E. SAMUELIT 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 I. SAMUELIT 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 E SAMUELIT 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 I SAMUELIT 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. SAMUELIT 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 SAMUELIT 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2SAM 1:1").osis()).toEqual("2Sam.1.1")
`
true
describe "Localized book 1Sam (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Sam (sq)", ->
`
expect(p.parse("1 e. Samuelit 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 i. Samuelit 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 e Samuelit 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 i Samuelit 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. Samuelit 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 Samuelit 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 Sam 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1Sam 1:1").osis()).toEqual("1Sam.1.1")
p.include_apocrypha(false)
expect(p.parse("1 E. SAMUELIT 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 I. SAMUELIT 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 E SAMUELIT 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 I SAMUELIT 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. SAMUELIT 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAMUELIT 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1SAM 1:1").osis()).toEqual("1Sam.1.1")
`
true
describe "Localized book 2Kgs (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Kgs (sq)", ->
`
expect(p.parse("2 e. Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 e. Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 i. Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 i. Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4 e. Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4 e. Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4 i. Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4 i. Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 e Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 e Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 i Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 i Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4 e Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4 e Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4 i Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4 i Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4. Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4. Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4 Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4 Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Mb 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2Kgs 1:1").osis()).toEqual("2Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("2 E. MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 E. MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 I. MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 I. MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4 E. MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4 E. MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4 I. MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4 I. MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 E MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 E MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 I MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 I MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4 E MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4 E MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4 I MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4 I MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4. MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4. MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4 MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("4 MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 MB 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2KGS 1:1").osis()).toEqual("2Kgs.1.1")
`
true
describe "Localized book 1Kgs (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Kgs (sq)", ->
`
expect(p.parse("1 e. Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 e. Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 i. Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 i. Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3 e. Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3 e. Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3 i. Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3 i. Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 e Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 e Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 i Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 i Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3 e Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3 e Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3 i Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3 i Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3. Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3. Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3 Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3 Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Mb 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1Kgs 1:1").osis()).toEqual("1Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("1 E. MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 E. MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 I. MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 I. MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3 E. MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3 E. MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3 I. MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3 I. MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 E MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 E MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 I MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 I MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3 E MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3 E MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3 I MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3 I MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3. MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3. MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3 MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("3 MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 MB 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1KGS 1:1").osis()).toEqual("1Kgs.1.1")
`
true
describe "Localized book 2Chr (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Chr (sq)", ->
`
expect(p.parse("2 e. Kronikave 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 i. Kronikave 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 e Kronikave 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 i Kronikave 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 e. Kronika 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 i. Kronika 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. Kronikave 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 Kronikave 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 e Kronika 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 i Kronika 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. Kronika 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 Kronika 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 Kr 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2Chr 1:1").osis()).toEqual("2Chr.1.1")
p.include_apocrypha(false)
expect(p.parse("2 E. KRONIKAVE 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 I. KRONIKAVE 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 E KRONIKAVE 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 I KRONIKAVE 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 E. KRONIKA 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 I. KRONIKA 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. KRONIKAVE 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 KRONIKAVE 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 E KRONIKA 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 I KRONIKA 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. KRONIKA 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 KRONIKA 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 KR 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2CHR 1:1").osis()).toEqual("2Chr.1.1")
`
true
describe "Localized book 1Chr (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Chr (sq)", ->
`
expect(p.parse("1 e. Kronikave 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 i. Kronikave 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 e Kronikave 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 i Kronikave 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 e. Kronika 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 i. Kronika 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. Kronikave 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 Kronikave 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 e Kronika 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 i Kronika 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. Kronika 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 Kronika 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 Kr 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1Chr 1:1").osis()).toEqual("1Chr.1.1")
p.include_apocrypha(false)
expect(p.parse("1 E. KRONIKAVE 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 I. KRONIKAVE 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 E KRONIKAVE 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 I KRONIKAVE 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 E. KRONIKA 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 I. KRONIKA 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. KRONIKAVE 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 KRONIKAVE 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 E KRONIKA 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 I KRONIKA 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. KRONIKA 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 KRONIKA 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 KR 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1CHR 1:1").osis()).toEqual("1Chr.1.1")
`
true
describe "Localized book Ezra (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ezra (sq)", ->
`
expect(p.parse("Esdra 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("Ezra 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("Esd 1:1").osis()).toEqual("Ezra.1.1")
p.include_apocrypha(false)
expect(p.parse("ESDRA 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("EZRA 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("ESD 1:1").osis()).toEqual("Ezra.1.1")
`
true
describe "Localized book Neh (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Neh (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Nehemia", "Neh"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Neh.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Neh.1.1") for name in names
		true
describe "Localized book GkEsth (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: GkEsth (sq)", ->
		# Only the canonical OSIS form exists for this book in sq.
		expect(p.parse("GkEsth 1:1").osis()).toEqual("GkEsth.1.1")
		true
describe "Localized book Esth (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Esth (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Ester", "Esth", "Est"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Esth.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Esth.1.1") for name in names
		true
describe "Localized book Job (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Job (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Hiobi", "Jobi", "Job"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Job.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Job.1.1") for name in names
		true
describe "Localized book Ps (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ps (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Libri i Psalmeve", "Psalmet", "Psalmi", "Ps"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Ps.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Ps.1.1") for name in names
		true
describe "Localized book PrAzar (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: PrAzar (sq)", ->
		# Only the canonical OSIS form exists for this book in sq.
		expect(p.parse("PrAzar 1:1").osis()).toEqual("PrAzar.1.1")
		true
describe "Localized book Prov (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Prov (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Fjalet e urta", "Fjalët e urta", "Prov"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Prov.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Prov.1.1") for name in names
		true
describe "Localized book Eccl (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Eccl (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Predikuesi", "Eccl", "Pred"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Eccl.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Eccl.1.1") for name in names
		true
describe "Localized book SgThree (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: SgThree (sq)", ->
		# Only the canonical OSIS form exists for this book in sq.
		expect(p.parse("SgThree 1:1").osis()).toEqual("SgThree.1.1")
		true
describe "Localized book Song (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Song (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Kantiku i Kantikeve", "Kantiku i Kantikëve", "Kant", "Song"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Song.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Song.1.1") for name in names
		true
describe "Localized book Jer (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jer (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Jeremia", "Jer"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Jer.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Jer.1.1") for name in names
		true
describe "Localized book Ezek (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezek (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Ezekieli", "Ezek", "Eze"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Ezek.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Ezek.1.1") for name in names
		true
describe "Localized book Dan (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Dan (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Danieli", "Dan"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Dan.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Dan.1.1") for name in names
		true
describe "Localized book Hos (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hos (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Osea", "Hos", "Os"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Hos.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Hos.1.1") for name in names
		true
describe "Localized book Joel (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Joel (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Joeli", "Joel", "Jl"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Joel.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Joel.1.1") for name in names
		true
describe "Localized book Amos (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Amos (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Amosi", "Amos", "Am"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Amos.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Amos.1.1") for name in names
		true
describe "Localized book Obad (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Obad (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Abdia", "Obad", "Abd"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Obad.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Obad.1.1") for name in names
		true
describe "Localized book Jonah (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jonah (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Jonah", "Jona", "Jon"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Jonah.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Jonah.1.1") for name in names
		true
describe "Localized book Mic (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mic (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Mikea", "Mic", "Mik"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Mic.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Mic.1.1") for name in names
		true
describe "Localized book Nah (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Nah (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Nahumi", "Nah"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Nah.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Nah.1.1") for name in names
		true
describe "Localized book Hab (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hab (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Habakuku", "Hab"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Hab.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Hab.1.1") for name in names
		true
describe "Localized book Zeph (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zeph (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Sofonia", "Zeph", "Sof"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Zeph.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Zeph.1.1") for name in names
		true
describe "Localized book Hag (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hag (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Hagai", "Hag"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Hag.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Hag.1.1") for name in names
		true
describe "Localized book Zech (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zech (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Zakaria", "Zech", "Zak"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Zech.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Zech.1.1") for name in names
		true
describe "Localized book Mal (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mal (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Malakia", "Mal"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Mal.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Mal.1.1") for name in names
		true
describe "Localized book Matt (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Matt (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Ungjilli i Mateut", "Mateu", "Matt", "Mt"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Matt.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Matt.1.1") for name in names
		true
describe "Localized book Mark (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mark (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Ungjilli i Markut", "Marku", "Mark"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Mark.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Mark.1.1") for name in names
		true
describe "Localized book Luke (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Luke (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Ungjilli i Lukes", "Ungjilli i Lukës", "Lluka", "Luka", "Luke", "Llu"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Luke.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Luke.1.1") for name in names
		true
describe "Localized book 1John (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1John (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["1 e. Gjonit", "1 i. Gjonit", "1 e Gjonit", "1 i Gjonit", "1. Gjonit", "1 Gjonit", "1John", "1 Gj"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("1John.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("1John.1.1") for name in names
		true
describe "Localized book 2John (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2John (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["2 e. Gjonit", "2 i. Gjonit", "2 e Gjonit", "2 i Gjonit", "2. Gjonit", "2 Gjonit", "2John", "2 Gj"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("2John.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("2John.1.1") for name in names
		true
describe "Localized book 3John (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3John (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["3 e. Gjonit", "3 i. Gjonit", "3 e Gjonit", "3 i Gjonit", "3. Gjonit", "3 Gjonit", "3John", "3 Gj"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("3John.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("3John.1.1") for name in names
		true
describe "Localized book John (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: John (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Ungjilli i Gjonit", "Gjoni", "John", "Gjo"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("John.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("John.1.1") for name in names
		true
describe "Localized book Acts (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Acts (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		# NOTE: the two "Veprat e [Aa]postujve" variants collapse to the same
		# upper-case string, so that form is (intentionally) checked twice.
		names = ["Veprat e Apostujve", "Veprat e apostujve", "Veprat", "Acts", "Vep"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Acts.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Acts.1.1") for name in names
		true
describe "Localized book Rom (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rom (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Romakeve", "Romakëve", "Rom"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Rom.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Rom.1.1") for name in names
		true
describe "Localized book 2Cor (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Cor (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = [
			"2 e. Korintasve", "2 i. Korintasve", "2 e Korintasve", "2 e. Koritasve"
			"2 i Korintasve", "2 i. Koritasve", "2 e Koritasve", "2 i Koritasve"
			"2. Korintasve", "2 Korintasve", "2. Koritasve", "2 Koritasve"
			"2 Kor", "2Cor"
		]
		expect(p.parse("#{name} 1:1").osis()).toEqual("2Cor.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("2Cor.1.1") for name in names
		true
describe "Localized book 1Cor (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Cor (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = [
			"1 e. Korintasve", "1 i. Korintasve", "1 e Korintasve", "1 e. Koritasve"
			"1 i Korintasve", "1 i. Koritasve", "1 e Koritasve", "1 i Koritasve"
			"1. Korintasve", "1 Korintasve", "1. Koritasve", "1 Koritasve"
			"1 Kor", "1Cor"
		]
		expect(p.parse("#{name} 1:1").osis()).toEqual("1Cor.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("1Cor.1.1") for name in names
		true
describe "Localized book Gal (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Gal (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Galatasve", "Gal"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Gal.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Gal.1.1") for name in names
		true
describe "Localized book Eph (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Eph (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Efesianeve", "Efesianëve", "Eph", "Ef"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Eph.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Eph.1.1") for name in names
		true
describe "Localized book Phil (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phil (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Filipianeve", "Filipianëve", "Phil", "Fil"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Phil.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Phil.1.1") for name in names
		true
describe "Localized book Col (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Col (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = ["Kolosianeve", "Kolosianëve", "Col", "Kol"]
		expect(p.parse("#{name} 1:1").osis()).toEqual("Col.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("Col.1.1") for name in names
		true
describe "Localized book 2Thess (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Thess (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = [
			"2 e. Thesalonikasve", "2 i. Thesalonikasve", "2 e Thesalonikasve"
			"2 i Thesalonikasve", "2. Thesalonikasve", "2 Thesalonikasve"
			"2 e. Selanikasve", "2 i. Selanikasve", "2 e Selanikasve"
			"2 i Selanikasve", "2. Selanikasve", "2 Selanikasve"
			"2Thess", "2 Th"
		]
		expect(p.parse("#{name} 1:1").osis()).toEqual("2Thess.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("2Thess.1.1") for name in names
		true
describe "Localized book 1Thess (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Thess (sq)", ->
		# As-written names with the Apocrypha on, then upper-case with it off.
		names = [
			"1 e. Thesalonikasve", "1 i. Thesalonikasve", "1 e Thesalonikasve"
			"1 i Thesalonikasve", "1. Thesalonikasve", "1 Thesalonikasve"
			"1 e. Selanikasve", "1 i. Selanikasve", "1 e Selanikasve"
			"1 i Selanikasve", "1. Selanikasve", "1 Selanikasve"
			"1Thess", "1 Th"
		]
		expect(p.parse("#{name} 1:1").osis()).toEqual("1Thess.1.1") for name in names
		p.include_apocrypha false
		expect(p.parse("#{name.toUpperCase()} 1:1").osis()).toEqual("1Thess.1.1") for name in names
		true
describe "Localized book 2Tim (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Tim (sq)", ->
`
expect(p.parse("2 e. Timoteut 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 i. Timoteut 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 e Timoteut 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 i Timoteut 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. Timoteut 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 Timoteut 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 Tim 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2Tim 1:1").osis()).toEqual("2Tim.1.1")
p.include_apocrypha(false)
expect(p.parse("2 E. TIMOTEUT 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 I. TIMOTEUT 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 E TIMOTEUT 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 I TIMOTEUT 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIMOTEUT 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIMOTEUT 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIM 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2TIM 1:1").osis()).toEqual("2Tim.1.1")
`
true
describe "Localized book 1Tim (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Tim (sq)", ->
`
expect(p.parse("1 e. Timoteut 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 i. Timoteut 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 e Timoteut 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 i Timoteut 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. Timoteut 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 Timoteut 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 Tim 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1Tim 1:1").osis()).toEqual("1Tim.1.1")
p.include_apocrypha(false)
expect(p.parse("1 E. TIMOTEUT 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 I. TIMOTEUT 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 E TIMOTEUT 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 I TIMOTEUT 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIMOTEUT 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIMOTEUT 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIM 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1TIM 1:1").osis()).toEqual("1Tim.1.1")
`
true
describe "Localized book Titus (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Titus (sq)", ->
`
expect(p.parse("Titit 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("Titus 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("Titi 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("Tit 1:1").osis()).toEqual("Titus.1.1")
p.include_apocrypha(false)
expect(p.parse("TITIT 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TITUS 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TITI 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TIT 1:1").osis()).toEqual("Titus.1.1")
`
true
describe "Localized book Phlm (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Phlm (sq)", ->
`
expect(p.parse("Filemonit 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("Filem 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("Phlm 1:1").osis()).toEqual("Phlm.1.1")
p.include_apocrypha(false)
expect(p.parse("FILEMONIT 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("FILEM 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("PHLM 1:1").osis()).toEqual("Phlm.1.1")
`
true
describe "Localized book Heb (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Heb (sq)", ->
`
expect(p.parse("Hebrenjve 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Heb 1:1").osis()).toEqual("Heb.1.1")
p.include_apocrypha(false)
expect(p.parse("HEBRENJVE 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEB 1:1").osis()).toEqual("Heb.1.1")
`
true
describe "Localized book Jas (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jas (sq)", ->
`
expect(p.parse("Jakobit 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jakobi 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jak 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jas 1:1").osis()).toEqual("Jas.1.1")
p.include_apocrypha(false)
expect(p.parse("JAKOBIT 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAKOBI 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAK 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAS 1:1").osis()).toEqual("Jas.1.1")
`
true
describe "Localized book 2Pet (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Pet (sq)", ->
`
expect(p.parse("2 e. Pjetrit 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 i. Pjetrit 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 e Pjetrit 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 i Pjetrit 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. Pjetrit 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Pjetrit 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Pje 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Pj 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2Pet 1:1").osis()).toEqual("2Pet.1.1")
p.include_apocrypha(false)
expect(p.parse("2 E. PJETRIT 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 I. PJETRIT 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 E PJETRIT 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 I PJETRIT 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. PJETRIT 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PJETRIT 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PJE 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PJ 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2PET 1:1").osis()).toEqual("2Pet.1.1")
`
true
describe "Localized book 1Pet (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Pet (sq)", ->
`
expect(p.parse("1 e. Pjetrit 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 i. Pjetrit 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 e Pjetrit 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 i Pjetrit 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. Pjetrit 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Pjetrit 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Pje 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Pj 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1Pet 1:1").osis()).toEqual("1Pet.1.1")
p.include_apocrypha(false)
expect(p.parse("1 E. PJETRIT 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 I. PJETRIT 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 E PJETRIT 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 I PJETRIT 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. PJETRIT 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PJETRIT 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PJE 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PJ 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1PET 1:1").osis()).toEqual("1Pet.1.1")
`
true
describe "Localized book Jude (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jude (sq)", ->
`
expect(p.parse("Juda 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("Jude 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("Jud 1:1").osis()).toEqual("Jude.1.1")
p.include_apocrypha(false)
expect(p.parse("JUDA 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("JUDE 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("JUD 1:1").osis()).toEqual("Jude.1.1")
`
true
describe "Localized book Tob (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Tob (sq)", ->
`
expect(p.parse("Tob 1:1").osis()).toEqual("Tob.1.1")
`
true
describe "Localized book Jdt (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jdt (sq)", ->
`
expect(p.parse("Jdt 1:1").osis()).toEqual("Jdt.1.1")
`
true
describe "Localized book Bar (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bar (sq)", ->
`
expect(p.parse("Bar 1:1").osis()).toEqual("Bar.1.1")
`
true
describe "Localized book Sus (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sus (sq)", ->
`
expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
`
true
describe "Localized book 2Macc (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Macc (sq)", ->
`
expect(p.parse("2Macc 1:1").osis()).toEqual("2Macc.1.1")
`
true
describe "Localized book 3Macc (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 3Macc (sq)", ->
`
expect(p.parse("3Macc 1:1").osis()).toEqual("3Macc.1.1")
`
true
describe "Localized book 4Macc (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 4Macc (sq)", ->
`
expect(p.parse("4Macc 1:1").osis()).toEqual("4Macc.1.1")
`
true
describe "Localized book 1Macc (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Macc (sq)", ->
`
expect(p.parse("1Macc 1:1").osis()).toEqual("1Macc.1.1")
`
true
describe "Miscellaneous tests", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should return the expected language", ->
expect(p.languages).toEqual ["sq"]
it "should handle ranges (sq)", ->
expect(p.parse("Titus 1:1 - 2").osis()).toEqual "Titus.1.1-Titus.1.2"
expect(p.parse("Matt 1-2").osis()).toEqual "Matt.1-Matt.2"
expect(p.parse("Phlm 2 - 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
it "should handle chapters (sq)", ->
expect(p.parse("Titus 1:1, chapter 2").osis()).toEqual "Titus.1.1,Titus.2"
expect(p.parse("Matt 3:4 CHAPTER 6").osis()).toEqual "Matt.3.4,Matt.6"
it "should handle verses (sq)", ->
expect(p.parse("Exod 1:1 verse 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm VERSE 6").osis()).toEqual "Phlm.1.6"
it "should handle 'and' (sq)", ->
expect(p.parse("Exod 1:1 and 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm 2 AND 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
it "should handle titles (sq)", ->
expect(p.parse("Ps 3 title, 4:2, 5:title").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
expect(p.parse("PS 3 TITLE, 4:2, 5:TITLE").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
it "should handle 'ff' (sq)", ->
expect(p.parse("Rev 3ff, 4:2ff").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
expect(p.parse("REV 3 FF, 4:2 FF").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
it "should handle translations (sq)", ->
expect(p.parse("Lev 1 (ALB)").osis_and_translations()).toEqual [["Lev.1", "ALB"]]
expect(p.parse("lev 1 alb").osis_and_translations()).toEqual [["Lev.1", "ALB"]]
it "should handle book ranges (sq)", ->
p.set_options {book_alone_strategy: "full", book_range_strategy: "include"}
expect(p.parse("1 i - 3 i Gjonit").osis()).toEqual "1John.1-3John.1"
it "should handle boundaries (sq)", ->
p.set_options {book_alone_strategy: "full"}
expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
# Load the Albanian ("sq") build of the parser; everything below exercises
# its public parse()/osis() API. (Removed stray "| 177018 |" extraction junk
# that had been fused onto the front of this line.)
bcv_parser = require("../../js/sq_bcv_parser.js").bcv_parser
# Locale-independent sanity checks: every canonical OSIS reference should
# survive a parse -> osis() round trip unchanged.
describe "Parsing", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.options.osis_compaction_strategy = "b"
		p.options.sequence_combination_strategy = "combine"
	it "should round-trip OSIS references", ->
		p.set_options osis_compaction_strategy: "bc"
		# The 66 protocanonical books, in canonical order, using standard OSIS
		# abbreviations (placeholders left by an anonymization pass have been
		# restored: Dan, Hos, Joel, Amos, Obad, Jonah, Mic, Nah, Mal, Matt,
		# Mark, Luke, John).
		books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
	it "should round-trip OSIS Apocrypha references", ->
		p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
		p.include_apocrypha true
		books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
		# With the "bc" Ps151 strategy, Ps151 references are reported as Ps 151.
		p.set_options ps151_strategy: "bc"
		expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
		expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
		expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
		# Once the Apocrypha is disabled, none of these books should parse.
		p.include_apocrypha false
		for book in books
			bc = book + ".1"
			expect(p.parse(bc).osis()).toEqual ""
	it "should handle a preceding character", ->
		expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
		expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
		expect(p.parse("1Ps 1").osis()).toEqual ""
		expect(p.parse("11Sam 1").osis()).toEqual ""
# ---------------------------------------------------------------------------
# Auto-generated per-book locale specs (Albanian, "sq"), same template as the
# New Testament section: each `describe` builds a strict parser in
# `beforeEach`, then the `it` block verifies every known localized spelling of
# one book (mixed case, then upper case after disabling the Apocrypha).
# The backtick fences embed plain JavaScript; the trailing `true` keeps the
# embedded JS out of the `it` callback's implicit return. The literal inputs
# are generated fixtures — do not hand-edit them.
# ---------------------------------------------------------------------------
describe "Localized book Gen (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Gen (sq)", ->
		`
		expect(p.parse("Zanafilla 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Zan 1:1").osis()).toEqual("Gen.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ZANAFILLA 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("ZAN 1:1").osis()).toEqual("Gen.1.1")
		`
		true
describe "Localized book Exod (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Exod (sq)", ->
		`
		expect(p.parse("Eksodi 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Dalja 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Dal 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Eks 1:1").osis()).toEqual("Exod.1.1")
		p.include_apocrypha(false)
		expect(p.parse("EKSODI 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("DALJA 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("DAL 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("EKS 1:1").osis()).toEqual("Exod.1.1")
		`
		true
describe "Localized book Bel (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Bel (sq)", ->
		`
		expect(p.parse("Bel 1:1").osis()).toEqual("Bel.1.1")
		`
		true
describe "Localized book Lev (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Lev (sq)", ->
		`
		expect(p.parse("Levitiket 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Levitikët 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Levitiku 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
		p.include_apocrypha(false)
		expect(p.parse("LEVITIKET 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVITIKËT 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVITIKU 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
		`
		true
describe "Localized book Num (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Num (sq)", ->
		`
		expect(p.parse("Numrat 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
		p.include_apocrypha(false)
		expect(p.parse("NUMRAT 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
		`
		true
describe "Localized book Sir (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Sir (sq)", ->
		`
		expect(p.parse("Sir 1:1").osis()).toEqual("Sir.1.1")
		`
		true
describe "Localized book Wis (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Wis (sq)", ->
		`
		expect(p.parse("Wis 1:1").osis()).toEqual("Wis.1.1")
		`
		true
describe "Localized book Lam (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Lam (sq)", ->
		`
		expect(p.parse("Vajtimet 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("Vaj 1:1").osis()).toEqual("Lam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("VAJTIMET 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("VAJ 1:1").osis()).toEqual("Lam.1.1")
		`
		true
describe "Localized book EpJer (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: EpJer (sq)", ->
		`
		expect(p.parse("EpJer 1:1").osis()).toEqual("EpJer.1.1")
		`
		true
describe "Localized book Rev (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rev (sq)", ->
		`
		expect(p.parse("Zbulesa 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Zbu 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Zb 1:1").osis()).toEqual("Rev.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ZBULESA 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("ZBU 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("ZB 1:1").osis()).toEqual("Rev.1.1")
		`
		true
describe "Localized book PrMan (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: PrMan (sq)", ->
		`
		expect(p.parse("PrMan 1:1").osis()).toEqual("PrMan.1.1")
		`
		true
# Deut enumerates every diacritic combination of "Përtërirë"; the generator
# uppercases each variant, which is why two identical uppercase lines appear.
describe "Localized book Deut (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Deut (sq)", ->
		`
		expect(p.parse("Ligji i Perterire 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Ligji i Perterirë 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Ligji i Pertërire 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Ligji i Pertërirë 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Ligji i Përterire 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Ligji i Përterirë 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Ligji i Përtërire 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Ligji i Përtërirë 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Ligji i përtërirë 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("LiP 1:1").osis()).toEqual("Deut.1.1")
		p.include_apocrypha(false)
		expect(p.parse("LIGJI I PERTERIRE 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("LIGJI I PERTERIRË 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("LIGJI I PERTËRIRE 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("LIGJI I PERTËRIRË 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("LIGJI I PËRTERIRE 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("LIGJI I PËRTERIRË 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("LIGJI I PËRTËRIRE 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("LIGJI I PËRTËRIRË 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("LIGJI I PËRTËRIRË 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("LIP 1:1").osis()).toEqual("Deut.1.1")
		`
		true
describe "Localized book Josh (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Josh (sq)", ->
		`
		expect(p.parse("Jozueu 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("Josh 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("Joz 1:1").osis()).toEqual("Josh.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JOZUEU 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("JOZ 1:1").osis()).toEqual("Josh.1.1")
		`
		true
describe "Localized book Judg (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Judg (sq)", ->
		`
		expect(p.parse("Gjyqtaret 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("Gjyqtarët 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("Gjy 1:1").osis()).toEqual("Judg.1.1")
		p.include_apocrypha(false)
		expect(p.parse("GJYQTARET 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("GJYQTARËT 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("GJY 1:1").osis()).toEqual("Judg.1.1")
		`
		true
describe "Localized book Ruth (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ruth (sq)", ->
		`
		expect(p.parse("Ruthi 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
		p.include_apocrypha(false)
		expect(p.parse("RUTHI 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
		`
		true
describe "Localized book 1Esd (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Esd (sq)", ->
		`
		expect(p.parse("1Esd 1:1").osis()).toEqual("1Esd.1.1")
		`
		true
describe "Localized book 2Esd (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Esd (sq)", ->
		`
		expect(p.parse("2Esd 1:1").osis()).toEqual("2Esd.1.1")
		`
		true
describe "Localized book Isa (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Isa (sq)", ->
		`
		expect(p.parse("Jesaja 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Isaia 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JESAJA 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("ISAIA 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
		`
		true
describe "Localized book 2Sam (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Sam (sq)", ->
		`
		expect(p.parse("2 e. Samuelit 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 i. Samuelit 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 e Samuelit 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 i Samuelit 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. Samuelit 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 Samuelit 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 Sam 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2Sam 1:1").osis()).toEqual("2Sam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 E. SAMUELIT 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 I. SAMUELIT 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 E SAMUELIT 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 I SAMUELIT 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. SAMUELIT 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAMUELIT 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAM 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2SAM 1:1").osis()).toEqual("2Sam.1.1")
		`
		true
describe "Localized book 1Sam (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Sam (sq)", ->
		`
		expect(p.parse("1 e. Samuelit 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 i. Samuelit 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 e Samuelit 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 i Samuelit 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. Samuelit 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 Samuelit 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 Sam 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1Sam 1:1").osis()).toEqual("1Sam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 E. SAMUELIT 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 I. SAMUELIT 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 E SAMUELIT 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 I SAMUELIT 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. SAMUELIT 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 SAMUELIT 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 SAM 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1SAM 1:1").osis()).toEqual("1Sam.1.1")
		`
		true
describe "Localized book 2Kgs (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Kgs (sq)", ->
		# Every accepted Albanian spelling, checked as written and again in ALL CAPS
		# after switching the Apocrypha off.
		books = [
			"2 e. Mbreterve", "2 e. Mbretërve", "2 i. Mbreterve", "2 i. Mbretërve"
			"4 e. Mbreterve", "4 e. Mbretërve", "4 i. Mbreterve", "4 i. Mbretërve"
			"2 e Mbreterve", "2 e Mbretërve", "2 i Mbreterve", "2 i Mbretërve"
			"4 e Mbreterve", "4 e Mbretërve", "4 i Mbreterve", "4 i Mbretërve"
			"2. Mbreterve", "2. Mbretërve", "4. Mbreterve", "4. Mbretërve"
			"2 Mbreterve", "2 Mbretërve", "4 Mbreterve", "4 Mbretërve"
			"2 Mb", "2Kgs"
		]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("2Kgs.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("2Kgs.1.1")
		true
describe "Localized book 1Kgs (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Kgs (sq)", ->
		# Every accepted Albanian spelling, checked as written and again in ALL CAPS
		# after switching the Apocrypha off.
		books = [
			"1 e. Mbreterve", "1 e. Mbretërve", "1 i. Mbreterve", "1 i. Mbretërve"
			"3 e. Mbreterve", "3 e. Mbretërve", "3 i. Mbreterve", "3 i. Mbretërve"
			"1 e Mbreterve", "1 e Mbretërve", "1 i Mbreterve", "1 i Mbretërve"
			"3 e Mbreterve", "3 e Mbretërve", "3 i Mbreterve", "3 i Mbretërve"
			"1. Mbreterve", "1. Mbretërve", "3. Mbreterve", "3. Mbretërve"
			"1 Mbreterve", "1 Mbretërve", "3 Mbreterve", "3 Mbretërve"
			"1 Mb", "1Kgs"
		]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("1Kgs.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("1Kgs.1.1")
		true
describe "Localized book 2Chr (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Chr (sq)", ->
		# Every accepted Albanian spelling, checked as written and again in ALL CAPS
		# after switching the Apocrypha off.
		books = [
			"2 e. Kronikave", "2 i. Kronikave", "2 e Kronikave", "2 i Kronikave"
			"2 e. Kronika", "2 i. Kronika", "2. Kronikave", "2 Kronikave"
			"2 e Kronika", "2 i Kronika", "2. Kronika", "2 Kronika"
			"2 Kr", "2Chr"
		]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("2Chr.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("2Chr.1.1")
		true
describe "Localized book 1Chr (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Chr (sq)", ->
		# Every accepted Albanian spelling, checked as written and again in ALL CAPS
		# after switching the Apocrypha off.
		books = [
			"1 e. Kronikave", "1 i. Kronikave", "1 e Kronikave", "1 i Kronikave"
			"1 e. Kronika", "1 i. Kronika", "1. Kronikave", "1 Kronikave"
			"1 e Kronika", "1 i Kronika", "1. Kronika", "1 Kronika"
			"1 Kr", "1Chr"
		]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("1Chr.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("1Chr.1.1")
		true
describe "Localized book Ezra (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezra (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Esdra", "Ezra", "Esd"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Ezra.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Ezra.1.1")
		true
describe "Localized book Neh (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Neh (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Nehemia", "Neh"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Neh.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Neh.1.1")
		true
describe "Localized book GkEsth (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: GkEsth (sq)", ->
		# Only the OSIS abbreviation itself is recognized for this book.
		expect(p.parse("GkEsth 1:1").osis()).toEqual("GkEsth.1.1")
		true
describe "Localized book Esth (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Esth (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Ester", "Esth", "Est"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Esth.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Esth.1.1")
		true
describe "Localized book Job (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Job (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Hiobi", "Jobi", "Job"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Job.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Job.1.1")
		true
describe "Localized book Ps (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ps (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Libri i Psalmeve", "Psalmet", "Psalmi", "Ps"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Ps.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Ps.1.1")
		true
describe "Localized book PrAzar (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: PrAzar (sq)", ->
		# Only the OSIS abbreviation itself is recognized for this book.
		expect(p.parse("PrAzar 1:1").osis()).toEqual("PrAzar.1.1")
		true
describe "Localized book Prov (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Prov (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Fjalet e urta", "Fjalët e urta", "Prov"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Prov.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Prov.1.1")
		true
describe "Localized book Eccl (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Eccl (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Predikuesi", "Eccl", "Pred"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Eccl.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Eccl.1.1")
		true
describe "Localized book SgThree (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: SgThree (sq)", ->
		# Only the OSIS abbreviation itself is recognized for this book.
		expect(p.parse("SgThree 1:1").osis()).toEqual("SgThree.1.1")
		true
describe "Localized book Song (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Song (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Kantiku i Kantikeve", "Kantiku i Kantikëve", "Kant", "Song"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Song.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Song.1.1")
		true
describe "Localized book Jer (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jer (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Jeremia", "Jer"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Jer.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Jer.1.1")
		true
describe "Localized book Ezek (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezek (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Ezekieli", "Ezek", "Eze"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Ezek.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Ezek.1.1")
		true
describe "Localized book Dan (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Dan (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Danieli", "Dan"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Dan.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Dan.1.1")
		true
describe "Localized book Hos (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hos (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Osea", "Hos", "Os"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Hos.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Hos.1.1")
		true
describe "Localized book Joel (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Joel (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Joeli", "Joel", "Jl"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Joel.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Joel.1.1")
		true
describe "Localized book Amos (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Amos (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Amosi", "Amos", "Am"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Amos.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Amos.1.1")
		true
describe "Localized book Obad (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Obad (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Abdia", "Obad", "Abd"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Obad.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Obad.1.1")
		true
# NOTE(review): the spec generator left "<NAME>" placeholders in this block's
# titles and in two parse strings; restored "Jonah"/"Jon" to match the
# ALL-CAPS twins ("JONAH 1:1", "JON 1:1") asserted below.
describe "Localized book Jonah (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jonah (sq)", ->
		`
		expect(p.parse("Jonah 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("Jona 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("Jon 1:1").osis()).toEqual("Jonah.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JONAH 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("JONA 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("JON 1:1").osis()).toEqual("Jonah.1.1")
		`
		true
# NOTE(review): the spec generator left a "<NAME>ic" placeholder in this
# block's titles; restored "Mic" to match the parse assertions below.
describe "Localized book Mic (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mic (sq)", ->
		`
		expect(p.parse("Mikea 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("Mic 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("Mik 1:1").osis()).toEqual("Mic.1.1")
		p.include_apocrypha(false)
		expect(p.parse("MIKEA 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MIC 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MIK 1:1").osis()).toEqual("Mic.1.1")
		`
		true
describe "Localized book Nah (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Nah (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Nahumi", "Nah"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Nah.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Nah.1.1")
		true
describe "Localized book Hab (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hab (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Habakuku", "Hab"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Hab.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Hab.1.1")
		true
describe "Localized book Zeph (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zeph (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Sofonia", "Zeph", "Sof"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Zeph.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Zeph.1.1")
		true
describe "Localized book Hag (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hag (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Hagai", "Hag"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Hag.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Hag.1.1")
		true
describe "Localized book Zech (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zech (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Zakaria", "Zech", "Zak"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Zech.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Zech.1.1")
		true
describe "Localized book Mal (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mal (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Malakia", "Mal"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Mal.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Mal.1.1")
		true
# NOTE(review): the spec generator left a "<NAME>att" placeholder in this
# block's titles; restored "Matt" to match the parse assertions below.
describe "Localized book Matt (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Matt (sq)", ->
		`
		expect(p.parse("Ungjilli i Mateut 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Mateu 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Matt 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Mt 1:1").osis()).toEqual("Matt.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UNGJILLI I MATEUT 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATEU 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATT 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MT 1:1").osis()).toEqual("Matt.1.1")
		`
		true
describe "Localized book Mark (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mark (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = ["Ungjilli i Markut", "Marku", "Mark"]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("Mark.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("Mark.1.1")
		true
# NOTE(review): the spec generator left a "<NAME>" placeholder in this
# block's titles; restored "Luke" to match the parse assertions below.
describe "Localized book Luke (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Luke (sq)", ->
		`
		expect(p.parse("Ungjilli i Lukes 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Ungjilli i Lukës 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Lluka 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Luka 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Luke 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Llu 1:1").osis()).toEqual("Luke.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UNGJILLI I LUKES 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("UNGJILLI I LUKËS 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LLUKA 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKA 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKE 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LLU 1:1").osis()).toEqual("Luke.1.1")
		`
		true
describe "Localized book 1John (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1John (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = [
			"1 e. Gjonit", "1 i. Gjonit", "1 e Gjonit", "1 i Gjonit"
			"1. Gjonit", "1 Gjonit", "1John", "1 Gj"
		]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("1John.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("1John.1.1")
		true
describe "Localized book 2John (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2John (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = [
			"2 e. Gjonit", "2 i. Gjonit", "2 e Gjonit", "2 i Gjonit"
			"2. Gjonit", "2 Gjonit", "2John", "2 Gj"
		]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("2John.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("2John.1.1")
		true
describe "Localized book 3John (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3John (sq)", ->
		# Accepted spellings, checked as written and in ALL CAPS (Apocrypha off).
		books = [
			"3 e. Gjonit", "3 i. Gjonit", "3 e Gjonit", "3 i Gjonit"
			"3. Gjonit", "3 Gjonit", "3John", "3 Gj"
		]
		for book in books
			expect(p.parse("#{book} 1:1").osis()).toEqual("3John.1.1")
		p.include_apocrypha false
		for book in books
			expect(p.parse("#{book.toUpperCase()} 1:1").osis()).toEqual("3John.1.1")
		true
# Auto-generated sq locale tests for John. The "<NAME>" anonymization
# placeholders are restored to "John": every sibling block titles itself with
# the OSIS book id, and the surviving uppercase assertion ("JOHN 1:1") pins
# the lost lowercase input.
describe "Localized book John (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: John (sq)", ->
  `
  expect(p.parse("Ungjilli i Gjonit 1:1").osis()).toEqual("John.1.1")
  expect(p.parse("Gjoni 1:1").osis()).toEqual("John.1.1")
  expect(p.parse("John 1:1").osis()).toEqual("John.1.1")
  expect(p.parse("Gjo 1:1").osis()).toEqual("John.1.1")
  p.include_apocrypha(false)
  expect(p.parse("UNGJILLI I GJONIT 1:1").osis()).toEqual("John.1.1")
  expect(p.parse("GJONI 1:1").osis()).toEqual("John.1.1")
  expect(p.parse("JOHN 1:1").osis()).toEqual("John.1.1")
  expect(p.parse("GJO 1:1").osis()).toEqual("John.1.1")
  `
 true
# Auto-generated sq locale tests for Acts (raw JS body via CoffeeScript
# backticks). The two mixed-case lowercase inputs fold to a single uppercase
# form, so the generator emitted the "VEPRAT E APOSTUJVE" assertion twice;
# the byte-identical duplicate is removed here.
describe "Localized book Acts (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: Acts (sq)", ->
  `
  expect(p.parse("Veprat e Apostujve 1:1").osis()).toEqual("Acts.1.1")
  expect(p.parse("Veprat e apostujve 1:1").osis()).toEqual("Acts.1.1")
  expect(p.parse("Veprat 1:1").osis()).toEqual("Acts.1.1")
  expect(p.parse("Acts 1:1").osis()).toEqual("Acts.1.1")
  expect(p.parse("Vep 1:1").osis()).toEqual("Acts.1.1")
  p.include_apocrypha(false)
  expect(p.parse("VEPRAT E APOSTUJVE 1:1").osis()).toEqual("Acts.1.1")
  expect(p.parse("VEPRAT 1:1").osis()).toEqual("Acts.1.1")
  expect(p.parse("ACTS 1:1").osis()).toEqual("Acts.1.1")
  expect(p.parse("VEP 1:1").osis()).toEqual("Acts.1.1")
  `
 true
# Auto-generated sq locale tests for Rom, 2Cor, 1Cor, Gal and Eph. Same
# pattern throughout: lowercase forms are parsed with the Apocrypha enabled,
# uppercase forms after include_apocrypha(false); "it" bodies are raw JS via
# CoffeeScript backticks.
describe "Localized book Rom (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: Rom (sq)", ->
  `
  expect(p.parse("Romakeve 1:1").osis()).toEqual("Rom.1.1")
  expect(p.parse("Romakëve 1:1").osis()).toEqual("Rom.1.1")
  expect(p.parse("Rom 1:1").osis()).toEqual("Rom.1.1")
  p.include_apocrypha(false)
  expect(p.parse("ROMAKEVE 1:1").osis()).toEqual("Rom.1.1")
  expect(p.parse("ROMAKËVE 1:1").osis()).toEqual("Rom.1.1")
  expect(p.parse("ROM 1:1").osis()).toEqual("Rom.1.1")
  `
 true
describe "Localized book 2Cor (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: 2Cor (sq)", ->
  `
  expect(p.parse("2 e. Korintasve 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2 i. Korintasve 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2 e Korintasve 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2 e. Koritasve 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2 i Korintasve 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2 i. Koritasve 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2 e Koritasve 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2 i Koritasve 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2. Korintasve 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2 Korintasve 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2. Koritasve 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2 Koritasve 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2 Kor 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2Cor 1:1").osis()).toEqual("2Cor.1.1")
  p.include_apocrypha(false)
  expect(p.parse("2 E. KORINTASVE 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2 I. KORINTASVE 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2 E KORINTASVE 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2 E. KORITASVE 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2 I KORINTASVE 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2 I. KORITASVE 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2 E KORITASVE 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2 I KORITASVE 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2. KORINTASVE 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2 KORINTASVE 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2. KORITASVE 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2 KORITASVE 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2 KOR 1:1").osis()).toEqual("2Cor.1.1")
  expect(p.parse("2COR 1:1").osis()).toEqual("2Cor.1.1")
  `
 true
describe "Localized book 1Cor (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: 1Cor (sq)", ->
  `
  expect(p.parse("1 e. Korintasve 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1 i. Korintasve 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1 e Korintasve 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1 e. Koritasve 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1 i Korintasve 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1 i. Koritasve 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1 e Koritasve 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1 i Koritasve 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1. Korintasve 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1 Korintasve 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1. Koritasve 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1 Koritasve 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1 Kor 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1Cor 1:1").osis()).toEqual("1Cor.1.1")
  p.include_apocrypha(false)
  expect(p.parse("1 E. KORINTASVE 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1 I. KORINTASVE 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1 E KORINTASVE 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1 E. KORITASVE 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1 I KORINTASVE 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1 I. KORITASVE 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1 E KORITASVE 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1 I KORITASVE 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1. KORINTASVE 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1 KORINTASVE 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1. KORITASVE 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1 KORITASVE 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1 KOR 1:1").osis()).toEqual("1Cor.1.1")
  expect(p.parse("1COR 1:1").osis()).toEqual("1Cor.1.1")
  `
 true
describe "Localized book Gal (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: Gal (sq)", ->
  `
  expect(p.parse("Galatasve 1:1").osis()).toEqual("Gal.1.1")
  expect(p.parse("Gal 1:1").osis()).toEqual("Gal.1.1")
  p.include_apocrypha(false)
  expect(p.parse("GALATASVE 1:1").osis()).toEqual("Gal.1.1")
  expect(p.parse("GAL 1:1").osis()).toEqual("Gal.1.1")
  `
 true
describe "Localized book Eph (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: Eph (sq)", ->
  `
  expect(p.parse("Efesianeve 1:1").osis()).toEqual("Eph.1.1")
  expect(p.parse("Efesianëve 1:1").osis()).toEqual("Eph.1.1")
  expect(p.parse("Eph 1:1").osis()).toEqual("Eph.1.1")
  expect(p.parse("Ef 1:1").osis()).toEqual("Eph.1.1")
  p.include_apocrypha(false)
  expect(p.parse("EFESIANEVE 1:1").osis()).toEqual("Eph.1.1")
  expect(p.parse("EFESIANËVE 1:1").osis()).toEqual("Eph.1.1")
  expect(p.parse("EPH 1:1").osis()).toEqual("Eph.1.1")
  expect(p.parse("EF 1:1").osis()).toEqual("Eph.1.1")
  `
 true
# Auto-generated sq locale tests for Phil. The "<NAME>" anonymization
# placeholders in the describe/it titles are restored to the OSIS id "Phil",
# matching every sibling block's title pattern and the Phil.1.1 assertions.
describe "Localized book Phil (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: Phil (sq)", ->
  `
  expect(p.parse("Filipianeve 1:1").osis()).toEqual("Phil.1.1")
  expect(p.parse("Filipianëve 1:1").osis()).toEqual("Phil.1.1")
  expect(p.parse("Phil 1:1").osis()).toEqual("Phil.1.1")
  expect(p.parse("Fil 1:1").osis()).toEqual("Phil.1.1")
  p.include_apocrypha(false)
  expect(p.parse("FILIPIANEVE 1:1").osis()).toEqual("Phil.1.1")
  expect(p.parse("FILIPIANËVE 1:1").osis()).toEqual("Phil.1.1")
  expect(p.parse("PHIL 1:1").osis()).toEqual("Phil.1.1")
  expect(p.parse("FIL 1:1").osis()).toEqual("Phil.1.1")
  `
 true
# Auto-generated sq locale tests for Col through 1Macc. Same pattern as the
# preceding blocks: lowercase forms parsed with the Apocrypha enabled,
# uppercase forms after include_apocrypha(false); "it" bodies are raw JS via
# CoffeeScript backticks.
describe "Localized book Col (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: Col (sq)", ->
  `
  expect(p.parse("Kolosianeve 1:1").osis()).toEqual("Col.1.1")
  expect(p.parse("Kolosianëve 1:1").osis()).toEqual("Col.1.1")
  expect(p.parse("Col 1:1").osis()).toEqual("Col.1.1")
  expect(p.parse("Kol 1:1").osis()).toEqual("Col.1.1")
  p.include_apocrypha(false)
  expect(p.parse("KOLOSIANEVE 1:1").osis()).toEqual("Col.1.1")
  expect(p.parse("KOLOSIANËVE 1:1").osis()).toEqual("Col.1.1")
  expect(p.parse("COL 1:1").osis()).toEqual("Col.1.1")
  expect(p.parse("KOL 1:1").osis()).toEqual("Col.1.1")
  `
 true
describe "Localized book 2Thess (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: 2Thess (sq)", ->
  `
  expect(p.parse("2 e. Thesalonikasve 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2 i. Thesalonikasve 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2 e Thesalonikasve 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2 i Thesalonikasve 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2. Thesalonikasve 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2 Thesalonikasve 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2 e. Selanikasve 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2 i. Selanikasve 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2 e Selanikasve 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2 i Selanikasve 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2. Selanikasve 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2 Selanikasve 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2Thess 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2 Th 1:1").osis()).toEqual("2Thess.1.1")
  p.include_apocrypha(false)
  expect(p.parse("2 E. THESALONIKASVE 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2 I. THESALONIKASVE 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2 E THESALONIKASVE 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2 I THESALONIKASVE 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2. THESALONIKASVE 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2 THESALONIKASVE 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2 E. SELANIKASVE 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2 I. SELANIKASVE 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2 E SELANIKASVE 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2 I SELANIKASVE 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2. SELANIKASVE 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2 SELANIKASVE 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2THESS 1:1").osis()).toEqual("2Thess.1.1")
  expect(p.parse("2 TH 1:1").osis()).toEqual("2Thess.1.1")
  `
 true
describe "Localized book 1Thess (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: 1Thess (sq)", ->
  `
  expect(p.parse("1 e. Thesalonikasve 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1 i. Thesalonikasve 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1 e Thesalonikasve 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1 i Thesalonikasve 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1. Thesalonikasve 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1 Thesalonikasve 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1 e. Selanikasve 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1 i. Selanikasve 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1 e Selanikasve 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1 i Selanikasve 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1. Selanikasve 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1 Selanikasve 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1Thess 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1 Th 1:1").osis()).toEqual("1Thess.1.1")
  p.include_apocrypha(false)
  expect(p.parse("1 E. THESALONIKASVE 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1 I. THESALONIKASVE 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1 E THESALONIKASVE 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1 I THESALONIKASVE 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1. THESALONIKASVE 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1 THESALONIKASVE 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1 E. SELANIKASVE 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1 I. SELANIKASVE 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1 E SELANIKASVE 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1 I SELANIKASVE 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1. SELANIKASVE 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1 SELANIKASVE 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1THESS 1:1").osis()).toEqual("1Thess.1.1")
  expect(p.parse("1 TH 1:1").osis()).toEqual("1Thess.1.1")
  `
 true
describe "Localized book 2Tim (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: 2Tim (sq)", ->
  `
  expect(p.parse("2 e. Timoteut 1:1").osis()).toEqual("2Tim.1.1")
  expect(p.parse("2 i. Timoteut 1:1").osis()).toEqual("2Tim.1.1")
  expect(p.parse("2 e Timoteut 1:1").osis()).toEqual("2Tim.1.1")
  expect(p.parse("2 i Timoteut 1:1").osis()).toEqual("2Tim.1.1")
  expect(p.parse("2. Timoteut 1:1").osis()).toEqual("2Tim.1.1")
  expect(p.parse("2 Timoteut 1:1").osis()).toEqual("2Tim.1.1")
  expect(p.parse("2 Tim 1:1").osis()).toEqual("2Tim.1.1")
  expect(p.parse("2Tim 1:1").osis()).toEqual("2Tim.1.1")
  p.include_apocrypha(false)
  expect(p.parse("2 E. TIMOTEUT 1:1").osis()).toEqual("2Tim.1.1")
  expect(p.parse("2 I. TIMOTEUT 1:1").osis()).toEqual("2Tim.1.1")
  expect(p.parse("2 E TIMOTEUT 1:1").osis()).toEqual("2Tim.1.1")
  expect(p.parse("2 I TIMOTEUT 1:1").osis()).toEqual("2Tim.1.1")
  expect(p.parse("2. TIMOTEUT 1:1").osis()).toEqual("2Tim.1.1")
  expect(p.parse("2 TIMOTEUT 1:1").osis()).toEqual("2Tim.1.1")
  expect(p.parse("2 TIM 1:1").osis()).toEqual("2Tim.1.1")
  expect(p.parse("2TIM 1:1").osis()).toEqual("2Tim.1.1")
  `
 true
describe "Localized book 1Tim (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: 1Tim (sq)", ->
  `
  expect(p.parse("1 e. Timoteut 1:1").osis()).toEqual("1Tim.1.1")
  expect(p.parse("1 i. Timoteut 1:1").osis()).toEqual("1Tim.1.1")
  expect(p.parse("1 e Timoteut 1:1").osis()).toEqual("1Tim.1.1")
  expect(p.parse("1 i Timoteut 1:1").osis()).toEqual("1Tim.1.1")
  expect(p.parse("1. Timoteut 1:1").osis()).toEqual("1Tim.1.1")
  expect(p.parse("1 Timoteut 1:1").osis()).toEqual("1Tim.1.1")
  expect(p.parse("1 Tim 1:1").osis()).toEqual("1Tim.1.1")
  expect(p.parse("1Tim 1:1").osis()).toEqual("1Tim.1.1")
  p.include_apocrypha(false)
  expect(p.parse("1 E. TIMOTEUT 1:1").osis()).toEqual("1Tim.1.1")
  expect(p.parse("1 I. TIMOTEUT 1:1").osis()).toEqual("1Tim.1.1")
  expect(p.parse("1 E TIMOTEUT 1:1").osis()).toEqual("1Tim.1.1")
  expect(p.parse("1 I TIMOTEUT 1:1").osis()).toEqual("1Tim.1.1")
  expect(p.parse("1. TIMOTEUT 1:1").osis()).toEqual("1Tim.1.1")
  expect(p.parse("1 TIMOTEUT 1:1").osis()).toEqual("1Tim.1.1")
  expect(p.parse("1 TIM 1:1").osis()).toEqual("1Tim.1.1")
  expect(p.parse("1TIM 1:1").osis()).toEqual("1Tim.1.1")
  `
 true
describe "Localized book Titus (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: Titus (sq)", ->
  `
  expect(p.parse("Titit 1:1").osis()).toEqual("Titus.1.1")
  expect(p.parse("Titus 1:1").osis()).toEqual("Titus.1.1")
  expect(p.parse("Titi 1:1").osis()).toEqual("Titus.1.1")
  expect(p.parse("Tit 1:1").osis()).toEqual("Titus.1.1")
  p.include_apocrypha(false)
  expect(p.parse("TITIT 1:1").osis()).toEqual("Titus.1.1")
  expect(p.parse("TITUS 1:1").osis()).toEqual("Titus.1.1")
  expect(p.parse("TITI 1:1").osis()).toEqual("Titus.1.1")
  expect(p.parse("TIT 1:1").osis()).toEqual("Titus.1.1")
  `
 true
describe "Localized book Phlm (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: Phlm (sq)", ->
  `
  expect(p.parse("Filemonit 1:1").osis()).toEqual("Phlm.1.1")
  expect(p.parse("Filem 1:1").osis()).toEqual("Phlm.1.1")
  expect(p.parse("Phlm 1:1").osis()).toEqual("Phlm.1.1")
  p.include_apocrypha(false)
  expect(p.parse("FILEMONIT 1:1").osis()).toEqual("Phlm.1.1")
  expect(p.parse("FILEM 1:1").osis()).toEqual("Phlm.1.1")
  expect(p.parse("PHLM 1:1").osis()).toEqual("Phlm.1.1")
  `
 true
describe "Localized book Heb (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: Heb (sq)", ->
  `
  expect(p.parse("Hebrenjve 1:1").osis()).toEqual("Heb.1.1")
  expect(p.parse("Heb 1:1").osis()).toEqual("Heb.1.1")
  p.include_apocrypha(false)
  expect(p.parse("HEBRENJVE 1:1").osis()).toEqual("Heb.1.1")
  expect(p.parse("HEB 1:1").osis()).toEqual("Heb.1.1")
  `
 true
describe "Localized book Jas (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: Jas (sq)", ->
  `
  expect(p.parse("Jakobit 1:1").osis()).toEqual("Jas.1.1")
  expect(p.parse("Jakobi 1:1").osis()).toEqual("Jas.1.1")
  expect(p.parse("Jak 1:1").osis()).toEqual("Jas.1.1")
  expect(p.parse("Jas 1:1").osis()).toEqual("Jas.1.1")
  p.include_apocrypha(false)
  expect(p.parse("JAKOBIT 1:1").osis()).toEqual("Jas.1.1")
  expect(p.parse("JAKOBI 1:1").osis()).toEqual("Jas.1.1")
  expect(p.parse("JAK 1:1").osis()).toEqual("Jas.1.1")
  expect(p.parse("JAS 1:1").osis()).toEqual("Jas.1.1")
  `
 true
describe "Localized book 2Pet (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: 2Pet (sq)", ->
  `
  expect(p.parse("2 e. Pjetrit 1:1").osis()).toEqual("2Pet.1.1")
  expect(p.parse("2 i. Pjetrit 1:1").osis()).toEqual("2Pet.1.1")
  expect(p.parse("2 e Pjetrit 1:1").osis()).toEqual("2Pet.1.1")
  expect(p.parse("2 i Pjetrit 1:1").osis()).toEqual("2Pet.1.1")
  expect(p.parse("2. Pjetrit 1:1").osis()).toEqual("2Pet.1.1")
  expect(p.parse("2 Pjetrit 1:1").osis()).toEqual("2Pet.1.1")
  expect(p.parse("2 Pje 1:1").osis()).toEqual("2Pet.1.1")
  expect(p.parse("2 Pj 1:1").osis()).toEqual("2Pet.1.1")
  expect(p.parse("2Pet 1:1").osis()).toEqual("2Pet.1.1")
  p.include_apocrypha(false)
  expect(p.parse("2 E. PJETRIT 1:1").osis()).toEqual("2Pet.1.1")
  expect(p.parse("2 I. PJETRIT 1:1").osis()).toEqual("2Pet.1.1")
  expect(p.parse("2 E PJETRIT 1:1").osis()).toEqual("2Pet.1.1")
  expect(p.parse("2 I PJETRIT 1:1").osis()).toEqual("2Pet.1.1")
  expect(p.parse("2. PJETRIT 1:1").osis()).toEqual("2Pet.1.1")
  expect(p.parse("2 PJETRIT 1:1").osis()).toEqual("2Pet.1.1")
  expect(p.parse("2 PJE 1:1").osis()).toEqual("2Pet.1.1")
  expect(p.parse("2 PJ 1:1").osis()).toEqual("2Pet.1.1")
  expect(p.parse("2PET 1:1").osis()).toEqual("2Pet.1.1")
  `
 true
describe "Localized book 1Pet (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: 1Pet (sq)", ->
  `
  expect(p.parse("1 e. Pjetrit 1:1").osis()).toEqual("1Pet.1.1")
  expect(p.parse("1 i. Pjetrit 1:1").osis()).toEqual("1Pet.1.1")
  expect(p.parse("1 e Pjetrit 1:1").osis()).toEqual("1Pet.1.1")
  expect(p.parse("1 i Pjetrit 1:1").osis()).toEqual("1Pet.1.1")
  expect(p.parse("1. Pjetrit 1:1").osis()).toEqual("1Pet.1.1")
  expect(p.parse("1 Pjetrit 1:1").osis()).toEqual("1Pet.1.1")
  expect(p.parse("1 Pje 1:1").osis()).toEqual("1Pet.1.1")
  expect(p.parse("1 Pj 1:1").osis()).toEqual("1Pet.1.1")
  expect(p.parse("1Pet 1:1").osis()).toEqual("1Pet.1.1")
  p.include_apocrypha(false)
  expect(p.parse("1 E. PJETRIT 1:1").osis()).toEqual("1Pet.1.1")
  expect(p.parse("1 I. PJETRIT 1:1").osis()).toEqual("1Pet.1.1")
  expect(p.parse("1 E PJETRIT 1:1").osis()).toEqual("1Pet.1.1")
  expect(p.parse("1 I PJETRIT 1:1").osis()).toEqual("1Pet.1.1")
  expect(p.parse("1. PJETRIT 1:1").osis()).toEqual("1Pet.1.1")
  expect(p.parse("1 PJETRIT 1:1").osis()).toEqual("1Pet.1.1")
  expect(p.parse("1 PJE 1:1").osis()).toEqual("1Pet.1.1")
  expect(p.parse("1 PJ 1:1").osis()).toEqual("1Pet.1.1")
  expect(p.parse("1PET 1:1").osis()).toEqual("1Pet.1.1")
  `
 true
describe "Localized book Jude (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: Jude (sq)", ->
  `
  expect(p.parse("Juda 1:1").osis()).toEqual("Jude.1.1")
  expect(p.parse("Jude 1:1").osis()).toEqual("Jude.1.1")
  expect(p.parse("Jud 1:1").osis()).toEqual("Jude.1.1")
  p.include_apocrypha(false)
  expect(p.parse("JUDA 1:1").osis()).toEqual("Jude.1.1")
  expect(p.parse("JUDE 1:1").osis()).toEqual("Jude.1.1")
  expect(p.parse("JUD 1:1").osis()).toEqual("Jude.1.1")
  `
 true
# The deuterocanonical blocks below exercise only the bare OSIS id (no
# Albanian alternate spellings are defined for them in this locale).
describe "Localized book Tob (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: Tob (sq)", ->
  `
  expect(p.parse("Tob 1:1").osis()).toEqual("Tob.1.1")
  `
 true
describe "Localized book Jdt (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: Jdt (sq)", ->
  `
  expect(p.parse("Jdt 1:1").osis()).toEqual("Jdt.1.1")
  `
 true
describe "Localized book Bar (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: Bar (sq)", ->
  `
  expect(p.parse("Bar 1:1").osis()).toEqual("Bar.1.1")
  `
 true
describe "Localized book Sus (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: Sus (sq)", ->
  `
  expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
  `
 true
describe "Localized book 2Macc (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: 2Macc (sq)", ->
  `
  expect(p.parse("2Macc 1:1").osis()).toEqual("2Macc.1.1")
  `
 true
describe "Localized book 3Macc (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: 3Macc (sq)", ->
  `
  expect(p.parse("3Macc 1:1").osis()).toEqual("3Macc.1.1")
  `
 true
describe "Localized book 4Macc (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: 4Macc (sq)", ->
  `
  expect(p.parse("4Macc 1:1").osis()).toEqual("4Macc.1.1")
  `
 true
describe "Localized book 1Macc (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: 1Macc (sq)", ->
  `
  expect(p.parse("1Macc 1:1").osis()).toEqual("1Macc.1.1")
  `
 true
# Cross-cutting tests for the sq locale: language list, ranges, chapter/verse
# keywords, sequences ("and"), Psalm titles, "ff" open-ended ranges,
# translation tags (ALB), book-to-book ranges, and punctuation boundaries.
describe "Miscellaneous tests", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should return the expected language", ->
  expect(p.languages).toEqual ["sq"]
 it "should handle ranges (sq)", ->
  expect(p.parse("Titus 1:1 - 2").osis()).toEqual "Titus.1.1-Titus.1.2"
  expect(p.parse("Matt 1-2").osis()).toEqual "Matt.1-Matt.2"
  expect(p.parse("Phlm 2 - 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
 it "should handle chapters (sq)", ->
  expect(p.parse("Titus 1:1, chapter 2").osis()).toEqual "Titus.1.1,Titus.2"
  expect(p.parse("Matt 3:4 CHAPTER 6").osis()).toEqual "Matt.3.4,Matt.6"
 it "should handle verses (sq)", ->
  expect(p.parse("Exod 1:1 verse 3").osis()).toEqual "Exod.1.1,Exod.1.3"
  expect(p.parse("Phlm VERSE 6").osis()).toEqual "Phlm.1.6"
 it "should handle 'and' (sq)", ->
  expect(p.parse("Exod 1:1 and 3").osis()).toEqual "Exod.1.1,Exod.1.3"
  expect(p.parse("Phlm 2 AND 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
 it "should handle titles (sq)", ->
  expect(p.parse("Ps 3 title, 4:2, 5:title").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
  expect(p.parse("PS 3 TITLE, 4:2, 5:TITLE").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
 it "should handle 'ff' (sq)", ->
  expect(p.parse("Rev 3ff, 4:2ff").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
  expect(p.parse("REV 3 FF, 4:2 FF").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
 it "should handle translations (sq)", ->
  expect(p.parse("Lev 1 (ALB)").osis_and_translations()).toEqual [["Lev.1", "ALB"]]
  expect(p.parse("lev 1 alb").osis_and_translations()).toEqual [["Lev.1", "ALB"]]
 it "should handle book ranges (sq)", ->
  p.set_options {book_alone_strategy: "full", book_range_strategy: "include"}
  expect(p.parse("1 i - 3 i Gjonit").osis()).toEqual "1John.1-3John.1"
 it "should handle boundaries (sq)", ->
  p.set_options {book_alone_strategy: "full"}
  expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
  expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
# Load the compiled Albanian bcv_parser the specs exercise. The stray
# "| true | " prefix (table-cell residue fused onto this line) is removed so
# the assignment parses again.
bcv_parser = require("../../js/sq_bcv_parser.js").bcv_parser
# Round-trip sanity tests: every canonical OSIS book id must parse back to
# itself as book.chapter, book.chapter.verse, and a verse range. The 66-book
# array had several ids destroyed by "PI:NAME:...END_PI" anonymization
# placeholders; it is restored here in canonical OSIS order (the intact
# neighbors "Ezek", "Hab".."Zech" and "Acts" pin each replaced slot).
describe "Parsing", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.options.osis_compaction_strategy = "b"
  p.options.sequence_combination_strategy = "combine"
 it "should round-trip OSIS references", ->
  p.set_options osis_compaction_strategy: "bc"
  books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
  for book in books
   bc = book + ".1"
   bcv = bc + ".1"
   bcv_range = bcv + "-" + bc + ".2"
   expect(p.parse(bc).osis()).toEqual bc
   expect(p.parse(bcv).osis()).toEqual bcv
   expect(p.parse(bcv_range).osis()).toEqual bcv_range
 it "should round-trip OSIS Apocrypha references", ->
  p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
  p.include_apocrypha true
  books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
  for book in books
   bc = book + ".1"
   bcv = bc + ".1"
   bcv_range = bcv + "-" + bc + ".2"
   expect(p.parse(bc).osis()).toEqual bc
   expect(p.parse(bcv).osis()).toEqual bcv
   expect(p.parse(bcv_range).osis()).toEqual bcv_range
  # With ps151_strategy "bc", Ps151 references are reported as Psalm 151.
  p.set_options ps151_strategy: "bc"
  expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
  expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
  expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
  # Apocryphal books must stop parsing once the Apocrypha is disabled.
  p.include_apocrypha false
  for book in books
   bc = book + ".1"
   expect(p.parse(bc).osis()).toEqual ""
 it "should handle a preceding character", ->
  expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
  expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
  expect(p.parse("1Ps 1").osis()).toEqual ""
  expect(p.parse("11Sam 1").osis()).toEqual ""
# Auto-generated sq locale tests for Gen, Exod, Bel, Lev and Num. Same
# pattern as the other locale blocks: lowercase forms with the Apocrypha
# enabled, uppercase forms after include_apocrypha(false); "it" bodies are
# raw JS via CoffeeScript backticks.
describe "Localized book Gen (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: Gen (sq)", ->
  `
  expect(p.parse("Zanafilla 1:1").osis()).toEqual("Gen.1.1")
  expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
  expect(p.parse("Zan 1:1").osis()).toEqual("Gen.1.1")
  p.include_apocrypha(false)
  expect(p.parse("ZANAFILLA 1:1").osis()).toEqual("Gen.1.1")
  expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
  expect(p.parse("ZAN 1:1").osis()).toEqual("Gen.1.1")
  `
 true
describe "Localized book Exod (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: Exod (sq)", ->
  `
  expect(p.parse("Eksodi 1:1").osis()).toEqual("Exod.1.1")
  expect(p.parse("Dalja 1:1").osis()).toEqual("Exod.1.1")
  expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
  expect(p.parse("Dal 1:1").osis()).toEqual("Exod.1.1")
  expect(p.parse("Eks 1:1").osis()).toEqual("Exod.1.1")
  p.include_apocrypha(false)
  expect(p.parse("EKSODI 1:1").osis()).toEqual("Exod.1.1")
  expect(p.parse("DALJA 1:1").osis()).toEqual("Exod.1.1")
  expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
  expect(p.parse("DAL 1:1").osis()).toEqual("Exod.1.1")
  expect(p.parse("EKS 1:1").osis()).toEqual("Exod.1.1")
  `
 true
describe "Localized book Bel (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: Bel (sq)", ->
  `
  expect(p.parse("Bel 1:1").osis()).toEqual("Bel.1.1")
  `
 true
describe "Localized book Lev (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: Lev (sq)", ->
  `
  expect(p.parse("Levitiket 1:1").osis()).toEqual("Lev.1.1")
  expect(p.parse("Levitikët 1:1").osis()).toEqual("Lev.1.1")
  expect(p.parse("Levitiku 1:1").osis()).toEqual("Lev.1.1")
  expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
  p.include_apocrypha(false)
  expect(p.parse("LEVITIKET 1:1").osis()).toEqual("Lev.1.1")
  expect(p.parse("LEVITIKËT 1:1").osis()).toEqual("Lev.1.1")
  expect(p.parse("LEVITIKU 1:1").osis()).toEqual("Lev.1.1")
  expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
  `
 true
describe "Localized book Num (sq)", ->
 p = {}
 beforeEach ->
  p = new bcv_parser
  p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
  p.include_apocrypha true
 it "should handle book: Num (sq)", ->
  `
  expect(p.parse("Numrat 1:1").osis()).toEqual("Num.1.1")
  expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
  p.include_apocrypha(false)
  expect(p.parse("NUMRAT 1:1").osis()).toEqual("Num.1.1")
  expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
  `
 true
describe "Localized book Sir (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sir (sq)", ->
`
expect(p.parse("Sir 1:1").osis()).toEqual("Sir.1.1")
`
true
describe "Localized book Wis (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Wis (sq)", ->
`
expect(p.parse("Wis 1:1").osis()).toEqual("Wis.1.1")
`
true
# Generated sq-locale specs (Lam, EpJer, Rev, PrMan, Deut, Josh): every
# localized alias must parse to the book's OSIS code; uppercase variants are
# re-checked after include_apocrypha(false). Raw JS lives between backticks;
# the trailing `true` is the generator's return-value guard.
describe "Localized book Lam (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Lam (sq)", ->
		`
		expect(p.parse("Vajtimet 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("Vaj 1:1").osis()).toEqual("Lam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("VAJTIMET 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("VAJ 1:1").osis()).toEqual("Lam.1.1")
		`
		true
describe "Localized book EpJer (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: EpJer (sq)", ->
		`
		expect(p.parse("EpJer 1:1").osis()).toEqual("EpJer.1.1")
		`
		true
describe "Localized book Rev (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rev (sq)", ->
		`
		expect(p.parse("Zbulesa 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Zbu 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Zb 1:1").osis()).toEqual("Rev.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ZBULESA 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("ZBU 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("ZB 1:1").osis()).toEqual("Rev.1.1")
		`
		true
describe "Localized book PrMan (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: PrMan (sq)", ->
		`
		expect(p.parse("PrMan 1:1").osis()).toEqual("PrMan.1.1")
		`
		true
# NOTE(review): the Deut aliases enumerate every e/ë diacritic combination of
# "Ligji i Përtërirë"; the generator emits two visually identical uppercase
# lines (presumably differing Unicode normalizations) — kept byte-identical.
describe "Localized book Deut (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Deut (sq)", ->
		`
		expect(p.parse("Ligji i Perterire 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Ligji i Perterirë 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Ligji i Pertërire 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Ligji i Pertërirë 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Ligji i Përterire 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Ligji i Përterirë 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Ligji i Përtërire 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Ligji i Përtërirë 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Ligji i përtërirë 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("LiP 1:1").osis()).toEqual("Deut.1.1")
		p.include_apocrypha(false)
		expect(p.parse("LIGJI I PERTERIRE 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("LIGJI I PERTERIRË 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("LIGJI I PERTËRIRE 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("LIGJI I PERTËRIRË 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("LIGJI I PËRTERIRE 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("LIGJI I PËRTERIRË 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("LIGJI I PËRTËRIRE 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("LIGJI I PËRTËRIRË 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("LIGJI I PËRTËRIRË 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("LIP 1:1").osis()).toEqual("Deut.1.1")
		`
		true
describe "Localized book Josh (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Josh (sq)", ->
		`
		expect(p.parse("Jozueu 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("Josh 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("Joz 1:1").osis()).toEqual("Josh.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JOZUEU 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("JOZ 1:1").osis()).toEqual("Josh.1.1")
		`
		true
# Generated sq-locale specs (Judg, Ruth, 1Esd, 2Esd, Isa, 2Sam, 1Sam).
# The Samuel blocks exercise ordinal spellings ("1 e.", "1 i.", "1.", plain
# "1") of "Samuelit" plus the bare OSIS abbreviation, each case-insensitively.
describe "Localized book Judg (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Judg (sq)", ->
		`
		expect(p.parse("Gjyqtaret 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("Gjyqtarët 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("Gjy 1:1").osis()).toEqual("Judg.1.1")
		p.include_apocrypha(false)
		expect(p.parse("GJYQTARET 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("GJYQTARËT 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("GJY 1:1").osis()).toEqual("Judg.1.1")
		`
		true
describe "Localized book Ruth (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ruth (sq)", ->
		`
		expect(p.parse("Ruthi 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
		p.include_apocrypha(false)
		expect(p.parse("RUTHI 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
		`
		true
describe "Localized book 1Esd (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Esd (sq)", ->
		`
		expect(p.parse("1Esd 1:1").osis()).toEqual("1Esd.1.1")
		`
		true
describe "Localized book 2Esd (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Esd (sq)", ->
		`
		expect(p.parse("2Esd 1:1").osis()).toEqual("2Esd.1.1")
		`
		true
describe "Localized book Isa (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Isa (sq)", ->
		`
		expect(p.parse("Jesaja 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Isaia 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JESAJA 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("ISAIA 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
		`
		true
describe "Localized book 2Sam (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Sam (sq)", ->
		`
		expect(p.parse("2 e. Samuelit 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 i. Samuelit 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 e Samuelit 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 i Samuelit 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. Samuelit 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 Samuelit 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 Sam 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2Sam 1:1").osis()).toEqual("2Sam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 E. SAMUELIT 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 I. SAMUELIT 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 E SAMUELIT 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 I SAMUELIT 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. SAMUELIT 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAMUELIT 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAM 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2SAM 1:1").osis()).toEqual("2Sam.1.1")
		`
		true
describe "Localized book 1Sam (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Sam (sq)", ->
		`
		expect(p.parse("1 e. Samuelit 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 i. Samuelit 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 e Samuelit 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 i Samuelit 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. Samuelit 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 Samuelit 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 Sam 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1Sam 1:1").osis()).toEqual("1Sam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 E. SAMUELIT 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 I. SAMUELIT 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 E SAMUELIT 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 I SAMUELIT 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. SAMUELIT 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 SAMUELIT 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 SAM 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1SAM 1:1").osis()).toEqual("1Sam.1.1")
		`
		true
# Generated sq-locale specs (2Kgs, 1Kgs). Besides ordinal/diacritic variants
# of "Mbretërve", these also map the Septuagint-style numbering visible in
# the data: "4 Mbretërve" -> 2Kgs and "3 Mbretërve" -> 1Kgs.
describe "Localized book 2Kgs (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Kgs (sq)", ->
		`
		expect(p.parse("2 e. Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 e. Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 i. Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 i. Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4 e. Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4 e. Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4 i. Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4 i. Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 e Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 e Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 i Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 i Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4 e Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4 e Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4 i Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4 i Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4. Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4. Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4 Mbreterve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4 Mbretërve 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 Mb 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2Kgs 1:1").osis()).toEqual("2Kgs.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 E. MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 E. MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 I. MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 I. MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4 E. MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4 E. MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4 I. MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4 I. MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 E MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 E MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 I MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 I MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4 E MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4 E MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4 I MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4 I MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4. MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4. MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4 MBRETERVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("4 MBRETËRVE 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 MB 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2KGS 1:1").osis()).toEqual("2Kgs.1.1")
		`
		true
describe "Localized book 1Kgs (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Kgs (sq)", ->
		`
		expect(p.parse("1 e. Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 e. Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 i. Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 i. Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3 e. Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3 e. Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3 i. Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3 i. Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 e Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 e Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 i Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 i Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3 e Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3 e Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3 i Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3 i Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3. Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3. Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3 Mbreterve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3 Mbretërve 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 Mb 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1Kgs 1:1").osis()).toEqual("1Kgs.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 E. MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 E. MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 I. MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 I. MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3 E. MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3 E. MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3 I. MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3 I. MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 E MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 E MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 I MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 I MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3 E MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3 E MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3 I MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3 I MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3. MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3. MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3 MBRETERVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("3 MBRETËRVE 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 MB 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1KGS 1:1").osis()).toEqual("1Kgs.1.1")
		`
		true
# Generated sq-locale specs (2Chr, 1Chr, Ezra): ordinal variants of
# "Kronikave"/"Kronika" and the Ezra aliases must resolve to their OSIS codes,
# case-insensitively, with and without the apocrypha enabled.
describe "Localized book 2Chr (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Chr (sq)", ->
		`
		expect(p.parse("2 e. Kronikave 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 i. Kronikave 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 e Kronikave 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 i Kronikave 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 e. Kronika 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 i. Kronika 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2. Kronikave 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 Kronikave 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 e Kronika 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 i Kronika 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2. Kronika 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 Kronika 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 Kr 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2Chr 1:1").osis()).toEqual("2Chr.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 E. KRONIKAVE 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 I. KRONIKAVE 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 E KRONIKAVE 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 I KRONIKAVE 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 E. KRONIKA 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 I. KRONIKA 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2. KRONIKAVE 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 KRONIKAVE 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 E KRONIKA 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 I KRONIKA 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2. KRONIKA 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 KRONIKA 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 KR 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2CHR 1:1").osis()).toEqual("2Chr.1.1")
		`
		true
describe "Localized book 1Chr (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Chr (sq)", ->
		`
		expect(p.parse("1 e. Kronikave 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 i. Kronikave 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 e Kronikave 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 i Kronikave 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 e. Kronika 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 i. Kronika 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1. Kronikave 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 Kronikave 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 e Kronika 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 i Kronika 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1. Kronika 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 Kronika 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 Kr 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1Chr 1:1").osis()).toEqual("1Chr.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 E. KRONIKAVE 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 I. KRONIKAVE 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 E KRONIKAVE 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 I KRONIKAVE 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 E. KRONIKA 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 I. KRONIKA 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1. KRONIKAVE 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 KRONIKAVE 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 E KRONIKA 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 I KRONIKA 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1. KRONIKA 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 KRONIKA 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 KR 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1CHR 1:1").osis()).toEqual("1Chr.1.1")
		`
		true
describe "Localized book Ezra (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezra (sq)", ->
		`
		expect(p.parse("Esdra 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("Ezra 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("Esd 1:1").osis()).toEqual("Ezra.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ESDRA 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("EZRA 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("ESD 1:1").osis()).toEqual("Ezra.1.1")
		`
		true
# Generated sq-locale specs (Neh, GkEsth, Esth, Job, Ps, PrAzar). GkEsth and
# PrAzar have no sq-specific alias and only check the OSIS abbreviation.
describe "Localized book Neh (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Neh (sq)", ->
		`
		expect(p.parse("Nehemia 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("Neh 1:1").osis()).toEqual("Neh.1.1")
		p.include_apocrypha(false)
		expect(p.parse("NEHEMIA 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("NEH 1:1").osis()).toEqual("Neh.1.1")
		`
		true
describe "Localized book GkEsth (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: GkEsth (sq)", ->
		`
		expect(p.parse("GkEsth 1:1").osis()).toEqual("GkEsth.1.1")
		`
		true
describe "Localized book Esth (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Esth (sq)", ->
		`
		expect(p.parse("Ester 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("Esth 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("Est 1:1").osis()).toEqual("Esth.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ESTER 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("ESTH 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("EST 1:1").osis()).toEqual("Esth.1.1")
		`
		true
describe "Localized book Job (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Job (sq)", ->
		`
		expect(p.parse("Hiobi 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("Jobi 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("Job 1:1").osis()).toEqual("Job.1.1")
		p.include_apocrypha(false)
		expect(p.parse("HIOBI 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("JOBI 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("JOB 1:1").osis()).toEqual("Job.1.1")
		`
		true
describe "Localized book Ps (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ps (sq)", ->
		`
		expect(p.parse("Libri i Psalmeve 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("Psalmet 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("Psalmi 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("Ps 1:1").osis()).toEqual("Ps.1.1")
		p.include_apocrypha(false)
		expect(p.parse("LIBRI I PSALMEVE 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("PSALMET 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("PSALMI 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("PS 1:1").osis()).toEqual("Ps.1.1")
		`
		true
describe "Localized book PrAzar (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: PrAzar (sq)", ->
		`
		expect(p.parse("PrAzar 1:1").osis()).toEqual("PrAzar.1.1")
		`
		true
# Generated sq-locale specs (Prov, Eccl, SgThree, Song, Jer, Ezek): each
# localized alias (including e/ë diacritic variants) must parse to the OSIS
# code, case-insensitively, before and after include_apocrypha(false).
describe "Localized book Prov (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Prov (sq)", ->
		`
		expect(p.parse("Fjalet e urta 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("Fjalët e urta 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("Prov 1:1").osis()).toEqual("Prov.1.1")
		p.include_apocrypha(false)
		expect(p.parse("FJALET E URTA 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("FJALËT E URTA 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("PROV 1:1").osis()).toEqual("Prov.1.1")
		`
		true
describe "Localized book Eccl (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Eccl (sq)", ->
		`
		expect(p.parse("Predikuesi 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("Eccl 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("Pred 1:1").osis()).toEqual("Eccl.1.1")
		p.include_apocrypha(false)
		expect(p.parse("PREDIKUESI 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("ECCL 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("PRED 1:1").osis()).toEqual("Eccl.1.1")
		`
		true
describe "Localized book SgThree (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: SgThree (sq)", ->
		`
		expect(p.parse("SgThree 1:1").osis()).toEqual("SgThree.1.1")
		`
		true
describe "Localized book Song (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Song (sq)", ->
		`
		expect(p.parse("Kantiku i Kantikeve 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("Kantiku i Kantikëve 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("Kant 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("Song 1:1").osis()).toEqual("Song.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KANTIKU I KANTIKEVE 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("KANTIKU I KANTIKËVE 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("KANT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SONG 1:1").osis()).toEqual("Song.1.1")
		`
		true
describe "Localized book Jer (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jer (sq)", ->
		`
		expect(p.parse("Jeremia 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("Jer 1:1").osis()).toEqual("Jer.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JEREMIA 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("JER 1:1").osis()).toEqual("Jer.1.1")
		`
		true
describe "Localized book Ezek (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezek (sq)", ->
		`
		expect(p.parse("Ezekieli 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("Ezek 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("Eze 1:1").osis()).toEqual("Ezek.1.1")
		p.include_apocrypha(false)
		expect(p.parse("EZEKIELI 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("EZEK 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("EZE 1:1").osis()).toEqual("Ezek.1.1")
		`
		true
describe "Localized book Dan (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Dan (sq)", ->
`
expect(p.parse("Danieli 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("Dan 1:1").osis()).toEqual("Dan.1.1")
p.include_apocrypha(false)
expect(p.parse("DANIELI 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("DAN 1:1").osis()).toEqual("Dan.1.1")
`
true
# All Albanian (sq) spellings of Hosea must parse to OSIS "Hos",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book Hos (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hos (sq)", ->
		expect(p.parse("#{b} 1:1").osis()).toEqual("Hos.1.1") for b in ["Osea", "Hos", "Os"]
		p.include_apocrypha false
		expect(p.parse("#{b} 1:1").osis()).toEqual("Hos.1.1") for b in ["OSEA", "HOS", "OS"]
		true
# All Albanian (sq) spellings of Joel must parse to OSIS "Joel",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book Joel (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Joel (sq)", ->
		expect(p.parse("#{b} 1:1").osis()).toEqual("Joel.1.1") for b in ["Joeli", "Joel", "Jl"]
		p.include_apocrypha false
		expect(p.parse("#{b} 1:1").osis()).toEqual("Joel.1.1") for b in ["JOELI", "JOEL", "JL"]
		true
# All Albanian (sq) spellings of Amos must parse to OSIS "Amos",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book Amos (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Amos (sq)", ->
		expect(p.parse("#{b} 1:1").osis()).toEqual("Amos.1.1") for b in ["Amosi", "Amos", "Am"]
		p.include_apocrypha false
		expect(p.parse("#{b} 1:1").osis()).toEqual("Amos.1.1") for b in ["AMOSI", "AMOS", "AM"]
		true
# All Albanian (sq) spellings of Obadiah must parse to OSIS "Obad",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book Obad (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Obad (sq)", ->
		expect(p.parse("#{b} 1:1").osis()).toEqual("Obad.1.1") for b in ["Abdia", "Obad", "Abd"]
		p.include_apocrypha false
		expect(p.parse("#{b} 1:1").osis()).toEqual("Obad.1.1") for b in ["ABDIA", "OBAD", "ABD"]
		true
# All Albanian (sq) spellings of Jonah must parse to OSIS "Jonah",
# both as written and fully uppercased (apocrypha toggled off midway).
# NOTE(review): the describe/it titles and two parse inputs were corrupted
# by placeholder tokens; reconstructed from the uppercase assertions
# (JONAH / JONA / JON), which were intact.
describe "Localized book Jonah (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jonah (sq)", ->
		`
		expect(p.parse("Jonah 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("Jona 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("Jon 1:1").osis()).toEqual("Jonah.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JONAH 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("JONA 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("JON 1:1").osis()).toEqual("Jonah.1.1")
		`
		true
# All Albanian (sq) spellings of Micah must parse to OSIS "Mic",
# both as written and fully uppercased (apocrypha toggled off midway).
# NOTE(review): the describe/it titles were corrupted by placeholder tokens;
# reconstructed as "Mic" from the intact assertions ("Mic.1.1").
describe "Localized book Mic (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mic (sq)", ->
		`
		expect(p.parse("Mikea 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("Mic 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("Mik 1:1").osis()).toEqual("Mic.1.1")
		p.include_apocrypha(false)
		expect(p.parse("MIKEA 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MIC 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MIK 1:1").osis()).toEqual("Mic.1.1")
		`
		true
# All Albanian (sq) spellings of Nahum must parse to OSIS "Nah",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book Nah (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Nah (sq)", ->
		expect(p.parse("#{b} 1:1").osis()).toEqual("Nah.1.1") for b in ["Nahumi", "Nah"]
		p.include_apocrypha false
		expect(p.parse("#{b} 1:1").osis()).toEqual("Nah.1.1") for b in ["NAHUMI", "NAH"]
		true
# All Albanian (sq) spellings of Habakkuk must parse to OSIS "Hab",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book Hab (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hab (sq)", ->
		expect(p.parse("#{b} 1:1").osis()).toEqual("Hab.1.1") for b in ["Habakuku", "Hab"]
		p.include_apocrypha false
		expect(p.parse("#{b} 1:1").osis()).toEqual("Hab.1.1") for b in ["HABAKUKU", "HAB"]
		true
# All Albanian (sq) spellings of Zephaniah must parse to OSIS "Zeph",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book Zeph (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zeph (sq)", ->
		expect(p.parse("#{b} 1:1").osis()).toEqual("Zeph.1.1") for b in ["Sofonia", "Zeph", "Sof"]
		p.include_apocrypha false
		expect(p.parse("#{b} 1:1").osis()).toEqual("Zeph.1.1") for b in ["SOFONIA", "ZEPH", "SOF"]
		true
# All Albanian (sq) spellings of Haggai must parse to OSIS "Hag",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book Hag (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hag (sq)", ->
		expect(p.parse("#{b} 1:1").osis()).toEqual("Hag.1.1") for b in ["Hagai", "Hag"]
		p.include_apocrypha false
		expect(p.parse("#{b} 1:1").osis()).toEqual("Hag.1.1") for b in ["HAGAI", "HAG"]
		true
# All Albanian (sq) spellings of Zechariah must parse to OSIS "Zech",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book Zech (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zech (sq)", ->
		expect(p.parse("#{b} 1:1").osis()).toEqual("Zech.1.1") for b in ["Zakaria", "Zech", "Zak"]
		p.include_apocrypha false
		expect(p.parse("#{b} 1:1").osis()).toEqual("Zech.1.1") for b in ["ZAKARIA", "ZECH", "ZAK"]
		true
# All Albanian (sq) spellings of Malachi must parse to OSIS "Mal",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book Mal (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mal (sq)", ->
		expect(p.parse("#{b} 1:1").osis()).toEqual("Mal.1.1") for b in ["Malakia", "Mal"]
		p.include_apocrypha false
		expect(p.parse("#{b} 1:1").osis()).toEqual("Mal.1.1") for b in ["MALAKIA", "MAL"]
		true
# All Albanian (sq) spellings of Matthew must parse to OSIS "Matt",
# both as written and fully uppercased (apocrypha toggled off midway).
# NOTE(review): the describe/it titles were corrupted by placeholder tokens;
# reconstructed as "Matt" from the intact assertions ("Matt.1.1").
describe "Localized book Matt (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Matt (sq)", ->
		`
		expect(p.parse("Ungjilli i Mateut 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Mateu 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Matt 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Mt 1:1").osis()).toEqual("Matt.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UNGJILLI I MATEUT 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATEU 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATT 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MT 1:1").osis()).toEqual("Matt.1.1")
		`
		true
# All Albanian (sq) spellings of Mark must parse to OSIS "Mark",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book Mark (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mark (sq)", ->
		expect(p.parse("#{b} 1:1").osis()).toEqual("Mark.1.1") for b in ["Ungjilli i Markut", "Marku", "Mark"]
		p.include_apocrypha false
		expect(p.parse("#{b} 1:1").osis()).toEqual("Mark.1.1") for b in ["UNGJILLI I MARKUT", "MARKU", "MARK"]
		true
# All Albanian (sq) spellings of Luke must parse to OSIS "Luke",
# both as written and fully uppercased (apocrypha toggled off midway).
# NOTE(review): the describe/it titles were corrupted by placeholder tokens;
# reconstructed as "Luke" from the intact assertions ("Luke.1.1").
describe "Localized book Luke (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Luke (sq)", ->
		`
		expect(p.parse("Ungjilli i Lukes 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Ungjilli i Lukës 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Lluka 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Luka 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Luke 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Llu 1:1").osis()).toEqual("Luke.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UNGJILLI I LUKES 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("UNGJILLI I LUKËS 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LLUKA 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKA 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKE 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LLU 1:1").osis()).toEqual("Luke.1.1")
		`
		true
# All Albanian (sq) spellings of 1 John must parse to OSIS "1John",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book 1John (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1John (sq)", ->
		books = ["1 e. Gjonit", "1 i. Gjonit", "1 e Gjonit", "1 i Gjonit",
			"1. Gjonit", "1 Gjonit", "1John", "1 Gj"]
		expect(p.parse("#{b} 1:1").osis()).toEqual("1John.1.1") for b in books
		p.include_apocrypha false
		books = ["1 E. GJONIT", "1 I. GJONIT", "1 E GJONIT", "1 I GJONIT",
			"1. GJONIT", "1 GJONIT", "1JOHN", "1 GJ"]
		expect(p.parse("#{b} 1:1").osis()).toEqual("1John.1.1") for b in books
		true
# All Albanian (sq) spellings of 2 John must parse to OSIS "2John",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book 2John (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2John (sq)", ->
		books = ["2 e. Gjonit", "2 i. Gjonit", "2 e Gjonit", "2 i Gjonit",
			"2. Gjonit", "2 Gjonit", "2John", "2 Gj"]
		expect(p.parse("#{b} 1:1").osis()).toEqual("2John.1.1") for b in books
		p.include_apocrypha false
		books = ["2 E. GJONIT", "2 I. GJONIT", "2 E GJONIT", "2 I GJONIT",
			"2. GJONIT", "2 GJONIT", "2JOHN", "2 GJ"]
		expect(p.parse("#{b} 1:1").osis()).toEqual("2John.1.1") for b in books
		true
# All Albanian (sq) spellings of 3 John must parse to OSIS "3John",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book 3John (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3John (sq)", ->
		books = ["3 e. Gjonit", "3 i. Gjonit", "3 e Gjonit", "3 i Gjonit",
			"3. Gjonit", "3 Gjonit", "3John", "3 Gj"]
		expect(p.parse("#{b} 1:1").osis()).toEqual("3John.1.1") for b in books
		p.include_apocrypha false
		books = ["3 E. GJONIT", "3 I. GJONIT", "3 E GJONIT", "3 I GJONIT",
			"3. GJONIT", "3 GJONIT", "3JOHN", "3 GJ"]
		expect(p.parse("#{b} 1:1").osis()).toEqual("3John.1.1") for b in books
		true
# All Albanian (sq) spellings of John must parse to OSIS "John",
# both as written and fully uppercased (apocrypha toggled off midway).
# NOTE(review): the describe/it titles and one parse input were corrupted by
# placeholder tokens; reconstructed from the intact uppercase assertion
# ("JOHN 1:1") and the OSIS target ("John.1.1").
describe "Localized book John (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: John (sq)", ->
		`
		expect(p.parse("Ungjilli i Gjonit 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("Gjoni 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("John 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("Gjo 1:1").osis()).toEqual("John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UNGJILLI I GJONIT 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("GJONI 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JOHN 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("GJO 1:1").osis()).toEqual("John.1.1")
		`
		true
# All Albanian (sq) spellings of Acts must parse to OSIS "Acts",
# both as written and fully uppercased (apocrypha toggled off midway).
# Two mixed-case spellings collide when uppercased, so the uppercase list
# intentionally repeats "VEPRAT E APOSTUJVE".
describe "Localized book Acts (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Acts (sq)", ->
		books = ["Veprat e Apostujve", "Veprat e apostujve", "Veprat", "Acts", "Vep"]
		expect(p.parse("#{b} 1:1").osis()).toEqual("Acts.1.1") for b in books
		p.include_apocrypha false
		books = ["VEPRAT E APOSTUJVE", "VEPRAT E APOSTUJVE", "VEPRAT", "ACTS", "VEP"]
		expect(p.parse("#{b} 1:1").osis()).toEqual("Acts.1.1") for b in books
		true
# All Albanian (sq) spellings of Romans must parse to OSIS "Rom",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book Rom (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rom (sq)", ->
		expect(p.parse("#{b} 1:1").osis()).toEqual("Rom.1.1") for b in ["Romakeve", "Romakëve", "Rom"]
		p.include_apocrypha false
		expect(p.parse("#{b} 1:1").osis()).toEqual("Rom.1.1") for b in ["ROMAKEVE", "ROMAKËVE", "ROM"]
		true
# All Albanian (sq) spellings of 2 Corinthians must parse to OSIS "2Cor",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book 2Cor (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Cor (sq)", ->
		books = ["2 e. Korintasve", "2 i. Korintasve", "2 e Korintasve", "2 e. Koritasve",
			"2 i Korintasve", "2 i. Koritasve", "2 e Koritasve", "2 i Koritasve",
			"2. Korintasve", "2 Korintasve", "2. Koritasve", "2 Koritasve", "2 Kor", "2Cor"]
		expect(p.parse("#{b} 1:1").osis()).toEqual("2Cor.1.1") for b in books
		p.include_apocrypha false
		books = ["2 E. KORINTASVE", "2 I. KORINTASVE", "2 E KORINTASVE", "2 E. KORITASVE",
			"2 I KORINTASVE", "2 I. KORITASVE", "2 E KORITASVE", "2 I KORITASVE",
			"2. KORINTASVE", "2 KORINTASVE", "2. KORITASVE", "2 KORITASVE", "2 KOR", "2COR"]
		expect(p.parse("#{b} 1:1").osis()).toEqual("2Cor.1.1") for b in books
		true
# All Albanian (sq) spellings of 1 Corinthians must parse to OSIS "1Cor",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book 1Cor (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Cor (sq)", ->
		books = ["1 e. Korintasve", "1 i. Korintasve", "1 e Korintasve", "1 e. Koritasve",
			"1 i Korintasve", "1 i. Koritasve", "1 e Koritasve", "1 i Koritasve",
			"1. Korintasve", "1 Korintasve", "1. Koritasve", "1 Koritasve", "1 Kor", "1Cor"]
		expect(p.parse("#{b} 1:1").osis()).toEqual("1Cor.1.1") for b in books
		p.include_apocrypha false
		books = ["1 E. KORINTASVE", "1 I. KORINTASVE", "1 E KORINTASVE", "1 E. KORITASVE",
			"1 I KORINTASVE", "1 I. KORITASVE", "1 E KORITASVE", "1 I KORITASVE",
			"1. KORINTASVE", "1 KORINTASVE", "1. KORITASVE", "1 KORITASVE", "1 KOR", "1COR"]
		expect(p.parse("#{b} 1:1").osis()).toEqual("1Cor.1.1") for b in books
		true
# All Albanian (sq) spellings of Galatians must parse to OSIS "Gal",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book Gal (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Gal (sq)", ->
		expect(p.parse("#{b} 1:1").osis()).toEqual("Gal.1.1") for b in ["Galatasve", "Gal"]
		p.include_apocrypha false
		expect(p.parse("#{b} 1:1").osis()).toEqual("Gal.1.1") for b in ["GALATASVE", "GAL"]
		true
# All Albanian (sq) spellings of Ephesians must parse to OSIS "Eph",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book Eph (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Eph (sq)", ->
		expect(p.parse("#{b} 1:1").osis()).toEqual("Eph.1.1") for b in ["Efesianeve", "Efesianëve", "Eph", "Ef"]
		p.include_apocrypha false
		expect(p.parse("#{b} 1:1").osis()).toEqual("Eph.1.1") for b in ["EFESIANEVE", "EFESIANËVE", "EPH", "EF"]
		true
# All Albanian (sq) spellings of Philippians must parse to OSIS "Phil",
# both as written and fully uppercased (apocrypha toggled off midway).
# NOTE(review): the describe/it titles were corrupted by placeholder tokens;
# reconstructed as "Phil" from the intact assertions ("Phil.1.1").
describe "Localized book Phil (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phil (sq)", ->
		`
		expect(p.parse("Filipianeve 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Filipianëve 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Phil 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Fil 1:1").osis()).toEqual("Phil.1.1")
		p.include_apocrypha(false)
		expect(p.parse("FILIPIANEVE 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("FILIPIANËVE 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PHIL 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("FIL 1:1").osis()).toEqual("Phil.1.1")
		`
		true
# All Albanian (sq) spellings of Colossians must parse to OSIS "Col",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book Col (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Col (sq)", ->
		expect(p.parse("#{b} 1:1").osis()).toEqual("Col.1.1") for b in ["Kolosianeve", "Kolosianëve", "Col", "Kol"]
		p.include_apocrypha false
		expect(p.parse("#{b} 1:1").osis()).toEqual("Col.1.1") for b in ["KOLOSIANEVE", "KOLOSIANËVE", "COL", "KOL"]
		true
# All Albanian (sq) spellings of 2 Thessalonians must parse to OSIS "2Thess",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book 2Thess (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Thess (sq)", ->
		books = ["2 e. Thesalonikasve", "2 i. Thesalonikasve", "2 e Thesalonikasve",
			"2 i Thesalonikasve", "2. Thesalonikasve", "2 Thesalonikasve",
			"2 e. Selanikasve", "2 i. Selanikasve", "2 e Selanikasve",
			"2 i Selanikasve", "2. Selanikasve", "2 Selanikasve", "2Thess", "2 Th"]
		expect(p.parse("#{b} 1:1").osis()).toEqual("2Thess.1.1") for b in books
		p.include_apocrypha false
		books = ["2 E. THESALONIKASVE", "2 I. THESALONIKASVE", "2 E THESALONIKASVE",
			"2 I THESALONIKASVE", "2. THESALONIKASVE", "2 THESALONIKASVE",
			"2 E. SELANIKASVE", "2 I. SELANIKASVE", "2 E SELANIKASVE",
			"2 I SELANIKASVE", "2. SELANIKASVE", "2 SELANIKASVE", "2THESS", "2 TH"]
		expect(p.parse("#{b} 1:1").osis()).toEqual("2Thess.1.1") for b in books
		true
# All Albanian (sq) spellings of 1 Thessalonians must parse to OSIS "1Thess",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book 1Thess (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Thess (sq)", ->
		books = ["1 e. Thesalonikasve", "1 i. Thesalonikasve", "1 e Thesalonikasve",
			"1 i Thesalonikasve", "1. Thesalonikasve", "1 Thesalonikasve",
			"1 e. Selanikasve", "1 i. Selanikasve", "1 e Selanikasve",
			"1 i Selanikasve", "1. Selanikasve", "1 Selanikasve", "1Thess", "1 Th"]
		expect(p.parse("#{b} 1:1").osis()).toEqual("1Thess.1.1") for b in books
		p.include_apocrypha false
		books = ["1 E. THESALONIKASVE", "1 I. THESALONIKASVE", "1 E THESALONIKASVE",
			"1 I THESALONIKASVE", "1. THESALONIKASVE", "1 THESALONIKASVE",
			"1 E. SELANIKASVE", "1 I. SELANIKASVE", "1 E SELANIKASVE",
			"1 I SELANIKASVE", "1. SELANIKASVE", "1 SELANIKASVE", "1THESS", "1 TH"]
		expect(p.parse("#{b} 1:1").osis()).toEqual("1Thess.1.1") for b in books
		true
# All Albanian (sq) spellings of 2 Timothy must parse to OSIS "2Tim",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book 2Tim (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Tim (sq)", ->
		books = ["2 e. Timoteut", "2 i. Timoteut", "2 e Timoteut", "2 i Timoteut",
			"2. Timoteut", "2 Timoteut", "2 Tim", "2Tim"]
		expect(p.parse("#{b} 1:1").osis()).toEqual("2Tim.1.1") for b in books
		p.include_apocrypha false
		books = ["2 E. TIMOTEUT", "2 I. TIMOTEUT", "2 E TIMOTEUT", "2 I TIMOTEUT",
			"2. TIMOTEUT", "2 TIMOTEUT", "2 TIM", "2TIM"]
		expect(p.parse("#{b} 1:1").osis()).toEqual("2Tim.1.1") for b in books
		true
# All Albanian (sq) spellings of 1 Timothy must parse to OSIS "1Tim",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book 1Tim (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Tim (sq)", ->
		books = ["1 e. Timoteut", "1 i. Timoteut", "1 e Timoteut", "1 i Timoteut",
			"1. Timoteut", "1 Timoteut", "1 Tim", "1Tim"]
		expect(p.parse("#{b} 1:1").osis()).toEqual("1Tim.1.1") for b in books
		p.include_apocrypha false
		books = ["1 E. TIMOTEUT", "1 I. TIMOTEUT", "1 E TIMOTEUT", "1 I TIMOTEUT",
			"1. TIMOTEUT", "1 TIMOTEUT", "1 TIM", "1TIM"]
		expect(p.parse("#{b} 1:1").osis()).toEqual("1Tim.1.1") for b in books
		true
# All Albanian (sq) spellings of Titus must parse to OSIS "Titus",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book Titus (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Titus (sq)", ->
		expect(p.parse("#{b} 1:1").osis()).toEqual("Titus.1.1") for b in ["Titit", "Titus", "Titi", "Tit"]
		p.include_apocrypha false
		expect(p.parse("#{b} 1:1").osis()).toEqual("Titus.1.1") for b in ["TITIT", "TITUS", "TITI", "TIT"]
		true
# All Albanian (sq) spellings of Philemon must parse to OSIS "Phlm",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book Phlm (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phlm (sq)", ->
		expect(p.parse("#{b} 1:1").osis()).toEqual("Phlm.1.1") for b in ["Filemonit", "Filem", "Phlm"]
		p.include_apocrypha false
		expect(p.parse("#{b} 1:1").osis()).toEqual("Phlm.1.1") for b in ["FILEMONIT", "FILEM", "PHLM"]
		true
# All Albanian (sq) spellings of Hebrews must parse to OSIS "Heb",
# both as written and fully uppercased (apocrypha toggled off midway).
describe "Localized book Heb (sq)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Heb (sq)", ->
		expect(p.parse("#{b} 1:1").osis()).toEqual("Heb.1.1") for b in ["Hebrenjve", "Heb"]
		p.include_apocrypha false
		expect(p.parse("#{b} 1:1").osis()).toEqual("Heb.1.1") for b in ["HEBRENJVE", "HEB"]
		true
describe "Localized book Jas (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jas (sq)", ->
`
expect(p.parse("Jakobit 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jakobi 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jak 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jas 1:1").osis()).toEqual("Jas.1.1")
p.include_apocrypha(false)
expect(p.parse("JAKOBIT 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAKOBI 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAK 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAS 1:1").osis()).toEqual("Jas.1.1")
`
true
describe "Localized book 2Pet (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Pet (sq)", ->
`
expect(p.parse("2 e. Pjetrit 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 i. Pjetrit 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 e Pjetrit 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 i Pjetrit 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. Pjetrit 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Pjetrit 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Pje 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Pj 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2Pet 1:1").osis()).toEqual("2Pet.1.1")
p.include_apocrypha(false)
expect(p.parse("2 E. PJETRIT 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 I. PJETRIT 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 E PJETRIT 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 I PJETRIT 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. PJETRIT 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PJETRIT 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PJE 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PJ 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2PET 1:1").osis()).toEqual("2Pet.1.1")
`
true
describe "Localized book 1Pet (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Pet (sq)", ->
`
expect(p.parse("1 e. Pjetrit 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 i. Pjetrit 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 e Pjetrit 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 i Pjetrit 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. Pjetrit 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Pjetrit 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Pje 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Pj 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1Pet 1:1").osis()).toEqual("1Pet.1.1")
p.include_apocrypha(false)
expect(p.parse("1 E. PJETRIT 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 I. PJETRIT 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 E PJETRIT 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 I PJETRIT 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. PJETRIT 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PJETRIT 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PJE 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PJ 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1PET 1:1").osis()).toEqual("1Pet.1.1")
`
true
describe "Localized book Jude (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jude (sq)", ->
`
expect(p.parse("Juda 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("Jude 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("Jud 1:1").osis()).toEqual("Jude.1.1")
p.include_apocrypha(false)
expect(p.parse("JUDA 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("JUDE 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("JUD 1:1").osis()).toEqual("Jude.1.1")
`
true
describe "Localized book Tob (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Tob (sq)", ->
`
expect(p.parse("Tob 1:1").osis()).toEqual("Tob.1.1")
`
true
describe "Localized book Jdt (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jdt (sq)", ->
`
expect(p.parse("Jdt 1:1").osis()).toEqual("Jdt.1.1")
`
true
describe "Localized book Bar (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bar (sq)", ->
`
expect(p.parse("Bar 1:1").osis()).toEqual("Bar.1.1")
`
true
describe "Localized book Sus (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sus (sq)", ->
`
expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
`
true
describe "Localized book 2Macc (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Macc (sq)", ->
`
expect(p.parse("2Macc 1:1").osis()).toEqual("2Macc.1.1")
`
true
describe "Localized book 3Macc (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 3Macc (sq)", ->
`
expect(p.parse("3Macc 1:1").osis()).toEqual("3Macc.1.1")
`
true
describe "Localized book 4Macc (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 4Macc (sq)", ->
`
expect(p.parse("4Macc 1:1").osis()).toEqual("4Macc.1.1")
`
true
describe "Localized book 1Macc (sq)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Macc (sq)", ->
`
expect(p.parse("1Macc 1:1").osis()).toEqual("1Macc.1.1")
`
true
describe "Miscellaneous tests", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should return the expected language", ->
expect(p.languages).toEqual ["sq"]
it "should handle ranges (sq)", ->
expect(p.parse("Titus 1:1 - 2").osis()).toEqual "Titus.1.1-Titus.1.2"
expect(p.parse("Matt 1-2").osis()).toEqual "Matt.1-Matt.2"
expect(p.parse("Phlm 2 - 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
it "should handle chapters (sq)", ->
expect(p.parse("Titus 1:1, chapter 2").osis()).toEqual "Titus.1.1,Titus.2"
expect(p.parse("Matt 3:4 CHAPTER 6").osis()).toEqual "Matt.3.4,Matt.6"
it "should handle verses (sq)", ->
expect(p.parse("Exod 1:1 verse 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm VERSE 6").osis()).toEqual "Phlm.1.6"
it "should handle 'and' (sq)", ->
expect(p.parse("Exod 1:1 and 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm 2 AND 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
it "should handle titles (sq)", ->
expect(p.parse("Ps 3 title, 4:2, 5:title").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
expect(p.parse("PS 3 TITLE, 4:2, 5:TITLE").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
it "should handle 'ff' (sq)", ->
expect(p.parse("Rev 3ff, 4:2ff").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
expect(p.parse("REV 3 FF, 4:2 FF").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
it "should handle translations (sq)", ->
expect(p.parse("Lev 1 (ALB)").osis_and_translations()).toEqual [["Lev.1", "ALB"]]
expect(p.parse("lev 1 alb").osis_and_translations()).toEqual [["Lev.1", "ALB"]]
it "should handle book ranges (sq)", ->
p.set_options {book_alone_strategy: "full", book_range_strategy: "include"}
expect(p.parse("1 i - 3 i Gjonit").osis()).toEqual "1John.1-3John.1"
it "should handle boundaries (sq)", ->
p.set_options {book_alone_strategy: "full"}
expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
|
[
{
"context": "###\n * tpl 打包 ( 单页应用需要 )\n * @author jackie Lin <dashi_lin@163.com>\n###\n'use strict'\n\nthrough2 = ",
"end": 46,
"score": 0.9983792304992676,
"start": 36,
"tag": "NAME",
"value": "jackie Lin"
},
{
"context": "###\n * tpl 打包 ( 单页应用需要 )\n * @author jackie Lin <dashi_li... | tpl.coffee | JackieLin/gis | 0 | ###
* tpl 打包 ( 单页应用需要 )
* @author jackie Lin <dashi_lin@163.com>
###
'use strict'
through2 = require 'through2'
FS = require "q-io/fs"
path = require 'path'
_ = require 'lodash'
###
* 生成 tpl 信息
###
buildTpl = (contents, title='') ->
"""
define("#{title}", function() {
"use strict";
return #{contents}
});
"""
###
* 获取 require 文件信息
###
getRequireKey = (srcPath) ->
baseName = path.basename srcPath
packageName = path.basename path.join(srcPath, '../')
packageName + '/' + baseName
exports.build = ->
fileList = []
through2.obj (file, enc, callback) ->
srcPath = file.path
FS.stat srcPath
.then (stat) ->
throw new Error '%s is not directory', srcPath if stat.isDirectory() is false
FS.list srcPath
.then (list) ->
_.select list, (item) ->
path.extname(item) is '.html'
.then (list) ->
task = []
# 下一个方法需要
fileList = list
list.forEach (v) ->
task.push FS.read(path.join(srcPath, v))
task
.spread ->
content = {}
args = Array.prototype.slice.call arguments
fileList.forEach (v, k) ->
content[v] = args[k]
fileName = path.basename srcPath
file.contents = new Buffer buildTpl(JSON.stringify(content), getRequireKey(srcPath))
file.path = path.join srcPath, fileName + '.min.js'
# console.log file.path
# console.log args
callback null, file
.fail (err) ->
callback err, null
| 133200 | ###
* tpl 打包 ( 单页应用需要 )
* @author <NAME> <<EMAIL>>
###
'use strict'
through2 = require 'through2'
FS = require "q-io/fs"
path = require 'path'
_ = require 'lodash'
###
* 生成 tpl 信息
###
buildTpl = (contents, title='') ->
"""
define("#{title}", function() {
"use strict";
return #{contents}
});
"""
###
* 获取 require 文件信息
###
getRequireKey = (srcPath) ->
baseName = path.basename srcPath
packageName = path.basename path.join(srcPath, '../')
packageName + '/' + baseName
exports.build = ->
fileList = []
through2.obj (file, enc, callback) ->
srcPath = file.path
FS.stat srcPath
.then (stat) ->
throw new Error '%s is not directory', srcPath if stat.isDirectory() is false
FS.list srcPath
.then (list) ->
_.select list, (item) ->
path.extname(item) is '.html'
.then (list) ->
task = []
# 下一个方法需要
fileList = list
list.forEach (v) ->
task.push FS.read(path.join(srcPath, v))
task
.spread ->
content = {}
args = Array.prototype.slice.call arguments
fileList.forEach (v, k) ->
content[v] = args[k]
fileName = path.basename srcPath
file.contents = new Buffer buildTpl(JSON.stringify(content), getRequireKey(srcPath))
file.path = path.join srcPath, fileName + '.min.js'
# console.log file.path
# console.log args
callback null, file
.fail (err) ->
callback err, null
| true | ###
* tpl 打包 ( 单页应用需要 )
* @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
###
'use strict'
through2 = require 'through2'
FS = require "q-io/fs"
path = require 'path'
_ = require 'lodash'
###
* 生成 tpl 信息
###
buildTpl = (contents, title='') ->
"""
define("#{title}", function() {
"use strict";
return #{contents}
});
"""
###
* 获取 require 文件信息
###
getRequireKey = (srcPath) ->
baseName = path.basename srcPath
packageName = path.basename path.join(srcPath, '../')
packageName + '/' + baseName
exports.build = ->
fileList = []
through2.obj (file, enc, callback) ->
srcPath = file.path
FS.stat srcPath
.then (stat) ->
throw new Error '%s is not directory', srcPath if stat.isDirectory() is false
FS.list srcPath
.then (list) ->
_.select list, (item) ->
path.extname(item) is '.html'
.then (list) ->
task = []
# 下一个方法需要
fileList = list
list.forEach (v) ->
task.push FS.read(path.join(srcPath, v))
task
.spread ->
content = {}
args = Array.prototype.slice.call arguments
fileList.forEach (v, k) ->
content[v] = args[k]
fileName = path.basename srcPath
file.contents = new Buffer buildTpl(JSON.stringify(content), getRequireKey(srcPath))
file.path = path.join srcPath, fileName + '.min.js'
# console.log file.path
# console.log args
callback null, file
.fail (err) ->
callback err, null
|
[
{
"context": "onfig: meshbluConfig\n mongoDbUri: 'mongodb://127.0.0.1/test-uuid-alias-service'\n\n @server = new Serve",
"end": 550,
"score": 0.9763401746749878,
"start": 541,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": " new Datastore\n database: mongojs '... | test/integration/create-alias-integration-spec.coffee | joaoaneto/uuid-alias-service | 0 | http = require 'http'
request = require 'request'
shmock = require 'shmock'
Server = require '../../src/server'
mongojs = require 'mongojs'
Datastore = require 'meshblu-core-datastore'
describe 'POST /aliases', ->
beforeEach ->
@meshblu = shmock 0xd00d
afterEach (done) ->
@meshblu.close => done()
beforeEach (done) ->
meshbluConfig =
server: 'localhost'
port: 0xd00d
serverOptions =
port: undefined,
disableLogging: true
meshbluConfig: meshbluConfig
mongoDbUri: 'mongodb://127.0.0.1/test-uuid-alias-service'
@server = new Server serverOptions
@server.run =>
@serverPort = @server.address().port
done()
beforeEach ->
@whoamiHandler = @meshblu.get('/v2/whoami')
.reply(200, '{"uuid": "899801b3-e877-4c69-93db-89bd9787ceea"}')
beforeEach (done) ->
@datastore = new Datastore
database: mongojs 'mongodb://127.0.0.1/test-uuid-alias-service'
collection: 'aliases'
@datastore.remove {}, (error) => done() # delete everything
afterEach (done) ->
@server.stop => done()
context 'when given a valid alias', ->
context 'an ascii name', ->
beforeEach (done) ->
auth =
username: '899801b3-e877-4c69-93db-89bd9787ceea'
password: 'user-token'
alias =
name: 'premature-burial'
uuid: 'b38d4757-6f91-4aee-8ffb-ff53abc796a2'
options =
auth: auth
json: alias
request.post "http://localhost:#{@serverPort}/aliases", options, (error, @response, @body) =>
done error
beforeEach (done) ->
@datastore.findOne name: 'premature-burial', (error, @alias) =>
done error
it 'should call the whoamiHandler', ->
expect(@whoamiHandler.isDone).to.be.true
it 'should respond with a 201', ->
expect(@response.statusCode).to.equal 201
it 'create an alias in mongo', ->
expect(@alias).to.contain name: 'premature-burial', uuid: 'b38d4757-6f91-4aee-8ffb-ff53abc796a2', owner: '899801b3-e877-4c69-93db-89bd9787ceea'
context 'a unicode name', ->
beforeEach (done) ->
auth =
username: '899801b3-e877-4c69-93db-89bd9787ceea'
password: 'user-token'
alias =
name: '☃'
uuid: 'b38d4757-6f91-4aee-8ffb-ff53abc796a2'
options =
auth: auth
json: alias
request.post "http://localhost:#{@serverPort}/aliases", options, (error, @response, @body) =>
done error
beforeEach (done) ->
@datastore.findOne name: '☃', (error, @alias) =>
done error
it 'should call the whoamiHandler', ->
expect(@whoamiHandler.isDone).to.be.true
it 'should respond with a 201', ->
expect(@response.statusCode).to.equal 201
it 'create an alias in mongo', ->
expect(@alias).to.contain name: '☃', uuid: 'b38d4757-6f91-4aee-8ffb-ff53abc796a2', owner: '899801b3-e877-4c69-93db-89bd9787ceea'
context 'when given an invalid alias', ->
context 'when given a UUID as a name', ->
beforeEach (done) ->
auth =
username: '899801b3-e877-4c69-93db-89bd9787ceea'
password: 'user-token'
alias =
name: 'c38b942c-f851-4ef8-a5a0-65b0ea960a4c'
uuid: '48162884-d42f-4110-bdb2-9d17db996993'
options =
auth: auth
json: alias
request.post "http://localhost:#{@serverPort}/aliases", options, (error, @response, @body) =>
done error
beforeEach (done) ->
@datastore.findOne name: 'c38b942c-f851-4ef8-a5a0-65b0ea960a4c', (error, @alias) =>
done error
it 'should call the whoamiHandler', ->
expect(@whoamiHandler.isDone).to.be.true
it 'should respond with a 422', ->
expect(@response.statusCode).to.equal 422
it 'should not create an alias in mongo', ->
expect(@alias).to.not.exist
context 'when given an empty name', ->
beforeEach (done) ->
auth =
username: '899801b3-e877-4c69-93db-89bd9787ceea'
password: 'user-token'
alias =
name: undefined
uuid: 'ecca684d-68ba-47d9-bb93-5124f20936cc'
options =
auth: auth
json: alias
request.post "http://localhost:#{@serverPort}/aliases", options, (error, @response, @body) =>
done error
beforeEach (done) ->
@datastore.findOne name: undefined, (error, @alias) =>
done error
it 'should call the whoamiHandler', ->
expect(@whoamiHandler.isDone).to.be.true
it 'should respond with a 422', ->
expect(@response.statusCode).to.equal 422
it 'should not create an alias in mongo', ->
expect(@alias).to.not.exist
context 'when given an empty uuid', ->
beforeEach (done) ->
auth =
username: '899801b3-e877-4c69-93db-89bd9787ceea'
password: 'user-token'
alias =
name: 'burlap-sack'
uuid: undefined
options =
auth: auth
json: alias
request.post "http://localhost:#{@serverPort}/aliases", options, (error, @response, @body) =>
done error
beforeEach (done) ->
@datastore.findOne name: 'burlap-sack', (error, @alias) =>
done error
it 'should call the whoamiHandler', ->
expect(@whoamiHandler.isDone).to.be.true
it 'should respond with a 422', ->
expect(@response.statusCode).to.equal 422
it 'should not create an alias in mongo', ->
expect(@alias).to.not.exist
context 'when given non-uuid as the uuid', ->
beforeEach (done) ->
auth =
username: '899801b3-e877-4c69-93db-89bd9787ceea'
password: 'user-token'
alias =
name: 'burlap-sack'
uuid: 'billy-club'
options =
auth: auth
json: alias
request.post "http://localhost:#{@serverPort}/aliases", options, (error, @response, @body) =>
done error
beforeEach (done) ->
@datastore.findOne name: 'burlap-sack', (error, @alias) =>
done error
it 'should call the whoamiHandler', ->
expect(@whoamiHandler.isDone).to.be.true
it 'should respond with a 422', ->
expect(@response.statusCode).to.equal 422
it 'should not create an alias in mongo', ->
expect(@alias).to.not.exist
| 173418 | http = require 'http'
request = require 'request'
shmock = require 'shmock'
Server = require '../../src/server'
mongojs = require 'mongojs'
Datastore = require 'meshblu-core-datastore'
describe 'POST /aliases', ->
beforeEach ->
@meshblu = shmock 0xd00d
afterEach (done) ->
@meshblu.close => done()
beforeEach (done) ->
meshbluConfig =
server: 'localhost'
port: 0xd00d
serverOptions =
port: undefined,
disableLogging: true
meshbluConfig: meshbluConfig
mongoDbUri: 'mongodb://127.0.0.1/test-uuid-alias-service'
@server = new Server serverOptions
@server.run =>
@serverPort = @server.address().port
done()
beforeEach ->
@whoamiHandler = @meshblu.get('/v2/whoami')
.reply(200, '{"uuid": "899801b3-e877-4c69-93db-89bd9787ceea"}')
beforeEach (done) ->
@datastore = new Datastore
database: mongojs 'mongodb://127.0.0.1/test-uuid-alias-service'
collection: 'aliases'
@datastore.remove {}, (error) => done() # delete everything
afterEach (done) ->
@server.stop => done()
context 'when given a valid alias', ->
context 'an ascii name', ->
beforeEach (done) ->
auth =
username: '<PASSWORD>'
password: '<PASSWORD>'
alias =
name: 'premature-burial'
uuid: 'b38d4757-6f91-4aee-8ffb-ff53abc796a2'
options =
auth: auth
json: alias
request.post "http://localhost:#{@serverPort}/aliases", options, (error, @response, @body) =>
done error
beforeEach (done) ->
@datastore.findOne name: 'premature-burial', (error, @alias) =>
done error
it 'should call the whoamiHandler', ->
expect(@whoamiHandler.isDone).to.be.true
it 'should respond with a 201', ->
expect(@response.statusCode).to.equal 201
it 'create an alias in mongo', ->
expect(@alias).to.contain name: 'premature-burial', uuid: 'b38d4757-6f91-4aee-8ffb-ff53abc796a2', owner: '899801b3-e877-4c69-93db-89bd9787ceea'
context 'a unicode name', ->
beforeEach (done) ->
auth =
username: '<PASSWORD>'
password: '<PASSWORD>'
alias =
name: '☃'
uuid: 'b38d4757-6f91-4aee-8ffb-ff53abc796a2'
options =
auth: auth
json: alias
request.post "http://localhost:#{@serverPort}/aliases", options, (error, @response, @body) =>
done error
beforeEach (done) ->
@datastore.findOne name: '☃', (error, @alias) =>
done error
it 'should call the whoamiHandler', ->
expect(@whoamiHandler.isDone).to.be.true
it 'should respond with a 201', ->
expect(@response.statusCode).to.equal 201
it 'create an alias in mongo', ->
expect(@alias).to.contain name: '☃', uuid: 'b38d4757-6f91-4aee-8ffb-ff53abc796a2', owner: '899801b3-e877-4c69-93db-89bd9787ceea'
context 'when given an invalid alias', ->
context 'when given a UUID as a name', ->
beforeEach (done) ->
auth =
username: '<PASSWORD>'
password: '<PASSWORD>'
alias =
name: 'c38b942c-f851-4ef8-a5a0-65b0ea960a4c'
uuid: '48162884-d42f-4110-bdb2-9d17db996993'
options =
auth: auth
json: alias
request.post "http://localhost:#{@serverPort}/aliases", options, (error, @response, @body) =>
done error
beforeEach (done) ->
@datastore.findOne name: 'c38b942c-f851-4ef8-a5a0-65b0ea960a4c', (error, @alias) =>
done error
it 'should call the whoamiHandler', ->
expect(@whoamiHandler.isDone).to.be.true
it 'should respond with a 422', ->
expect(@response.statusCode).to.equal 422
it 'should not create an alias in mongo', ->
expect(@alias).to.not.exist
context 'when given an empty name', ->
beforeEach (done) ->
auth =
username: '<PASSWORD>'
password: '<PASSWORD>'
alias =
name: undefined
uuid: 'ecca684d-68ba-47d9-bb93-5124f20936cc'
options =
auth: auth
json: alias
request.post "http://localhost:#{@serverPort}/aliases", options, (error, @response, @body) =>
done error
beforeEach (done) ->
@datastore.findOne name: undefined, (error, @alias) =>
done error
it 'should call the whoamiHandler', ->
expect(@whoamiHandler.isDone).to.be.true
it 'should respond with a 422', ->
expect(@response.statusCode).to.equal 422
it 'should not create an alias in mongo', ->
expect(@alias).to.not.exist
context 'when given an empty uuid', ->
beforeEach (done) ->
auth =
username: '<PASSWORD>'
password: '<PASSWORD>'
alias =
name: 'burlap-sack'
uuid: undefined
options =
auth: auth
json: alias
request.post "http://localhost:#{@serverPort}/aliases", options, (error, @response, @body) =>
done error
beforeEach (done) ->
@datastore.findOne name: 'burlap-sack', (error, @alias) =>
done error
it 'should call the whoamiHandler', ->
expect(@whoamiHandler.isDone).to.be.true
it 'should respond with a 422', ->
expect(@response.statusCode).to.equal 422
it 'should not create an alias in mongo', ->
expect(@alias).to.not.exist
context 'when given non-uuid as the uuid', ->
beforeEach (done) ->
auth =
username: '<PASSWORD>'
password: '<PASSWORD>'
alias =
name: 'burlap-sack'
uuid: 'billy-club'
options =
auth: auth
json: alias
request.post "http://localhost:#{@serverPort}/aliases", options, (error, @response, @body) =>
done error
beforeEach (done) ->
@datastore.findOne name: 'burlap-sack', (error, @alias) =>
done error
it 'should call the whoamiHandler', ->
expect(@whoamiHandler.isDone).to.be.true
it 'should respond with a 422', ->
expect(@response.statusCode).to.equal 422
it 'should not create an alias in mongo', ->
expect(@alias).to.not.exist
| true | http = require 'http'
request = require 'request'
shmock = require 'shmock'
Server = require '../../src/server'
mongojs = require 'mongojs'
Datastore = require 'meshblu-core-datastore'
describe 'POST /aliases', ->
beforeEach ->
@meshblu = shmock 0xd00d
afterEach (done) ->
@meshblu.close => done()
beforeEach (done) ->
meshbluConfig =
server: 'localhost'
port: 0xd00d
serverOptions =
port: undefined,
disableLogging: true
meshbluConfig: meshbluConfig
mongoDbUri: 'mongodb://127.0.0.1/test-uuid-alias-service'
@server = new Server serverOptions
@server.run =>
@serverPort = @server.address().port
done()
beforeEach ->
@whoamiHandler = @meshblu.get('/v2/whoami')
.reply(200, '{"uuid": "899801b3-e877-4c69-93db-89bd9787ceea"}')
beforeEach (done) ->
@datastore = new Datastore
database: mongojs 'mongodb://127.0.0.1/test-uuid-alias-service'
collection: 'aliases'
@datastore.remove {}, (error) => done() # delete everything
afterEach (done) ->
@server.stop => done()
context 'when given a valid alias', ->
context 'an ascii name', ->
beforeEach (done) ->
auth =
username: 'PI:PASSWORD:<PASSWORD>END_PI'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
alias =
name: 'premature-burial'
uuid: 'b38d4757-6f91-4aee-8ffb-ff53abc796a2'
options =
auth: auth
json: alias
request.post "http://localhost:#{@serverPort}/aliases", options, (error, @response, @body) =>
done error
beforeEach (done) ->
@datastore.findOne name: 'premature-burial', (error, @alias) =>
done error
it 'should call the whoamiHandler', ->
expect(@whoamiHandler.isDone).to.be.true
it 'should respond with a 201', ->
expect(@response.statusCode).to.equal 201
it 'create an alias in mongo', ->
expect(@alias).to.contain name: 'premature-burial', uuid: 'b38d4757-6f91-4aee-8ffb-ff53abc796a2', owner: '899801b3-e877-4c69-93db-89bd9787ceea'
context 'a unicode name', ->
beforeEach (done) ->
auth =
username: 'PI:PASSWORD:<PASSWORD>END_PI'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
alias =
name: '☃'
uuid: 'b38d4757-6f91-4aee-8ffb-ff53abc796a2'
options =
auth: auth
json: alias
request.post "http://localhost:#{@serverPort}/aliases", options, (error, @response, @body) =>
done error
beforeEach (done) ->
@datastore.findOne name: '☃', (error, @alias) =>
done error
it 'should call the whoamiHandler', ->
expect(@whoamiHandler.isDone).to.be.true
it 'should respond with a 201', ->
expect(@response.statusCode).to.equal 201
it 'create an alias in mongo', ->
expect(@alias).to.contain name: '☃', uuid: 'b38d4757-6f91-4aee-8ffb-ff53abc796a2', owner: '899801b3-e877-4c69-93db-89bd9787ceea'
context 'when given an invalid alias', ->
context 'when given a UUID as a name', ->
beforeEach (done) ->
auth =
username: 'PI:PASSWORD:<PASSWORD>END_PI'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
alias =
name: 'c38b942c-f851-4ef8-a5a0-65b0ea960a4c'
uuid: '48162884-d42f-4110-bdb2-9d17db996993'
options =
auth: auth
json: alias
request.post "http://localhost:#{@serverPort}/aliases", options, (error, @response, @body) =>
done error
beforeEach (done) ->
@datastore.findOne name: 'c38b942c-f851-4ef8-a5a0-65b0ea960a4c', (error, @alias) =>
done error
it 'should call the whoamiHandler', ->
expect(@whoamiHandler.isDone).to.be.true
it 'should respond with a 422', ->
expect(@response.statusCode).to.equal 422
it 'should not create an alias in mongo', ->
expect(@alias).to.not.exist
context 'when given an empty name', ->
beforeEach (done) ->
auth =
username: 'PI:PASSWORD:<PASSWORD>END_PI'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
alias =
name: undefined
uuid: 'ecca684d-68ba-47d9-bb93-5124f20936cc'
options =
auth: auth
json: alias
request.post "http://localhost:#{@serverPort}/aliases", options, (error, @response, @body) =>
done error
beforeEach (done) ->
@datastore.findOne name: undefined, (error, @alias) =>
done error
it 'should call the whoamiHandler', ->
expect(@whoamiHandler.isDone).to.be.true
it 'should respond with a 422', ->
expect(@response.statusCode).to.equal 422
it 'should not create an alias in mongo', ->
expect(@alias).to.not.exist
context 'when given an empty uuid', ->
beforeEach (done) ->
auth =
username: 'PI:PASSWORD:<PASSWORD>END_PI'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
alias =
name: 'burlap-sack'
uuid: undefined
options =
auth: auth
json: alias
request.post "http://localhost:#{@serverPort}/aliases", options, (error, @response, @body) =>
done error
beforeEach (done) ->
@datastore.findOne name: 'burlap-sack', (error, @alias) =>
done error
it 'should call the whoamiHandler', ->
expect(@whoamiHandler.isDone).to.be.true
it 'should respond with a 422', ->
expect(@response.statusCode).to.equal 422
it 'should not create an alias in mongo', ->
expect(@alias).to.not.exist
context 'when given non-uuid as the uuid', ->
beforeEach (done) ->
auth =
username: 'PI:PASSWORD:<PASSWORD>END_PI'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
alias =
name: 'burlap-sack'
uuid: 'billy-club'
options =
auth: auth
json: alias
request.post "http://localhost:#{@serverPort}/aliases", options, (error, @response, @body) =>
done error
beforeEach (done) ->
@datastore.findOne name: 'burlap-sack', (error, @alias) =>
done error
it 'should call the whoamiHandler', ->
expect(@whoamiHandler.isDone).to.be.true
it 'should respond with a 422', ->
expect(@response.statusCode).to.equal 422
it 'should not create an alias in mongo', ->
expect(@alias).to.not.exist
|
[
{
"context": "v.POSTMARK_API_KEY)\n\nclient.emails.send(\n From: 'matt@awesometalk.com'\n To: 'matt.insler@gmail.com'\n Subject: 'I like",
"end": 148,
"score": 0.9999247789382935,
"start": 128,
"tag": "EMAIL",
"value": "matt@awesometalk.com"
},
{
"context": "mails.send(\n From: '... | examples/basic.coffee | mattinsler/postmark.node | 1 | Postmark = require '../lib/postmark'
client = new Postmark(api_key: process.env.POSTMARK_API_KEY)
client.emails.send(
From: 'matt@awesometalk.com'
To: 'matt.insler@gmail.com'
Subject: 'I like puppies',
HtmlBody: '<b>PUPPIES!!!</b>'
)
.then -> console.log 'then', arguments
.catch (err) -> console.log 'catch', err.stack
.finally -> console.log 'finally', arguments
client.emails.batch([
From: 'matt@awesometalk.com'
To: 'matt.insler@gmail.com'
Subject: 'I like puppies',
HtmlBody: '<b>PUPPIES!!!</b>'
])
.then -> console.log 'then', arguments
.catch (err) -> console.log 'catch', err.stack
.finally -> console.log 'finally', arguments
| 64695 | Postmark = require '../lib/postmark'
client = new Postmark(api_key: process.env.POSTMARK_API_KEY)
client.emails.send(
From: '<EMAIL>'
To: '<EMAIL>'
Subject: 'I like puppies',
HtmlBody: '<b>PUPPIES!!!</b>'
)
.then -> console.log 'then', arguments
.catch (err) -> console.log 'catch', err.stack
.finally -> console.log 'finally', arguments
client.emails.batch([
From: '<EMAIL>'
To: '<EMAIL>'
Subject: 'I like puppies',
HtmlBody: '<b>PUPPIES!!!</b>'
])
.then -> console.log 'then', arguments
.catch (err) -> console.log 'catch', err.stack
.finally -> console.log 'finally', arguments
| true | Postmark = require '../lib/postmark'
client = new Postmark(api_key: process.env.POSTMARK_API_KEY)
client.emails.send(
From: 'PI:EMAIL:<EMAIL>END_PI'
To: 'PI:EMAIL:<EMAIL>END_PI'
Subject: 'I like puppies',
HtmlBody: '<b>PUPPIES!!!</b>'
)
.then -> console.log 'then', arguments
.catch (err) -> console.log 'catch', err.stack
.finally -> console.log 'finally', arguments
client.emails.batch([
From: 'PI:EMAIL:<EMAIL>END_PI'
To: 'PI:EMAIL:<EMAIL>END_PI'
Subject: 'I like puppies',
HtmlBody: '<b>PUPPIES!!!</b>'
])
.then -> console.log 'then', arguments
.catch (err) -> console.log 'catch', err.stack
.finally -> console.log 'finally', arguments
|
[
{
"context": "###\n * https://github.com/jkuetemeier/gulp-tasks-common\n *\n * Copyright (c) 2014 Jörg K",
"end": 37,
"score": 0.9971939921379089,
"start": 26,
"tag": "USERNAME",
"value": "jkuetemeier"
},
{
"context": "temeier/gulp-tasks-common\n *\n * Copyright (c) 2014 Jörg Kütemei... | src/index.coffee | kuetemeier/gulp-tasks-common | 0 | ###
* https://github.com/jkuetemeier/gulp-tasks-common
*
* Copyright (c) 2014 Jörg Kütemeier
* Licensed under the MIT license.
###
_ = require 'lodash'
common = {
# load config from 'config' dir
config : require('./config')
}
# load tasks from `tasks` dir
tasks = require('requiredir')('tasks')
# hack, don't know where length and toArray are coming from...
delete tasks.length
delete tasks.toArray
common.tasks = _.mapValues tasks, (value) ->
value(common)
# prepare common gulp tasks
common.register_tasks = require('./register_tasks')(common)
module.exports = common
| 180614 | ###
* https://github.com/jkuetemeier/gulp-tasks-common
*
* Copyright (c) 2014 <NAME>
* Licensed under the MIT license.
###
_ = require 'lodash'
common = {
# load config from 'config' dir
config : require('./config')
}
# load tasks from `tasks` dir
tasks = require('requiredir')('tasks')
# hack, don't know where length and toArray are coming from...
delete tasks.length
delete tasks.toArray
common.tasks = _.mapValues tasks, (value) ->
value(common)
# prepare common gulp tasks
common.register_tasks = require('./register_tasks')(common)
module.exports = common
| true | ###
* https://github.com/jkuetemeier/gulp-tasks-common
*
* Copyright (c) 2014 PI:NAME:<NAME>END_PI
* Licensed under the MIT license.
###
_ = require 'lodash'
common = {
# load config from 'config' dir
config : require('./config')
}
# load tasks from `tasks` dir
tasks = require('requiredir')('tasks')
# hack, don't know where length and toArray are coming from...
delete tasks.length
delete tasks.toArray
common.tasks = _.mapValues tasks, (value) ->
value(common)
# prepare common gulp tasks
common.register_tasks = require('./register_tasks')(common)
module.exports = common
|
[
{
"context": " customer support center at 1 (800) 286-0203 or at support@t-mobileatwork.com.\"\n\n invalid_user: \" Your account d",
"end": 816,
"score": 0.999927282333374,
"start": 790,
"tag": "EMAIL",
"value": "support@t-mobileatwork.com"
},
{
"context": " customer s... | pages/error/index/error_index.coffee | signonsridhar/sridhar_hbs | 0 | define([
'bases/page'
'css! pages/error/index/error_index'
],
(BasePage)->
BasePage.extend({
messages:{
reset_password_link_sent: "A link to reset your password has been sent to your email account.
<a class='link_to_security_questions'>I didn\’t get the reset password email.</a>",
activation_link_sent: "A new activation link has been sent to your email account.",
reset_password_success: "Your password has been reset. We sent you a notification email. You will now be directed to
<a class='link_to_login'>the login page.</a>",
account_locked: "We have problems logging you in. Please contact customer support center at 1 (800) 286-0203 or at support@t-mobileatwork.com."
invalid_user: " Your account does not exist. Please contact customer support center at 1 (800) 286-0203 or at support@t-mobileatwork.com."
access_denied: " Your account is restricted. Please contact customer support center at 1 (800) 286-0203 or at support@t-mobileatwork.com."
terminated: " Your account is restricted. Please contact customer support center at 1 (800) 286-0203 or at support@t-mobileatwork.com."
unknown_error: " Unknown service error"
1177: "We have problems log you in. Please contact customer support center at 1 (800) 286-0203 or at support@t-mobileatwork.com."
},
init:(elem, options)->
this.setup_viewmodel({
msg: ''
title: ''
})
this.render('error/index/error_index')
'.link_to_security_questions click':()->
window.location = can.route.url({ main:'security_questions', email: can.route.attr('email')})
'.link_to_login click':()->
window.location = can.route.url({ main:'auth',sub: 'login'})
switch_sub: (sub)->
this.viewmodel.attr('msg', this.messages[sub])
if !isNaN(parseInt(sub))
sub = 'Error'
this.viewmodel.attr('title', sub)
})
) | 56656 | define([
'bases/page'
'css! pages/error/index/error_index'
],
(BasePage)->
BasePage.extend({
messages:{
reset_password_link_sent: "A link to reset your password has been sent to your email account.
<a class='link_to_security_questions'>I didn\’t get the reset password email.</a>",
activation_link_sent: "A new activation link has been sent to your email account.",
reset_password_success: "Your password has been reset. We sent you a notification email. You will now be directed to
<a class='link_to_login'>the login page.</a>",
account_locked: "We have problems logging you in. Please contact customer support center at 1 (800) 286-0203 or at <EMAIL>."
invalid_user: " Your account does not exist. Please contact customer support center at 1 (800) 286-0203 or at <EMAIL>."
access_denied: " Your account is restricted. Please contact customer support center at 1 (800) 286-0203 or at <EMAIL>."
terminated: " Your account is restricted. Please contact customer support center at 1 (800) 286-0203 or at <EMAIL>."
unknown_error: " Unknown service error"
1177: "We have problems log you in. Please contact customer support center at 1 (800) 286-0203 or at <EMAIL>."
},
init:(elem, options)->
this.setup_viewmodel({
msg: ''
title: ''
})
this.render('error/index/error_index')
'.link_to_security_questions click':()->
window.location = can.route.url({ main:'security_questions', email: can.route.attr('email')})
'.link_to_login click':()->
window.location = can.route.url({ main:'auth',sub: 'login'})
switch_sub: (sub)->
this.viewmodel.attr('msg', this.messages[sub])
if !isNaN(parseInt(sub))
sub = 'Error'
this.viewmodel.attr('title', sub)
})
) | true | define([
'bases/page'
'css! pages/error/index/error_index'
],
(BasePage)->
BasePage.extend({
messages:{
reset_password_link_sent: "A link to reset your password has been sent to your email account.
<a class='link_to_security_questions'>I didn\’t get the reset password email.</a>",
activation_link_sent: "A new activation link has been sent to your email account.",
reset_password_success: "Your password has been reset. We sent you a notification email. You will now be directed to
<a class='link_to_login'>the login page.</a>",
account_locked: "We have problems logging you in. Please contact customer support center at 1 (800) 286-0203 or at PI:EMAIL:<EMAIL>END_PI."
invalid_user: " Your account does not exist. Please contact customer support center at 1 (800) 286-0203 or at PI:EMAIL:<EMAIL>END_PI."
access_denied: " Your account is restricted. Please contact customer support center at 1 (800) 286-0203 or at PI:EMAIL:<EMAIL>END_PI."
terminated: " Your account is restricted. Please contact customer support center at 1 (800) 286-0203 or at PI:EMAIL:<EMAIL>END_PI."
unknown_error: " Unknown service error"
1177: "We have problems log you in. Please contact customer support center at 1 (800) 286-0203 or at PI:EMAIL:<EMAIL>END_PI."
},
init:(elem, options)->
this.setup_viewmodel({
msg: ''
title: ''
})
this.render('error/index/error_index')
'.link_to_security_questions click':()->
window.location = can.route.url({ main:'security_questions', email: can.route.attr('email')})
'.link_to_login click':()->
window.location = can.route.url({ main:'auth',sub: 'login'})
switch_sub: (sub)->
this.viewmodel.attr('msg', this.messages[sub])
if !isNaN(parseInt(sub))
sub = 'Error'
this.viewmodel.attr('title', sub)
})
) |
[
{
"context": ", ->\n subject = FileInfo.fromJson\n name: 'name'\n lastModified: new Date('2013-08-07T10:28:1",
"end": 187,
"score": 0.8295201063156128,
"start": 183,
"tag": "NAME",
"value": "name"
},
{
"context": "\n subject = FileInfo.fromFile\n name: 'name'... | test/MassUpload/FileInfoSpec.coffee | overview/js-mass-upload | 2 | FileInfo = require('../../src/MassUpload/FileInfo')
describe 'MassUpload/FileInfo', ->
subject = undefined
describe '.fromJson', ->
subject = FileInfo.fromJson
name: 'name'
lastModified: new Date('2013-08-07T10:28:13-04:00').valueOf()
total: 100000
loaded: 20000
it 'should have name', ->
expect(subject.name).to.eq('name')
it 'should have lastModified', ->
expect(subject.lastModified).to.eq(1375885693000)
it 'should have total', ->
expect(subject.total).to.eq(100000)
it 'should have loaded', ->
expect(subject.loaded).to.eq(20000)
describe '.fromFile', ->
beforeEach ->
subject = FileInfo.fromFile
name: 'name'
lastModified: 1375885693000
size: 10000
it 'should have name', ->
expect(subject.name).to.eq('name')
it 'should have lastModified', ->
expect(subject.lastModified).to.eq(1375885693000)
it 'should have total', ->
expect(subject.total).to.eq(10000)
it 'should have loaded=0', ->
expect(subject.loaded).to.eq(0)
it 'should use .webkitRelativePath if there is one', ->
subject = FileInfo.fromFile
name: 'name'
lastModified: new Date().valueOf()
size: 10000
webkitRelativePath: 'foo/bar/name'
expect(subject.name).to.eq('foo/bar/name')
| 121410 | FileInfo = require('../../src/MassUpload/FileInfo')
describe 'MassUpload/FileInfo', ->
subject = undefined
describe '.fromJson', ->
subject = FileInfo.fromJson
name: '<NAME>'
lastModified: new Date('2013-08-07T10:28:13-04:00').valueOf()
total: 100000
loaded: 20000
it 'should have name', ->
expect(subject.name).to.eq('name')
it 'should have lastModified', ->
expect(subject.lastModified).to.eq(1375885693000)
it 'should have total', ->
expect(subject.total).to.eq(100000)
it 'should have loaded', ->
expect(subject.loaded).to.eq(20000)
describe '.fromFile', ->
beforeEach ->
subject = FileInfo.fromFile
name: '<NAME>'
lastModified: 1375885693000
size: 10000
it 'should have name', ->
expect(subject.name).to.eq('name')
it 'should have lastModified', ->
expect(subject.lastModified).to.eq(1375885693000)
it 'should have total', ->
expect(subject.total).to.eq(10000)
it 'should have loaded=0', ->
expect(subject.loaded).to.eq(0)
it 'should use .webkitRelativePath if there is one', ->
subject = FileInfo.fromFile
name: '<NAME>'
lastModified: new Date().valueOf()
size: 10000
webkitRelativePath: 'foo/bar/name'
expect(subject.name).to.eq('foo/bar/name')
| true | FileInfo = require('../../src/MassUpload/FileInfo')
describe 'MassUpload/FileInfo', ->
subject = undefined
describe '.fromJson', ->
subject = FileInfo.fromJson
name: 'PI:NAME:<NAME>END_PI'
lastModified: new Date('2013-08-07T10:28:13-04:00').valueOf()
total: 100000
loaded: 20000
it 'should have name', ->
expect(subject.name).to.eq('name')
it 'should have lastModified', ->
expect(subject.lastModified).to.eq(1375885693000)
it 'should have total', ->
expect(subject.total).to.eq(100000)
it 'should have loaded', ->
expect(subject.loaded).to.eq(20000)
describe '.fromFile', ->
beforeEach ->
subject = FileInfo.fromFile
name: 'PI:NAME:<NAME>END_PI'
lastModified: 1375885693000
size: 10000
it 'should have name', ->
expect(subject.name).to.eq('name')
it 'should have lastModified', ->
expect(subject.lastModified).to.eq(1375885693000)
it 'should have total', ->
expect(subject.total).to.eq(10000)
it 'should have loaded=0', ->
expect(subject.loaded).to.eq(0)
it 'should use .webkitRelativePath if there is one', ->
subject = FileInfo.fromFile
name: 'PI:NAME:<NAME>END_PI'
lastModified: new Date().valueOf()
size: 10000
webkitRelativePath: 'foo/bar/name'
expect(subject.name).to.eq('foo/bar/name')
|
[
{
"context": "ition.end.vim-help\"\n\n\n\t# “VIM REFERENCE MANUAL by Bring Moolah”\n\tmanualTitle:\n\t\tname: \"markup.heading.manual-ti",
"end": 4784,
"score": 0.9998685717582703,
"start": 4772,
"tag": "NAME",
"value": "Bring Moolah"
}
] | grammars/help.cson | Alhadis/atom-vimL | 15 | name: "Vim Help"
scopeName: "text.vim-help"
fileTypes: ["doc/vi_diff.txt"] # Dummy extension; assignment handled programmatically
patterns: [{
# Well-formed header declaring *filename.txt*, target Vim version, and timestamp
name: "meta.file-header.vim-help"
match: "(?i)\\A(\\*[#-)!+-~]+\\*)[ \\t]+(For\\s+Vim\\s+version\\s*[\\d.]+)[ \\t]+Last\\s+changed?:\\s*(\\S.*?)\\s*$"
captures:
1: patterns: [include: "#tag"]
2: patterns: [include: "#vimVersion"]
3: name: "constant.numeric.date.last-changed.vim-help"
}, include: "#main"]
repository:
main:
patterns: [
{include: "#tag"}
{include: "#link"}
{include: "#special"}
{include: "#option"}
{include: "#command"}
{include: "#codeBlock"}
{include: "#manualTitle"}
{include: "#columnHeading"}
{include: "#sectionDelimiter"}
{include: "#vimVersion"}
{include: "#url"}
{include: "#other"}
]
# Verbatim code-block
codeBlock:
name: "meta.example.vim-help"
begin: "(?:(?<=\\s)|^)(>)$"
end: "^(<)|(?=^\\S)"
beginCaptures: 1: name: "keyword.control.example.begin.vim-help"
endCaptures: 1: name: "keyword.control.example.end.vim-help"
contentName: "markup.raw.code.verbatim.vim-help"
# Heading~
columnHeading:
name: "markup.heading.column-title.vim-help"
match: "^\\s*\\S.*(~)$"
captures:
1: name: "keyword.operator.column-marker.tilde.vim-help"
# `:command`
command:
name: "markup.raw.command.vim-help"
match: "(`)([^` \\t]+)(`)"
captures:
1: name: "punctuation.definition.link.begin.vim-help"
2: patterns: [include: "source.viml"]
3: name: "punctuation.definition.link.end.vim-help"
# |link|
link:
name: "meta.link.vim-help"
match: '(\\|)([^"*|]+)(\\|)'
captures:
1: name: "meta.separator.punctuation.link.begin.vim-help"
2: name: "constant.other.link.vim-help"
3: name: "meta.separator.punctuation.link.end.vim-help"
# “Special” keywords (according to `help.vim`)
special:
patterns: [{
name: "entity.name.keyword.vim-help"
match: "(<)N(>)"
captures:
1: name: "punctuation.definition.bracket.angle.begin.vim-help"
2: name: "punctuation.definition.bracket.angle.end.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "(<)N(?=\\.(?:$|\\s))"
captures:
1: name: "punctuation.definition.bracket.angle.begin.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "(\\()N(>)"
captures:
1: name: "punctuation.definition.bracket.round.begin.vim-help"
2: name: "punctuation.definition.bracket.angle.end.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "(\\[)N(\\])"
captures:
1: name: "punctuation.definition.bracket.square.begin.vim-help"
2: name: "punctuation.definition.bracket.square.end.vim-help"
},{
match: "(N) (N)"
captures:
1: name: "entity.name.keyword.vim-help"
2: name: "entity.name.keyword.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "N(?=th|-1)"
},{
name: "entity.name.keyword.vim-help"
match: "({)[-a-zA-Z0-9'\"*+/:%#=\\[\\]<>.,]+(})"
captures:
1: name: "punctuation.definition.bracket.curly.begin.vim-help"
2: name: "punctuation.definition.bracket.curly.end.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "(?<=\\s)(\\[)[-a-z^A-Z0-9_]{2,}(\\])"
captures:
1: name: "punctuation.definition.bracket.square.begin.vim-help"
2: name: "punctuation.definition.bracket.square.end.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "(<)[-a-zA-Z0-9_]+(>)"
captures:
1: name: "punctuation.definition.bracket.angle.begin.vim-help"
2: name: "punctuation.definition.bracket.angle.end.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "(<)[SCM]-.(>)"
captures:
1: name: "punctuation.definition.bracket.angle.begin.vim-help"
2: name: "punctuation.definition.bracket.angle.end.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "(\\[)(?:\\+\\+opt|[-+]?num|\\+?cmd|addr|arguments|arg|count|group|ident|line|offset|range)(\\])"
captures:
1: name: "punctuation.definition.bracket.square.begin.vim-help"
2: name: "punctuation.definition.bracket.square.end.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "\\bCTRL(-)(?:.|Break|Del|Insert|PageDown|PageUp|({)char(}))"
captures:
1: name: "punctuation.delimiter.separator.dash.hyphen.vim-help"
2: name: "punctuation.definition.bracket.curly.begin.vim-help"
3: name: "punctuation.definition.bracket.curly.end.vim-help"
}]
# *tag-name*
tag:
name: "storage.link.hypertext.vim-help"
match: "(\\*)[#-)!+-~]+(\\*)(?=\\s|$)"
captures:
1: name: "punctuation.definition.begin.vim-help"
2: name: "punctuation.definition.end.vim-help"
# “VIM REFERENCE MANUAL by Bring Moolah”
manualTitle:
name: "markup.heading.manual-title.vim-help"
match: "^[ \\t]+(VIM REFERENCE.*)\\s*$"
captures:
1: name: "constant.other.title-text.vim-help"
# 'singlequoted' Vim option
option:
patterns: [{
name: "entity.name.tag.option.vim-help"
match: "(')[a-z]{2,}(')"
captures:
1: name: "punctuation.definition.begin.option.vim-help"
2: name: "punctuation.definition.end.option.vim-help"
}]
# Borders, dividers, whatever
sectionDelimiter:
name: "constant.other.section.delimiter.vim-help"
match: "^===.*===$|^---.*--$"
# Web address
url:
name: "constant.other.reference.link.vim-help"
match: """(?x)
(?:(?:(?:https?|ftp|gopher)://|(?:mailto|file|news):)[^'\\x20\\t<>"]+
|(?:www|web|w3)[a-z0-9_-]*\\.[a-z0-9._-]+\\.[^'\\x20\\t<>"]+)
[a-zA-Z0-9/]
"""
# “For Vim version X.X”
vimVersion:
name: "entity.other.vim-version.vim-help"
match: "\\bVim version [0-9][0-9.a-z]*"
# Other... *things*, I don't even know how to group them.
other:
patterns: [{
# Warning (mild yellow/orange highlighting)
name: "markup.changed.${1:/downcase}.vim-help"
match: "\\b(DEPRECATED|WARNING|(?:Deprecated|Warning)(?=:))(:|\\b)"
captures:
2: name: "keyword.operator.assignment.key-value.colon.vim-help"
},{
# Error (angry red highlighting)
name: "invalid.illegal.error.vim-help"
match: "\\t[* ]Error\\t+[a-z].*"
captures:
1: name: "punctuation.separator.list-item.marker.vim-help"
},{
# Task reminder (yellow in Vim; blue on GitHub)
name: "markup.ignored.todo.vim-help"
match: "\\t[* ]Todo\\t+[a-z].*"
captures:
1: name: "punctuation.separator.list-item.marker.vim-help"
},{
# Comment
name: "comment.line.vim-help"
match: "\\t[* ](Comment)\\t+([a-z].*)"
captures:
1: name: "punctuation.separator.list-item.marker.vim-help"
},{
# Underlined text
name: "constant.other.reference.link.vim-help"
match: "\\t[* ]Underlined\\t+[a-z].*"
},{
# And then... all of this:
name: "meta.${2:/downcase}-line.vim-help"
match: """(?x) \\t (\\*|\\x20)
(Boolean|Character|Conditional|Constant|Debug|Define|Delimiter
|Exception|Float|Function|Identifier|Include|Keyword|Label|Macro
|Number|Operator|PreCondit|PreProc|Repeat|SpecialChar
|SpecialComment|Special|Statement|StorageClass|String
|Structure|Tag|Typedef|Type)
(\\t+ (["Aa-z].*))
"""
captures:
1: name: "punctuation.separator.list-item.marker.vim-help"
2: name: "storage.type.${2:/downcase}.vim-help"
3: name: "meta.output.vim-help"
4: name: "${2:/downcase}.vim-help"
}]
| 170842 | name: "Vim Help"
scopeName: "text.vim-help"
fileTypes: ["doc/vi_diff.txt"] # Dummy extension; assignment handled programmatically
patterns: [{
# Well-formed header declaring *filename.txt*, target Vim version, and timestamp
name: "meta.file-header.vim-help"
match: "(?i)\\A(\\*[#-)!+-~]+\\*)[ \\t]+(For\\s+Vim\\s+version\\s*[\\d.]+)[ \\t]+Last\\s+changed?:\\s*(\\S.*?)\\s*$"
captures:
1: patterns: [include: "#tag"]
2: patterns: [include: "#vimVersion"]
3: name: "constant.numeric.date.last-changed.vim-help"
}, include: "#main"]
repository:
main:
patterns: [
{include: "#tag"}
{include: "#link"}
{include: "#special"}
{include: "#option"}
{include: "#command"}
{include: "#codeBlock"}
{include: "#manualTitle"}
{include: "#columnHeading"}
{include: "#sectionDelimiter"}
{include: "#vimVersion"}
{include: "#url"}
{include: "#other"}
]
# Verbatim code-block
codeBlock:
name: "meta.example.vim-help"
begin: "(?:(?<=\\s)|^)(>)$"
end: "^(<)|(?=^\\S)"
beginCaptures: 1: name: "keyword.control.example.begin.vim-help"
endCaptures: 1: name: "keyword.control.example.end.vim-help"
contentName: "markup.raw.code.verbatim.vim-help"
# Heading~
columnHeading:
name: "markup.heading.column-title.vim-help"
match: "^\\s*\\S.*(~)$"
captures:
1: name: "keyword.operator.column-marker.tilde.vim-help"
# `:command`
command:
name: "markup.raw.command.vim-help"
match: "(`)([^` \\t]+)(`)"
captures:
1: name: "punctuation.definition.link.begin.vim-help"
2: patterns: [include: "source.viml"]
3: name: "punctuation.definition.link.end.vim-help"
# |link|
link:
name: "meta.link.vim-help"
match: '(\\|)([^"*|]+)(\\|)'
captures:
1: name: "meta.separator.punctuation.link.begin.vim-help"
2: name: "constant.other.link.vim-help"
3: name: "meta.separator.punctuation.link.end.vim-help"
# “Special” keywords (according to `help.vim`)
special:
patterns: [{
name: "entity.name.keyword.vim-help"
match: "(<)N(>)"
captures:
1: name: "punctuation.definition.bracket.angle.begin.vim-help"
2: name: "punctuation.definition.bracket.angle.end.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "(<)N(?=\\.(?:$|\\s))"
captures:
1: name: "punctuation.definition.bracket.angle.begin.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "(\\()N(>)"
captures:
1: name: "punctuation.definition.bracket.round.begin.vim-help"
2: name: "punctuation.definition.bracket.angle.end.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "(\\[)N(\\])"
captures:
1: name: "punctuation.definition.bracket.square.begin.vim-help"
2: name: "punctuation.definition.bracket.square.end.vim-help"
},{
match: "(N) (N)"
captures:
1: name: "entity.name.keyword.vim-help"
2: name: "entity.name.keyword.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "N(?=th|-1)"
},{
name: "entity.name.keyword.vim-help"
match: "({)[-a-zA-Z0-9'\"*+/:%#=\\[\\]<>.,]+(})"
captures:
1: name: "punctuation.definition.bracket.curly.begin.vim-help"
2: name: "punctuation.definition.bracket.curly.end.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "(?<=\\s)(\\[)[-a-z^A-Z0-9_]{2,}(\\])"
captures:
1: name: "punctuation.definition.bracket.square.begin.vim-help"
2: name: "punctuation.definition.bracket.square.end.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "(<)[-a-zA-Z0-9_]+(>)"
captures:
1: name: "punctuation.definition.bracket.angle.begin.vim-help"
2: name: "punctuation.definition.bracket.angle.end.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "(<)[SCM]-.(>)"
captures:
1: name: "punctuation.definition.bracket.angle.begin.vim-help"
2: name: "punctuation.definition.bracket.angle.end.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "(\\[)(?:\\+\\+opt|[-+]?num|\\+?cmd|addr|arguments|arg|count|group|ident|line|offset|range)(\\])"
captures:
1: name: "punctuation.definition.bracket.square.begin.vim-help"
2: name: "punctuation.definition.bracket.square.end.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "\\bCTRL(-)(?:.|Break|Del|Insert|PageDown|PageUp|({)char(}))"
captures:
1: name: "punctuation.delimiter.separator.dash.hyphen.vim-help"
2: name: "punctuation.definition.bracket.curly.begin.vim-help"
3: name: "punctuation.definition.bracket.curly.end.vim-help"
}]
# *tag-name*
tag:
name: "storage.link.hypertext.vim-help"
match: "(\\*)[#-)!+-~]+(\\*)(?=\\s|$)"
captures:
1: name: "punctuation.definition.begin.vim-help"
2: name: "punctuation.definition.end.vim-help"
# “VIM REFERENCE MANUAL by <NAME>”
manualTitle:
name: "markup.heading.manual-title.vim-help"
match: "^[ \\t]+(VIM REFERENCE.*)\\s*$"
captures:
1: name: "constant.other.title-text.vim-help"
# 'singlequoted' Vim option
option:
patterns: [{
name: "entity.name.tag.option.vim-help"
match: "(')[a-z]{2,}(')"
captures:
1: name: "punctuation.definition.begin.option.vim-help"
2: name: "punctuation.definition.end.option.vim-help"
}]
# Borders, dividers, whatever
sectionDelimiter:
name: "constant.other.section.delimiter.vim-help"
match: "^===.*===$|^---.*--$"
# Web address
url:
name: "constant.other.reference.link.vim-help"
match: """(?x)
(?:(?:(?:https?|ftp|gopher)://|(?:mailto|file|news):)[^'\\x20\\t<>"]+
|(?:www|web|w3)[a-z0-9_-]*\\.[a-z0-9._-]+\\.[^'\\x20\\t<>"]+)
[a-zA-Z0-9/]
"""
# “For Vim version X.X”
vimVersion:
name: "entity.other.vim-version.vim-help"
match: "\\bVim version [0-9][0-9.a-z]*"
# Other... *things*, I don't even know how to group them.
other:
patterns: [{
# Warning (mild yellow/orange highlighting)
name: "markup.changed.${1:/downcase}.vim-help"
match: "\\b(DEPRECATED|WARNING|(?:Deprecated|Warning)(?=:))(:|\\b)"
captures:
2: name: "keyword.operator.assignment.key-value.colon.vim-help"
},{
# Error (angry red highlighting)
name: "invalid.illegal.error.vim-help"
match: "\\t[* ]Error\\t+[a-z].*"
captures:
1: name: "punctuation.separator.list-item.marker.vim-help"
},{
# Task reminder (yellow in Vim; blue on GitHub)
name: "markup.ignored.todo.vim-help"
match: "\\t[* ]Todo\\t+[a-z].*"
captures:
1: name: "punctuation.separator.list-item.marker.vim-help"
},{
# Comment
name: "comment.line.vim-help"
match: "\\t[* ](Comment)\\t+([a-z].*)"
captures:
1: name: "punctuation.separator.list-item.marker.vim-help"
},{
# Underlined text
name: "constant.other.reference.link.vim-help"
match: "\\t[* ]Underlined\\t+[a-z].*"
},{
# And then... all of this:
name: "meta.${2:/downcase}-line.vim-help"
match: """(?x) \\t (\\*|\\x20)
(Boolean|Character|Conditional|Constant|Debug|Define|Delimiter
|Exception|Float|Function|Identifier|Include|Keyword|Label|Macro
|Number|Operator|PreCondit|PreProc|Repeat|SpecialChar
|SpecialComment|Special|Statement|StorageClass|String
|Structure|Tag|Typedef|Type)
(\\t+ (["Aa-z].*))
"""
captures:
1: name: "punctuation.separator.list-item.marker.vim-help"
2: name: "storage.type.${2:/downcase}.vim-help"
3: name: "meta.output.vim-help"
4: name: "${2:/downcase}.vim-help"
}]
| true | name: "Vim Help"
scopeName: "text.vim-help"
fileTypes: ["doc/vi_diff.txt"] # Dummy extension; assignment handled programmatically
patterns: [{
# Well-formed header declaring *filename.txt*, target Vim version, and timestamp
name: "meta.file-header.vim-help"
match: "(?i)\\A(\\*[#-)!+-~]+\\*)[ \\t]+(For\\s+Vim\\s+version\\s*[\\d.]+)[ \\t]+Last\\s+changed?:\\s*(\\S.*?)\\s*$"
captures:
1: patterns: [include: "#tag"]
2: patterns: [include: "#vimVersion"]
3: name: "constant.numeric.date.last-changed.vim-help"
}, include: "#main"]
repository:
main:
patterns: [
{include: "#tag"}
{include: "#link"}
{include: "#special"}
{include: "#option"}
{include: "#command"}
{include: "#codeBlock"}
{include: "#manualTitle"}
{include: "#columnHeading"}
{include: "#sectionDelimiter"}
{include: "#vimVersion"}
{include: "#url"}
{include: "#other"}
]
# Verbatim code-block
codeBlock:
name: "meta.example.vim-help"
begin: "(?:(?<=\\s)|^)(>)$"
end: "^(<)|(?=^\\S)"
beginCaptures: 1: name: "keyword.control.example.begin.vim-help"
endCaptures: 1: name: "keyword.control.example.end.vim-help"
contentName: "markup.raw.code.verbatim.vim-help"
# Heading~
columnHeading:
name: "markup.heading.column-title.vim-help"
match: "^\\s*\\S.*(~)$"
captures:
1: name: "keyword.operator.column-marker.tilde.vim-help"
# `:command`
command:
name: "markup.raw.command.vim-help"
match: "(`)([^` \\t]+)(`)"
captures:
1: name: "punctuation.definition.link.begin.vim-help"
2: patterns: [include: "source.viml"]
3: name: "punctuation.definition.link.end.vim-help"
# |link|
link:
name: "meta.link.vim-help"
match: '(\\|)([^"*|]+)(\\|)'
captures:
1: name: "meta.separator.punctuation.link.begin.vim-help"
2: name: "constant.other.link.vim-help"
3: name: "meta.separator.punctuation.link.end.vim-help"
# “Special” keywords (according to `help.vim`)
special:
patterns: [{
name: "entity.name.keyword.vim-help"
match: "(<)N(>)"
captures:
1: name: "punctuation.definition.bracket.angle.begin.vim-help"
2: name: "punctuation.definition.bracket.angle.end.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "(<)N(?=\\.(?:$|\\s))"
captures:
1: name: "punctuation.definition.bracket.angle.begin.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "(\\()N(>)"
captures:
1: name: "punctuation.definition.bracket.round.begin.vim-help"
2: name: "punctuation.definition.bracket.angle.end.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "(\\[)N(\\])"
captures:
1: name: "punctuation.definition.bracket.square.begin.vim-help"
2: name: "punctuation.definition.bracket.square.end.vim-help"
},{
match: "(N) (N)"
captures:
1: name: "entity.name.keyword.vim-help"
2: name: "entity.name.keyword.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "N(?=th|-1)"
},{
name: "entity.name.keyword.vim-help"
match: "({)[-a-zA-Z0-9'\"*+/:%#=\\[\\]<>.,]+(})"
captures:
1: name: "punctuation.definition.bracket.curly.begin.vim-help"
2: name: "punctuation.definition.bracket.curly.end.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "(?<=\\s)(\\[)[-a-z^A-Z0-9_]{2,}(\\])"
captures:
1: name: "punctuation.definition.bracket.square.begin.vim-help"
2: name: "punctuation.definition.bracket.square.end.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "(<)[-a-zA-Z0-9_]+(>)"
captures:
1: name: "punctuation.definition.bracket.angle.begin.vim-help"
2: name: "punctuation.definition.bracket.angle.end.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "(<)[SCM]-.(>)"
captures:
1: name: "punctuation.definition.bracket.angle.begin.vim-help"
2: name: "punctuation.definition.bracket.angle.end.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "(\\[)(?:\\+\\+opt|[-+]?num|\\+?cmd|addr|arguments|arg|count|group|ident|line|offset|range)(\\])"
captures:
1: name: "punctuation.definition.bracket.square.begin.vim-help"
2: name: "punctuation.definition.bracket.square.end.vim-help"
},{
name: "entity.name.keyword.vim-help"
match: "\\bCTRL(-)(?:.|Break|Del|Insert|PageDown|PageUp|({)char(}))"
captures:
1: name: "punctuation.delimiter.separator.dash.hyphen.vim-help"
2: name: "punctuation.definition.bracket.curly.begin.vim-help"
3: name: "punctuation.definition.bracket.curly.end.vim-help"
}]
# *tag-name*
tag:
name: "storage.link.hypertext.vim-help"
match: "(\\*)[#-)!+-~]+(\\*)(?=\\s|$)"
captures:
1: name: "punctuation.definition.begin.vim-help"
2: name: "punctuation.definition.end.vim-help"
# “VIM REFERENCE MANUAL by PI:NAME:<NAME>END_PI”
manualTitle:
name: "markup.heading.manual-title.vim-help"
match: "^[ \\t]+(VIM REFERENCE.*)\\s*$"
captures:
1: name: "constant.other.title-text.vim-help"
# 'singlequoted' Vim option
option:
patterns: [{
name: "entity.name.tag.option.vim-help"
match: "(')[a-z]{2,}(')"
captures:
1: name: "punctuation.definition.begin.option.vim-help"
2: name: "punctuation.definition.end.option.vim-help"
}]
# Borders, dividers, whatever
sectionDelimiter:
name: "constant.other.section.delimiter.vim-help"
match: "^===.*===$|^---.*--$"
# Web address
url:
name: "constant.other.reference.link.vim-help"
match: """(?x)
(?:(?:(?:https?|ftp|gopher)://|(?:mailto|file|news):)[^'\\x20\\t<>"]+
|(?:www|web|w3)[a-z0-9_-]*\\.[a-z0-9._-]+\\.[^'\\x20\\t<>"]+)
[a-zA-Z0-9/]
"""
# “For Vim version X.X”
vimVersion:
name: "entity.other.vim-version.vim-help"
match: "\\bVim version [0-9][0-9.a-z]*"
# Other... *things*, I don't even know how to group them.
other:
patterns: [{
# Warning (mild yellow/orange highlighting)
name: "markup.changed.${1:/downcase}.vim-help"
match: "\\b(DEPRECATED|WARNING|(?:Deprecated|Warning)(?=:))(:|\\b)"
captures:
2: name: "keyword.operator.assignment.key-value.colon.vim-help"
},{
# Error (angry red highlighting)
name: "invalid.illegal.error.vim-help"
match: "\\t[* ]Error\\t+[a-z].*"
captures:
1: name: "punctuation.separator.list-item.marker.vim-help"
},{
# Task reminder (yellow in Vim; blue on GitHub)
name: "markup.ignored.todo.vim-help"
match: "\\t[* ]Todo\\t+[a-z].*"
captures:
1: name: "punctuation.separator.list-item.marker.vim-help"
},{
# Comment
name: "comment.line.vim-help"
match: "\\t[* ](Comment)\\t+([a-z].*)"
captures:
1: name: "punctuation.separator.list-item.marker.vim-help"
},{
# Underlined text
name: "constant.other.reference.link.vim-help"
match: "\\t[* ]Underlined\\t+[a-z].*"
},{
# And then... all of this:
name: "meta.${2:/downcase}-line.vim-help"
match: """(?x) \\t (\\*|\\x20)
(Boolean|Character|Conditional|Constant|Debug|Define|Delimiter
|Exception|Float|Function|Identifier|Include|Keyword|Label|Macro
|Number|Operator|PreCondit|PreProc|Repeat|SpecialChar
|SpecialComment|Special|Statement|StorageClass|String
|Structure|Tag|Typedef|Type)
(\\t+ (["Aa-z].*))
"""
captures:
1: name: "punctuation.separator.list-item.marker.vim-help"
2: name: "storage.type.${2:/downcase}.vim-help"
3: name: "meta.output.vim-help"
4: name: "${2:/downcase}.vim-help"
}]
|
[
{
"context": "tectFromCSRF = true\n Batman.config.CSRF_TOKEN = 'configOption!'\n\n @Model.get('all')\n equal @lastRequest.options",
"end": 2829,
"score": 0.7686464190483093,
"start": 2817,
"tag": "PASSWORD",
"value": "configOption"
}
] | tests/batman/extras/rails_extra_test.coffee | davidcornu/batman | 0 | QUnit.module "Batman.Rails: date encoding"
dateEqual = (a, b, args...) ->
equal a.getTime(), b.getTime(), args...
test "it parses ISO 8601 dates without a timezone offset in the local timezone", ->
dateEqual Batman.Encoders.railsDate.decode("2012-01-03T13:35:06"), new Date(2012, 0, 3, 13, 35, 6)
test "it correctly parses ISO 8601 dates with a timezone offset", ->
dateEqual Batman.Encoders.railsDate.decode("2012-01-03T13:35:06-05:00"), new Date(Date.UTC(2012, 0, 3, 18, 35, 6))
dateEqual Batman.Encoders.railsDate.decode("2012-01-03T13:35:06-07:00"), new Date(Date.UTC(2012, 0, 3, 20, 35, 6))
test "it uses the appropriate offset for the given timestamp", ->
sinon.stub Date.prototype, "getTimezoneOffset", -> if this.getFullYear() > 2000 then -660 else -600
dateEqual Batman.Encoders.railsDate.decode("2012-08-09T09:00:00"), new Date(Date.UTC(2012, 7, 8, 22))
dateEqual Batman.Encoders.railsDate.decode("1988-08-09T09:00:00"), new Date(Date.UTC(1988, 7, 8, 23))
QUnit.module "encodeTimestamps",
setup: ->
class @Model extends Batman.Model
test "it should be defined on models", ->
ok Batman.Model.encodeTimestamps
test "with no options should decode created_at and updated_at", ->
@Model.encodeTimestamps()
# FIXME when this is a model testcase, this could use assertDecoders
decoders = []
@Model::_batman.get("encoders").forEach (key, encoder) ->
decoders.push(key) if encoder.decode
notEqual decoders.indexOf('created_at', 'updated_at'), -1
test "should properly decode a Rails date", ->
@Model.encodeTimestamps('tested_at')
instance = @Model.createFromJSON(tested_at: "2012-01-03T13:35:06-05:00")
ok instance.get('tested_at') instanceof Date
dateEqual instance.get('tested_at'), new Date(1325615706000)
QUnit.module "Batman.Rails: CSRF protection",
setup: ->
theTest = this
class MockRailsStorage extends Batman.RailsStorage
request: (env) ->
theTest.lastRequest = env
class @Model extends Batman.Model
@persist MockRailsStorage
test "if protectFromCSRF is false, the request does not include a CSRF header", ->
Batman.config.protectFromCSRF = false
@Model.get('all')
ok !@lastRequest.options.headers?['X-CSRF-Token']
test "if protectFromCSRF is true and the appropriate meta tag exists, the request should include a CSRF header", ->
Batman.config.protectFromCSRF = true
meta = document.createElement('meta')
meta.setAttribute('name', 'csrf-token')
meta.setAttribute('content', 'metaTag!')
document.head.appendChild(meta)
@Model.get('all')
equal @lastRequest.options.headers['X-CSRF-Token'], 'metaTag!'
test "if protectFromCSRF is true and the appropriate config option exists, the request should include a CSRF header", ->
Batman.config.protectFromCSRF = true
Batman.config.CSRF_TOKEN = 'configOption!'
@Model.get('all')
equal @lastRequest.options.headers['X-CSRF-Token'], 'configOption!'
| 119698 | QUnit.module "Batman.Rails: date encoding"
dateEqual = (a, b, args...) ->
equal a.getTime(), b.getTime(), args...
test "it parses ISO 8601 dates without a timezone offset in the local timezone", ->
dateEqual Batman.Encoders.railsDate.decode("2012-01-03T13:35:06"), new Date(2012, 0, 3, 13, 35, 6)
test "it correctly parses ISO 8601 dates with a timezone offset", ->
dateEqual Batman.Encoders.railsDate.decode("2012-01-03T13:35:06-05:00"), new Date(Date.UTC(2012, 0, 3, 18, 35, 6))
dateEqual Batman.Encoders.railsDate.decode("2012-01-03T13:35:06-07:00"), new Date(Date.UTC(2012, 0, 3, 20, 35, 6))
test "it uses the appropriate offset for the given timestamp", ->
sinon.stub Date.prototype, "getTimezoneOffset", -> if this.getFullYear() > 2000 then -660 else -600
dateEqual Batman.Encoders.railsDate.decode("2012-08-09T09:00:00"), new Date(Date.UTC(2012, 7, 8, 22))
dateEqual Batman.Encoders.railsDate.decode("1988-08-09T09:00:00"), new Date(Date.UTC(1988, 7, 8, 23))
QUnit.module "encodeTimestamps",
setup: ->
class @Model extends Batman.Model
test "it should be defined on models", ->
ok Batman.Model.encodeTimestamps
test "with no options should decode created_at and updated_at", ->
@Model.encodeTimestamps()
# FIXME when this is a model testcase, this could use assertDecoders
decoders = []
@Model::_batman.get("encoders").forEach (key, encoder) ->
decoders.push(key) if encoder.decode
notEqual decoders.indexOf('created_at', 'updated_at'), -1
test "should properly decode a Rails date", ->
@Model.encodeTimestamps('tested_at')
instance = @Model.createFromJSON(tested_at: "2012-01-03T13:35:06-05:00")
ok instance.get('tested_at') instanceof Date
dateEqual instance.get('tested_at'), new Date(1325615706000)
QUnit.module "Batman.Rails: CSRF protection",
setup: ->
theTest = this
class MockRailsStorage extends Batman.RailsStorage
request: (env) ->
theTest.lastRequest = env
class @Model extends Batman.Model
@persist MockRailsStorage
test "if protectFromCSRF is false, the request does not include a CSRF header", ->
Batman.config.protectFromCSRF = false
@Model.get('all')
ok !@lastRequest.options.headers?['X-CSRF-Token']
test "if protectFromCSRF is true and the appropriate meta tag exists, the request should include a CSRF header", ->
Batman.config.protectFromCSRF = true
meta = document.createElement('meta')
meta.setAttribute('name', 'csrf-token')
meta.setAttribute('content', 'metaTag!')
document.head.appendChild(meta)
@Model.get('all')
equal @lastRequest.options.headers['X-CSRF-Token'], 'metaTag!'
test "if protectFromCSRF is true and the appropriate config option exists, the request should include a CSRF header", ->
Batman.config.protectFromCSRF = true
Batman.config.CSRF_TOKEN = '<PASSWORD>!'
@Model.get('all')
equal @lastRequest.options.headers['X-CSRF-Token'], 'configOption!'
| true | QUnit.module "Batman.Rails: date encoding"
dateEqual = (a, b, args...) ->
equal a.getTime(), b.getTime(), args...
test "it parses ISO 8601 dates without a timezone offset in the local timezone", ->
dateEqual Batman.Encoders.railsDate.decode("2012-01-03T13:35:06"), new Date(2012, 0, 3, 13, 35, 6)
test "it correctly parses ISO 8601 dates with a timezone offset", ->
dateEqual Batman.Encoders.railsDate.decode("2012-01-03T13:35:06-05:00"), new Date(Date.UTC(2012, 0, 3, 18, 35, 6))
dateEqual Batman.Encoders.railsDate.decode("2012-01-03T13:35:06-07:00"), new Date(Date.UTC(2012, 0, 3, 20, 35, 6))
test "it uses the appropriate offset for the given timestamp", ->
sinon.stub Date.prototype, "getTimezoneOffset", -> if this.getFullYear() > 2000 then -660 else -600
dateEqual Batman.Encoders.railsDate.decode("2012-08-09T09:00:00"), new Date(Date.UTC(2012, 7, 8, 22))
dateEqual Batman.Encoders.railsDate.decode("1988-08-09T09:00:00"), new Date(Date.UTC(1988, 7, 8, 23))
QUnit.module "encodeTimestamps",
setup: ->
class @Model extends Batman.Model
test "it should be defined on models", ->
ok Batman.Model.encodeTimestamps
test "with no options should decode created_at and updated_at", ->
@Model.encodeTimestamps()
# FIXME when this is a model testcase, this could use assertDecoders
decoders = []
@Model::_batman.get("encoders").forEach (key, encoder) ->
decoders.push(key) if encoder.decode
notEqual decoders.indexOf('created_at', 'updated_at'), -1
test "should properly decode a Rails date", ->
@Model.encodeTimestamps('tested_at')
instance = @Model.createFromJSON(tested_at: "2012-01-03T13:35:06-05:00")
ok instance.get('tested_at') instanceof Date
dateEqual instance.get('tested_at'), new Date(1325615706000)
QUnit.module "Batman.Rails: CSRF protection",
setup: ->
theTest = this
class MockRailsStorage extends Batman.RailsStorage
request: (env) ->
theTest.lastRequest = env
class @Model extends Batman.Model
@persist MockRailsStorage
test "if protectFromCSRF is false, the request does not include a CSRF header", ->
Batman.config.protectFromCSRF = false
@Model.get('all')
ok !@lastRequest.options.headers?['X-CSRF-Token']
test "if protectFromCSRF is true and the appropriate meta tag exists, the request should include a CSRF header", ->
Batman.config.protectFromCSRF = true
meta = document.createElement('meta')
meta.setAttribute('name', 'csrf-token')
meta.setAttribute('content', 'metaTag!')
document.head.appendChild(meta)
@Model.get('all')
equal @lastRequest.options.headers['X-CSRF-Token'], 'metaTag!'
test "if protectFromCSRF is true and the appropriate config option exists, the request should include a CSRF header", ->
Batman.config.protectFromCSRF = true
Batman.config.CSRF_TOKEN = 'PI:PASSWORD:<PASSWORD>END_PI!'
@Model.get('all')
equal @lastRequest.options.headers['X-CSRF-Token'], 'configOption!'
|
[
{
"context": "#*\n# @fileoverview Common utils for AST.\n# @author Gyandeep Singh\n###\n\n'use strict'\n\nastUtils = require '../eslint-",
"end": 67,
"score": 0.9998416900634766,
"start": 53,
"tag": "NAME",
"value": "Gyandeep Singh"
}
] | src/util/ast-utils.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Common utils for AST.
# @author Gyandeep Singh
###
'use strict'
astUtils = require '../eslint-ast-utils'
{getStaticPropertyName} = astUtils
anyLoopPattern = /^WhileStatement|For$/
#------------------------------------------------------------------------------
# Public Interface
#------------------------------------------------------------------------------
###*
# Get the precedence level based on the node type
# @param {ASTNode} node node to evaluate
# @returns {int} precedence level
# @private
###
getPrecedence = (node) ->
switch node.type
# when 'SequenceExpression'
# return 0
when 'AssignmentExpression' # ,'ArrowFunctionExpression', 'YieldExpression'
return 1
# when 'ConditionalExpression'
# return 3
when 'LogicalExpression'
switch node.operator
when '?'
return 3
when '||', 'or'
return 4
when '&&', 'and'
return 5
# no default
### falls through ###
when 'BinaryExpression'
switch node.operator
when '|'
return 6
when '^'
return 7
when '&'
return 8
when '==', '!=', '===', '!=='
return 9
when '<', '<=', '>', '>=', 'in', 'instanceof'
return 10
when '<<', '>>', '>>>'
return 11
when '+', '-'
return 12
when '*', '/', '%'
return 13
when '**'
return 15
# no default
### falls through ###
# when 'UnaryExpression', 'AwaitExpression'
# return 16
# when 'UpdateExpression'
# return 17
# when 'CallExpression'
# return 18
# when 'NewExpression'
# return 19
# else
# return 20
isLoop = (node) ->
!!(node and anyLoopPattern.test node.type)
isInLoop = (node) ->
currentNode = node
while currentNode and not astUtils.isFunction currentNode
return yes if isLoop currentNode
currentNode = currentNode.parent
no
getFunctionName = (node) ->
return null unless (
node?.type is 'FunctionExpression' and
node.parent.type is 'AssignmentExpression' and
node.parent.left.type is 'Identifier'
)
node.parent.left.name
###*
# Gets the name and kind of the given function node.
#
# - `function foo() {}` .................... `function 'foo'`
# - `(function foo() {})` .................. `function 'foo'`
# - `(function() {})` ...................... `function`
# - `function* foo() {}` ................... `generator function 'foo'`
# - `(function* foo() {})` ................. `generator function 'foo'`
# - `(function*() {})` ..................... `generator function`
# - `() => {}` ............................. `arrow function`
# - `async () => {}` ....................... `async arrow function`
# - `({ foo: function foo() {} })` ......... `method 'foo'`
# - `({ foo: function() {} })` ............. `method 'foo'`
# - `({ ['foo']: function() {} })` ......... `method 'foo'`
# - `({ [foo]: function() {} })` ........... `method`
# - `({ foo() {} })` ....................... `method 'foo'`
# - `({ foo: function* foo() {} })` ........ `generator method 'foo'`
# - `({ foo: function*() {} })` ............ `generator method 'foo'`
# - `({ ['foo']: function*() {} })` ........ `generator method 'foo'`
# - `({ [foo]: function*() {} })` .......... `generator method`
# - `({ *foo() {} })` ...................... `generator method 'foo'`
# - `({ foo: async function foo() {} })` ... `async method 'foo'`
# - `({ foo: async function() {} })` ....... `async method 'foo'`
# - `({ ['foo']: async function() {} })` ... `async method 'foo'`
# - `({ [foo]: async function() {} })` ..... `async method`
# - `({ async foo() {} })` ................. `async method 'foo'`
# - `({ get foo() {} })` ................... `getter 'foo'`
# - `({ set foo(a) {} })` .................. `setter 'foo'`
# - `class A { constructor() {} }` ......... `constructor`
# - `class A { foo() {} }` ................. `method 'foo'`
# - `class A { *foo() {} }` ................ `generator method 'foo'`
# - `class A { async foo() {} }` ........... `async method 'foo'`
# - `class A { ['foo']() {} }` ............. `method 'foo'`
# - `class A { *['foo']() {} }` ............ `generator method 'foo'`
# - `class A { async ['foo']() {} }` ....... `async method 'foo'`
# - `class A { [foo]() {} }` ............... `method`
# - `class A { *[foo]() {} }` .............. `generator method`
# - `class A { async [foo]() {} }` ......... `async method`
# - `class A { get foo() {} }` ............. `getter 'foo'`
# - `class A { set foo(a) {} }` ............ `setter 'foo'`
# - `class A { static foo() {} }` .......... `static method 'foo'`
# - `class A { static *foo() {} }` ......... `static generator method 'foo'`
# - `class A { static async foo() {} }` .... `static async method 'foo'`
# - `class A { static get foo() {} }` ...... `static getter 'foo'`
# - `class A { static set foo(a) {} }` ..... `static setter 'foo'`
#
# @param {ASTNode} node - The function node to get.
# @returns {string} The name and kind of the function node.
###
getFunctionNameWithKind = (node) ->
{parent} = node
tokens = []
if parent.type is 'MethodDefinition' and parent.static
tokens.push 'static'
if node.async then tokens.push 'async'
if node.generator then tokens.push 'generator'
if node.type is 'ArrowFunctionExpression'
tokens.push 'arrow', 'function'
else if parent.type in ['Property', 'MethodDefinition']
return 'constructor' if parent.kind is 'constructor'
if parent.kind is 'get'
tokens.push 'getter'
else if parent.kind is 'set'
tokens.push 'setter'
else
tokens.push 'method'
else
tokens.push 'function'
name = getFunctionName node
name ?= getStaticPropertyName parent
tokens.push "'#{name}'" if name
tokens.join ' '
isIife = (func) ->
return no unless func?.type is 'FunctionExpression'
return yes if (
func.parent.type is 'UnaryExpression' and func.parent.operator is 'do'
)
return yes if (
func.parent.type is 'CallExpression' and func.parent.callee is func
)
no
hasIndentedLastLine = ({node, sourceCode}) ->
return no unless node.loc.start.line < node.loc.end.line
lastLineText =
sourceCode.getText()[(node.range[1] - node.loc.end.column)...node.range[1]]
match = /^\s+/.exec lastLineText
return no unless match
lastLineIndent = match[0]
lastLineIndent.length + 1 > node.loc.start.column
containsDeclaration = (node) ->
switch node?.type
when 'Identifier'
node.declaration
when 'ObjectPattern'
for prop in node.properties
return yes if containsDeclaration prop
no
when 'Property'
containsDeclaration node.value
when 'RestElement'
containsDeclaration node.argument
when 'ArrayPattern'
for element in node.elements
return yes if containsDeclaration element
no
when 'AssignmentPattern'
containsDeclaration node.left
isDeclarationAssignment = (node) ->
return no unless node?.type is 'AssignmentExpression'
containsDeclaration node.left
getDeclarationAssignmentAncestor = (node) ->
current = node
while current
switch current?.type
when 'AssignmentExpression'
return current
when 'ObjectPattern', 'ArrayPattern', 'Property', 'RestElement', 'Identifier', 'AssignmentPattern'
current = current.parent
else
return null
null
isFatArrowFunction = (node) ->
return unless node?
{bound, type, parent} = node
type is 'ArrowFunctionExpression' or
bound or
(parent?.type is 'MethodDefinition' and parent.bound)
isBoundMethod = (node) ->
return unless node?
{parent} = node
parent?.type is 'MethodDefinition' and parent.bound
convertCommentToJsStyleJsdoc = (comment) ->
comment.replace /^(\s*)#/gm, '$1*'
module.exports = {
getPrecedence
isInLoop
getFunctionName
getFunctionNameWithKind
isIife
hasIndentedLastLine
isDeclarationAssignment
isFatArrowFunction
isBoundMethod
convertCommentToJsStyleJsdoc
getDeclarationAssignmentAncestor
}
| 75622 | ###*
# @fileoverview Common utils for AST.
# @author <NAME>
###
'use strict'
astUtils = require '../eslint-ast-utils'
{getStaticPropertyName} = astUtils
anyLoopPattern = /^WhileStatement|For$/
#------------------------------------------------------------------------------
# Public Interface
#------------------------------------------------------------------------------
###*
# Get the precedence level based on the node type
# @param {ASTNode} node node to evaluate
# @returns {int} precedence level
# @private
###
getPrecedence = (node) ->
switch node.type
# when 'SequenceExpression'
# return 0
when 'AssignmentExpression' # ,'ArrowFunctionExpression', 'YieldExpression'
return 1
# when 'ConditionalExpression'
# return 3
when 'LogicalExpression'
switch node.operator
when '?'
return 3
when '||', 'or'
return 4
when '&&', 'and'
return 5
# no default
### falls through ###
when 'BinaryExpression'
switch node.operator
when '|'
return 6
when '^'
return 7
when '&'
return 8
when '==', '!=', '===', '!=='
return 9
when '<', '<=', '>', '>=', 'in', 'instanceof'
return 10
when '<<', '>>', '>>>'
return 11
when '+', '-'
return 12
when '*', '/', '%'
return 13
when '**'
return 15
# no default
### falls through ###
# when 'UnaryExpression', 'AwaitExpression'
# return 16
# when 'UpdateExpression'
# return 17
# when 'CallExpression'
# return 18
# when 'NewExpression'
# return 19
# else
# return 20
isLoop = (node) ->
!!(node and anyLoopPattern.test node.type)
isInLoop = (node) ->
currentNode = node
while currentNode and not astUtils.isFunction currentNode
return yes if isLoop currentNode
currentNode = currentNode.parent
no
getFunctionName = (node) ->
return null unless (
node?.type is 'FunctionExpression' and
node.parent.type is 'AssignmentExpression' and
node.parent.left.type is 'Identifier'
)
node.parent.left.name
###*
# Gets the name and kind of the given function node.
#
# - `function foo() {}` .................... `function 'foo'`
# - `(function foo() {})` .................. `function 'foo'`
# - `(function() {})` ...................... `function`
# - `function* foo() {}` ................... `generator function 'foo'`
# - `(function* foo() {})` ................. `generator function 'foo'`
# - `(function*() {})` ..................... `generator function`
# - `() => {}` ............................. `arrow function`
# - `async () => {}` ....................... `async arrow function`
# - `({ foo: function foo() {} })` ......... `method 'foo'`
# - `({ foo: function() {} })` ............. `method 'foo'`
# - `({ ['foo']: function() {} })` ......... `method 'foo'`
# - `({ [foo]: function() {} })` ........... `method`
# - `({ foo() {} })` ....................... `method 'foo'`
# - `({ foo: function* foo() {} })` ........ `generator method 'foo'`
# - `({ foo: function*() {} })` ............ `generator method 'foo'`
# - `({ ['foo']: function*() {} })` ........ `generator method 'foo'`
# - `({ [foo]: function*() {} })` .......... `generator method`
# - `({ *foo() {} })` ...................... `generator method 'foo'`
# - `({ foo: async function foo() {} })` ... `async method 'foo'`
# - `({ foo: async function() {} })` ....... `async method 'foo'`
# - `({ ['foo']: async function() {} })` ... `async method 'foo'`
# - `({ [foo]: async function() {} })` ..... `async method`
# - `({ async foo() {} })` ................. `async method 'foo'`
# - `({ get foo() {} })` ................... `getter 'foo'`
# - `({ set foo(a) {} })` .................. `setter 'foo'`
# - `class A { constructor() {} }` ......... `constructor`
# - `class A { foo() {} }` ................. `method 'foo'`
# - `class A { *foo() {} }` ................ `generator method 'foo'`
# - `class A { async foo() {} }` ........... `async method 'foo'`
# - `class A { ['foo']() {} }` ............. `method 'foo'`
# - `class A { *['foo']() {} }` ............ `generator method 'foo'`
# - `class A { async ['foo']() {} }` ....... `async method 'foo'`
# - `class A { [foo]() {} }` ............... `method`
# - `class A { *[foo]() {} }` .............. `generator method`
# - `class A { async [foo]() {} }` ......... `async method`
# - `class A { get foo() {} }` ............. `getter 'foo'`
# - `class A { set foo(a) {} }` ............ `setter 'foo'`
# - `class A { static foo() {} }` .......... `static method 'foo'`
# - `class A { static *foo() {} }` ......... `static generator method 'foo'`
# - `class A { static async foo() {} }` .... `static async method 'foo'`
# - `class A { static get foo() {} }` ...... `static getter 'foo'`
# - `class A { static set foo(a) {} }` ..... `static setter 'foo'`
#
# @param {ASTNode} node - The function node to get.
# @returns {string} The name and kind of the function node.
###
getFunctionNameWithKind = (node) ->
{parent} = node
tokens = []
if parent.type is 'MethodDefinition' and parent.static
tokens.push 'static'
if node.async then tokens.push 'async'
if node.generator then tokens.push 'generator'
if node.type is 'ArrowFunctionExpression'
tokens.push 'arrow', 'function'
else if parent.type in ['Property', 'MethodDefinition']
return 'constructor' if parent.kind is 'constructor'
if parent.kind is 'get'
tokens.push 'getter'
else if parent.kind is 'set'
tokens.push 'setter'
else
tokens.push 'method'
else
tokens.push 'function'
name = getFunctionName node
name ?= getStaticPropertyName parent
tokens.push "'#{name}'" if name
tokens.join ' '
isIife = (func) ->
return no unless func?.type is 'FunctionExpression'
return yes if (
func.parent.type is 'UnaryExpression' and func.parent.operator is 'do'
)
return yes if (
func.parent.type is 'CallExpression' and func.parent.callee is func
)
no
hasIndentedLastLine = ({node, sourceCode}) ->
return no unless node.loc.start.line < node.loc.end.line
lastLineText =
sourceCode.getText()[(node.range[1] - node.loc.end.column)...node.range[1]]
match = /^\s+/.exec lastLineText
return no unless match
lastLineIndent = match[0]
lastLineIndent.length + 1 > node.loc.start.column
containsDeclaration = (node) ->
switch node?.type
when 'Identifier'
node.declaration
when 'ObjectPattern'
for prop in node.properties
return yes if containsDeclaration prop
no
when 'Property'
containsDeclaration node.value
when 'RestElement'
containsDeclaration node.argument
when 'ArrayPattern'
for element in node.elements
return yes if containsDeclaration element
no
when 'AssignmentPattern'
containsDeclaration node.left
isDeclarationAssignment = (node) ->
return no unless node?.type is 'AssignmentExpression'
containsDeclaration node.left
getDeclarationAssignmentAncestor = (node) ->
current = node
while current
switch current?.type
when 'AssignmentExpression'
return current
when 'ObjectPattern', 'ArrayPattern', 'Property', 'RestElement', 'Identifier', 'AssignmentPattern'
current = current.parent
else
return null
null
isFatArrowFunction = (node) ->
return unless node?
{bound, type, parent} = node
type is 'ArrowFunctionExpression' or
bound or
(parent?.type is 'MethodDefinition' and parent.bound)
isBoundMethod = (node) ->
return unless node?
{parent} = node
parent?.type is 'MethodDefinition' and parent.bound
convertCommentToJsStyleJsdoc = (comment) ->
comment.replace /^(\s*)#/gm, '$1*'
module.exports = {
getPrecedence
isInLoop
getFunctionName
getFunctionNameWithKind
isIife
hasIndentedLastLine
isDeclarationAssignment
isFatArrowFunction
isBoundMethod
convertCommentToJsStyleJsdoc
getDeclarationAssignmentAncestor
}
| true | ###*
# @fileoverview Common utils for AST.
# @author PI:NAME:<NAME>END_PI
###
'use strict'
astUtils = require '../eslint-ast-utils'
{getStaticPropertyName} = astUtils
anyLoopPattern = /^WhileStatement|For$/
#------------------------------------------------------------------------------
# Public Interface
#------------------------------------------------------------------------------
###*
# Get the precedence level based on the node type
# @param {ASTNode} node node to evaluate
# @returns {int} precedence level
# @private
###
getPrecedence = (node) ->
switch node.type
# when 'SequenceExpression'
# return 0
when 'AssignmentExpression' # ,'ArrowFunctionExpression', 'YieldExpression'
return 1
# when 'ConditionalExpression'
# return 3
when 'LogicalExpression'
switch node.operator
when '?'
return 3
when '||', 'or'
return 4
when '&&', 'and'
return 5
# no default
### falls through ###
when 'BinaryExpression'
switch node.operator
when '|'
return 6
when '^'
return 7
when '&'
return 8
when '==', '!=', '===', '!=='
return 9
when '<', '<=', '>', '>=', 'in', 'instanceof'
return 10
when '<<', '>>', '>>>'
return 11
when '+', '-'
return 12
when '*', '/', '%'
return 13
when '**'
return 15
# no default
### falls through ###
# when 'UnaryExpression', 'AwaitExpression'
# return 16
# when 'UpdateExpression'
# return 17
# when 'CallExpression'
# return 18
# when 'NewExpression'
# return 19
# else
# return 20
isLoop = (node) ->
!!(node and anyLoopPattern.test node.type)
isInLoop = (node) ->
currentNode = node
while currentNode and not astUtils.isFunction currentNode
return yes if isLoop currentNode
currentNode = currentNode.parent
no
getFunctionName = (node) ->
return null unless (
node?.type is 'FunctionExpression' and
node.parent.type is 'AssignmentExpression' and
node.parent.left.type is 'Identifier'
)
node.parent.left.name
###*
# Gets the name and kind of the given function node.
#
# - `function foo() {}` .................... `function 'foo'`
# - `(function foo() {})` .................. `function 'foo'`
# - `(function() {})` ...................... `function`
# - `function* foo() {}` ................... `generator function 'foo'`
# - `(function* foo() {})` ................. `generator function 'foo'`
# - `(function*() {})` ..................... `generator function`
# - `() => {}` ............................. `arrow function`
# - `async () => {}` ....................... `async arrow function`
# - `({ foo: function foo() {} })` ......... `method 'foo'`
# - `({ foo: function() {} })` ............. `method 'foo'`
# - `({ ['foo']: function() {} })` ......... `method 'foo'`
# - `({ [foo]: function() {} })` ........... `method`
# - `({ foo() {} })` ....................... `method 'foo'`
# - `({ foo: function* foo() {} })` ........ `generator method 'foo'`
# - `({ foo: function*() {} })` ............ `generator method 'foo'`
# - `({ ['foo']: function*() {} })` ........ `generator method 'foo'`
# - `({ [foo]: function*() {} })` .......... `generator method`
# - `({ *foo() {} })` ...................... `generator method 'foo'`
# - `({ foo: async function foo() {} })` ... `async method 'foo'`
# - `({ foo: async function() {} })` ....... `async method 'foo'`
# - `({ ['foo']: async function() {} })` ... `async method 'foo'`
# - `({ [foo]: async function() {} })` ..... `async method`
# - `({ async foo() {} })` ................. `async method 'foo'`
# - `({ get foo() {} })` ................... `getter 'foo'`
# - `({ set foo(a) {} })` .................. `setter 'foo'`
# - `class A { constructor() {} }` ......... `constructor`
# - `class A { foo() {} }` ................. `method 'foo'`
# - `class A { *foo() {} }` ................ `generator method 'foo'`
# - `class A { async foo() {} }` ........... `async method 'foo'`
# - `class A { ['foo']() {} }` ............. `method 'foo'`
# - `class A { *['foo']() {} }` ............ `generator method 'foo'`
# - `class A { async ['foo']() {} }` ....... `async method 'foo'`
# - `class A { [foo]() {} }` ............... `method`
# - `class A { *[foo]() {} }` .............. `generator method`
# - `class A { async [foo]() {} }` ......... `async method`
# - `class A { get foo() {} }` ............. `getter 'foo'`
# - `class A { set foo(a) {} }` ............ `setter 'foo'`
# - `class A { static foo() {} }` .......... `static method 'foo'`
# - `class A { static *foo() {} }` ......... `static generator method 'foo'`
# - `class A { static async foo() {} }` .... `static async method 'foo'`
# - `class A { static get foo() {} }` ...... `static getter 'foo'`
# - `class A { static set foo(a) {} }` ..... `static setter 'foo'`
#
# @param {ASTNode} node - The function node to get.
# @returns {string} The name and kind of the function node.
###
getFunctionNameWithKind = (node) ->
{parent} = node
tokens = []
if parent.type is 'MethodDefinition' and parent.static
tokens.push 'static'
if node.async then tokens.push 'async'
if node.generator then tokens.push 'generator'
if node.type is 'ArrowFunctionExpression'
tokens.push 'arrow', 'function'
else if parent.type in ['Property', 'MethodDefinition']
return 'constructor' if parent.kind is 'constructor'
if parent.kind is 'get'
tokens.push 'getter'
else if parent.kind is 'set'
tokens.push 'setter'
else
tokens.push 'method'
else
tokens.push 'function'
name = getFunctionName node
name ?= getStaticPropertyName parent
tokens.push "'#{name}'" if name
tokens.join ' '
isIife = (func) ->
return no unless func?.type is 'FunctionExpression'
return yes if (
func.parent.type is 'UnaryExpression' and func.parent.operator is 'do'
)
return yes if (
func.parent.type is 'CallExpression' and func.parent.callee is func
)
no
hasIndentedLastLine = ({node, sourceCode}) ->
return no unless node.loc.start.line < node.loc.end.line
lastLineText =
sourceCode.getText()[(node.range[1] - node.loc.end.column)...node.range[1]]
match = /^\s+/.exec lastLineText
return no unless match
lastLineIndent = match[0]
lastLineIndent.length + 1 > node.loc.start.column
containsDeclaration = (node) ->
switch node?.type
when 'Identifier'
node.declaration
when 'ObjectPattern'
for prop in node.properties
return yes if containsDeclaration prop
no
when 'Property'
containsDeclaration node.value
when 'RestElement'
containsDeclaration node.argument
when 'ArrayPattern'
for element in node.elements
return yes if containsDeclaration element
no
when 'AssignmentPattern'
containsDeclaration node.left
isDeclarationAssignment = (node) ->
return no unless node?.type is 'AssignmentExpression'
containsDeclaration node.left
getDeclarationAssignmentAncestor = (node) ->
current = node
while current
switch current?.type
when 'AssignmentExpression'
return current
when 'ObjectPattern', 'ArrayPattern', 'Property', 'RestElement', 'Identifier', 'AssignmentPattern'
current = current.parent
else
return null
null
isFatArrowFunction = (node) ->
  # Truthy when the node corresponds to a fat-arrow (`=>`) function: it is an
  # ArrowFunctionExpression, it carries the `bound` flag itself, or it is the
  # value of a bound MethodDefinition. Returns undefined for a missing node.
  return unless node?
  isArrow = node.type is 'ArrowFunctionExpression'
  boundMethodDef = node.parent?.type is 'MethodDefinition' and node.parent.bound
  isArrow or node.bound or boundMethodDef
isBoundMethod = (node) ->
  # True when `node` is the function value of a bound class method, i.e. its
  # MethodDefinition parent carries the `bound` flag. Returns undefined when
  # no node is given.
  return unless node?
  node.parent?.type is 'MethodDefinition' and node.parent.bound
convertCommentToJsStyleJsdoc = (comment) ->
  # Rewrite each line's leading CoffeeScript `#` marker as a JSDoc-style `*`,
  # keeping the line's leading indentation intact.
  comment
    .split '\n'
    .map (line) -> line.replace /^(\s*)#/, '$1*'
    .join '\n'
# Public surface of this AST-helper module: predicates and formatters used by
# the surrounding conversion/lint rules.
module.exports = {
  getPrecedence
  isInLoop
  getFunctionName
  getFunctionNameWithKind
  isIife
  hasIndentedLastLine
  isDeclarationAssignment
  isFatArrowFunction
  isBoundMethod
  convertCommentToJsStyleJsdoc
  getDeclarationAssignmentAncestor
}
|
[
{
"context": " 'owner': true,\n 'username': 'turunc'\n }\n ],\n 'groups': [\n ",
"end": 994,
"score": 0.999693751335144,
"start": 988,
"tag": "USERNAME",
"value": "turunc"
},
{
"context": " 'owner': true,\n 'username'... | client/mocks/mock.jcomputestack.coffee | ezgikaysi/koding | 1 | module.exports = {
'watchers': {},
'bongo_': {
'constructorName': 'JComputeStack',
'instanceId': '6a09ee09672e773b077b27a18385b702'
},
'title': 'My Stack',
'originId': '569e54e73577d1b63864cc9f',
'group': 'turunc-t-38',
'baseStackId': '56aa08f0a6e74bce51cc885f',
'stackRevision': 'a58e8b8f51dc70e5f7316dccacfb8a05eaad9b14',
'machines': [
{
'id': 'kd-1217',
'options': {},
'data': {
'watchers': {},
'bongo_': {
'constructorName': 'JMachine',
'instanceId': 'cf27e4f8a927e3af48bc89297472a380'
},
'uid': 'utta2eac5ad0',
'domain': 'utta2eac5ad0.turunc',
'provider': 'aws',
'label': 'mymachine_1',
'slug': 'mymachine-1',
'provisioners': [],
'credential': '2f49083619c06a9f0a6039df83907748',
'users': [
{
'id': '569e54e73577d1b63864cc9e',
'sudo': true,
'owner': true,
'username': 'turunc'
}
],
'groups': [
{
'id': '56aa03f83a7307cd51c34047'
}
],
'createdAt': '2016-01-28T13:10:01.910Z',
'status': {
'state': 'NotInitialized',
'modifiedAt': '2016-01-28T13:10:01.910Z'
},
'meta': {
'type': 'aws',
'region': 'us-east-1',
'instance_type': 't2.nano',
'storage_size': 8,
'assignedLabel': 'mymachine_1'
},
'assignee': {
'inProgress': false,
'assignedAt': '2016-01-28T13:10:01.910Z'
},
'generatedFrom': {
'templateId': '56aa08f0a6e74bce51cc885f',
'revision': 'a58e8b8f51dc70e5f7316dccacfb8a05eaad9b14'
},
'_id': '56aa1329a6e74bce51cc886e'
},
'_e': {
'newListener': [],
'error': [
null
],
'ready': [
null
]
},
'_maxListeners': 10,
'jMachine': {
'watchers': {},
'bongo_': {
'constructorName': 'JMachine',
'instanceId': 'cf27e4f8a927e3af48bc89297472a380'
},
'uid': 'utta2eac5ad0',
'domain': 'utta2eac5ad0.turunc',
'provider': 'aws',
'label': 'mymachine_1',
'slug': 'mymachine-1',
'provisioners': [],
'credential': '2f49083619c06a9f0a6039df83907748',
'users': [
{
'id': '569e54e73577d1b63864cc9e',
'sudo': true,
'owner': true,
'username': 'turunc'
}
],
'groups': [
{
'id': '56aa03f83a7307cd51c34047'
}
],
'createdAt': '2016-01-28T13:10:01.910Z',
'status': {
'state': 'NotInitialized',
'modifiedAt': '2016-01-28T13:10:01.910Z'
},
'meta': {
'type': 'aws',
'region': 'us-east-1',
'instance_type': 't2.nano',
'storage_size': 8,
'assignedLabel': 'mymachine_1'
},
'assignee': {
'inProgress': false,
'assignedAt': '2016-01-28T13:10:01.910Z'
},
'generatedFrom': {
'templateId': '56aa08f0a6e74bce51cc885f',
'revision': 'a58e8b8f51dc70e5f7316dccacfb8a05eaad9b14'
},
'_id': '56aa1329a6e74bce51cc886e'
},
'label': 'mymachine_1',
'_id': '56aa1329a6e74bce51cc886e',
'provisioners': [],
'provider': 'aws',
'credential': '2f49083619c06a9f0a6039df83907748',
'status': {
'state': 'NotInitialized',
'modifiedAt': '2016-01-28T13:10:01.910Z'
},
'uid': 'utta2eac5ad0',
'domain': 'utta2eac5ad0.turunc',
'slug': 'mymachine-1',
'alwaysOn': false,
'fs': {}
}
],
'config': {
'requiredData': {
'user': [
'username'
],
'group': [
'slug'
]
},
'requiredProviders': [
'aws',
'koding'
],
'verified': true,
'groupStack': true
},
'meta': {
'data': {
'createdAt': '2016-01-28T13:10:01.889Z',
'modifiedAt': '2016-01-28T13:10:01.889Z',
'likes': 0
},
'createdAt': '2016-01-28T13:10:01.889Z',
'modifiedAt': '2016-01-28T13:10:01.889Z',
'likes': 0
},
'credentials': {
'aws': [
'2f49083619c06a9f0a6039df83907748'
]
},
'status': {
'state': 'NotInitialized'
},
'_id': '56aa1329a6e74bce51cc886d',
'_revisionStatus': {
'error': null,
'status': {
'message': 'Base stack template is same',
'code': 0
}
}
}
| 33917 | module.exports = {
'watchers': {},
'bongo_': {
'constructorName': 'JComputeStack',
'instanceId': '6a09ee09672e773b077b27a18385b702'
},
'title': 'My Stack',
'originId': '569e54e73577d1b63864cc9f',
'group': 'turunc-t-38',
'baseStackId': '56aa08f0a6e74bce51cc885f',
'stackRevision': 'a58e8b8f51dc70e5f7316dccacfb8a05eaad9b14',
'machines': [
{
'id': 'kd-1217',
'options': {},
'data': {
'watchers': {},
'bongo_': {
'constructorName': 'JMachine',
'instanceId': 'cf27e4f8a927e3af48bc89297472a380'
},
'uid': 'utta2eac5ad0',
'domain': 'utta2eac5ad0.turunc',
'provider': 'aws',
'label': 'mymachine_1',
'slug': 'mymachine-1',
'provisioners': [],
'credential': '2f49083619c06a9f0a6039df83907748',
'users': [
{
'id': '569e54e73577d1b63864cc9e',
'sudo': true,
'owner': true,
'username': 'turunc'
}
],
'groups': [
{
'id': '56aa03f83a7307cd51c34047'
}
],
'createdAt': '2016-01-28T13:10:01.910Z',
'status': {
'state': 'NotInitialized',
'modifiedAt': '2016-01-28T13:10:01.910Z'
},
'meta': {
'type': 'aws',
'region': 'us-east-1',
'instance_type': 't2.nano',
'storage_size': 8,
'assignedLabel': 'mymachine_1'
},
'assignee': {
'inProgress': false,
'assignedAt': '2016-01-28T13:10:01.910Z'
},
'generatedFrom': {
'templateId': '56aa08f0a6e74bce51cc885f',
'revision': 'a58e8b8f51dc70e5f7316dccacfb8a05eaad9b14'
},
'_id': '56aa1329a6e74bce51cc886e'
},
'_e': {
'newListener': [],
'error': [
null
],
'ready': [
null
]
},
'_maxListeners': 10,
'jMachine': {
'watchers': {},
'bongo_': {
'constructorName': 'JMachine',
'instanceId': 'cf27e4f8a927e3af48bc89297472a380'
},
'uid': 'utta2eac5ad0',
'domain': 'utta2eac5ad0.turunc',
'provider': 'aws',
'label': 'mymachine_1',
'slug': 'mymachine-1',
'provisioners': [],
'credential': '2f49083619c06a9f0a6039df83907748',
'users': [
{
'id': '569e54e73577d1b63864cc9e',
'sudo': true,
'owner': true,
'username': 'turunc'
}
],
'groups': [
{
'id': '56aa03f83a7307cd51c34047'
}
],
'createdAt': '2016-01-28T13:10:01.910Z',
'status': {
'state': 'NotInitialized',
'modifiedAt': '2016-01-28T13:10:01.910Z'
},
'meta': {
'type': 'aws',
'region': 'us-east-1',
'instance_type': 't2.nano',
'storage_size': 8,
'assignedLabel': 'mymachine_1'
},
'assignee': {
'inProgress': false,
'assignedAt': '2016-01-28T13:10:01.910Z'
},
'generatedFrom': {
'templateId': '56aa08f0a6e74bce51cc885f',
'revision': 'a58e8b8f51dc70e5f7316dccacfb8a05eaad9b14'
},
'_id': '56aa1329a6e74bce51cc886e'
},
'label': 'mymachine_1',
'_id': '56aa1329a6e74bce51cc886e',
'provisioners': [],
'provider': 'aws',
'credential': '<KEY>',
'status': {
'state': 'NotInitialized',
'modifiedAt': '2016-01-28T13:10:01.910Z'
},
'uid': 'utta2eac5ad0',
'domain': 'utta2eac5ad0.turunc',
'slug': 'mymachine-1',
'alwaysOn': false,
'fs': {}
}
],
'config': {
'requiredData': {
'user': [
'username'
],
'group': [
'slug'
]
},
'requiredProviders': [
'aws',
'koding'
],
'verified': true,
'groupStack': true
},
'meta': {
'data': {
'createdAt': '2016-01-28T13:10:01.889Z',
'modifiedAt': '2016-01-28T13:10:01.889Z',
'likes': 0
},
'createdAt': '2016-01-28T13:10:01.889Z',
'modifiedAt': '2016-01-28T13:10:01.889Z',
'likes': 0
},
'credentials': {
'aws': [
'2<KEY>'
]
},
'status': {
'state': 'NotInitialized'
},
'_id': '56aa1329a6e74bce51cc886d',
'_revisionStatus': {
'error': null,
'status': {
'message': 'Base stack template is same',
'code': 0
}
}
}
| true | module.exports = {
'watchers': {},
'bongo_': {
'constructorName': 'JComputeStack',
'instanceId': '6a09ee09672e773b077b27a18385b702'
},
'title': 'My Stack',
'originId': '569e54e73577d1b63864cc9f',
'group': 'turunc-t-38',
'baseStackId': '56aa08f0a6e74bce51cc885f',
'stackRevision': 'a58e8b8f51dc70e5f7316dccacfb8a05eaad9b14',
'machines': [
{
'id': 'kd-1217',
'options': {},
'data': {
'watchers': {},
'bongo_': {
'constructorName': 'JMachine',
'instanceId': 'cf27e4f8a927e3af48bc89297472a380'
},
'uid': 'utta2eac5ad0',
'domain': 'utta2eac5ad0.turunc',
'provider': 'aws',
'label': 'mymachine_1',
'slug': 'mymachine-1',
'provisioners': [],
'credential': '2f49083619c06a9f0a6039df83907748',
'users': [
{
'id': '569e54e73577d1b63864cc9e',
'sudo': true,
'owner': true,
'username': 'turunc'
}
],
'groups': [
{
'id': '56aa03f83a7307cd51c34047'
}
],
'createdAt': '2016-01-28T13:10:01.910Z',
'status': {
'state': 'NotInitialized',
'modifiedAt': '2016-01-28T13:10:01.910Z'
},
'meta': {
'type': 'aws',
'region': 'us-east-1',
'instance_type': 't2.nano',
'storage_size': 8,
'assignedLabel': 'mymachine_1'
},
'assignee': {
'inProgress': false,
'assignedAt': '2016-01-28T13:10:01.910Z'
},
'generatedFrom': {
'templateId': '56aa08f0a6e74bce51cc885f',
'revision': 'a58e8b8f51dc70e5f7316dccacfb8a05eaad9b14'
},
'_id': '56aa1329a6e74bce51cc886e'
},
'_e': {
'newListener': [],
'error': [
null
],
'ready': [
null
]
},
'_maxListeners': 10,
'jMachine': {
'watchers': {},
'bongo_': {
'constructorName': 'JMachine',
'instanceId': 'cf27e4f8a927e3af48bc89297472a380'
},
'uid': 'utta2eac5ad0',
'domain': 'utta2eac5ad0.turunc',
'provider': 'aws',
'label': 'mymachine_1',
'slug': 'mymachine-1',
'provisioners': [],
'credential': '2f49083619c06a9f0a6039df83907748',
'users': [
{
'id': '569e54e73577d1b63864cc9e',
'sudo': true,
'owner': true,
'username': 'turunc'
}
],
'groups': [
{
'id': '56aa03f83a7307cd51c34047'
}
],
'createdAt': '2016-01-28T13:10:01.910Z',
'status': {
'state': 'NotInitialized',
'modifiedAt': '2016-01-28T13:10:01.910Z'
},
'meta': {
'type': 'aws',
'region': 'us-east-1',
'instance_type': 't2.nano',
'storage_size': 8,
'assignedLabel': 'mymachine_1'
},
'assignee': {
'inProgress': false,
'assignedAt': '2016-01-28T13:10:01.910Z'
},
'generatedFrom': {
'templateId': '56aa08f0a6e74bce51cc885f',
'revision': 'a58e8b8f51dc70e5f7316dccacfb8a05eaad9b14'
},
'_id': '56aa1329a6e74bce51cc886e'
},
'label': 'mymachine_1',
'_id': '56aa1329a6e74bce51cc886e',
'provisioners': [],
'provider': 'aws',
'credential': 'PI:KEY:<KEY>END_PI',
'status': {
'state': 'NotInitialized',
'modifiedAt': '2016-01-28T13:10:01.910Z'
},
'uid': 'utta2eac5ad0',
'domain': 'utta2eac5ad0.turunc',
'slug': 'mymachine-1',
'alwaysOn': false,
'fs': {}
}
],
'config': {
'requiredData': {
'user': [
'username'
],
'group': [
'slug'
]
},
'requiredProviders': [
'aws',
'koding'
],
'verified': true,
'groupStack': true
},
'meta': {
'data': {
'createdAt': '2016-01-28T13:10:01.889Z',
'modifiedAt': '2016-01-28T13:10:01.889Z',
'likes': 0
},
'createdAt': '2016-01-28T13:10:01.889Z',
'modifiedAt': '2016-01-28T13:10:01.889Z',
'likes': 0
},
'credentials': {
'aws': [
'2PI:KEY:<KEY>END_PI'
]
},
'status': {
'state': 'NotInitialized'
},
'_id': '56aa1329a6e74bce51cc886d',
'_revisionStatus': {
'error': null,
'status': {
'message': 'Base stack template is same',
'code': 0
}
}
}
|
[
{
"context": "###\n * promise 信息\n * @author jackie Lin <dashi_lin@163.com>\n * @date 2016-3-9\n###\n'use st",
"end": 39,
"score": 0.999794602394104,
"start": 29,
"tag": "NAME",
"value": "jackie Lin"
},
{
"context": "###\n * promise 信息\n * @author jackie Lin <dashi_lin@163.com>\n * @... | promise.coffee | JackieLin/promise | 3 | ###
* promise 信息
* @author jackie Lin <dashi_lin@163.com>
* @date 2016-3-9
###
'use strict'
((window) ->
###
* promise list
###
promises = []
Promise = (cb=->)->
@.init()
@._cb = cb
###
* 0 - pending
* 1 - fulfilled with _value
* 2 - rejected with _value
* 3 - adopted the state of another promise, _value
###
@._status = 0
@._value = null
# then list
@._deferred = []
# 是否异步加载(false: 同步)
@._async = false
# 入栈操作
promises.push @
@._cb.apply null, [@.resolve.bind(@), @.reject.bind(@)]
###
* 绑定方法到对应的上下文
###
bind = (context=@)->
throw new TypeError '绑定对象应该是一个方法' if @ isnt 'function'
aArgs = Array.prototype.slice.call arguments, 1
fToBind = @
warpperFunc = ->
fToBind.apply context, aArgs.concat Array.prototype.slice.call arguments
warpperFunc
###
* 初始化操作
###
Promise::init = ->
@._status = 0
@._value = null
Function.prototype.bind = bind if not Function.prototype.bind
###
* 执行 then 队列
###
Promise::resolve = (res)->
@._status = 1
@._value = res
@.run()
# 异步,跳转到上一个 promise
@.next()
@
Promise::reject = (res)->
@._status = 2
@._value = res
@
###
* 抛异常处理
###
Promise::fail = (cb)->
return false if @._status isnt 2
@._value = cb.apply @, [@._value]
@
###
* 下一步控制
###
Promise::next = ->
# 没有 done 结束,同步到上一层
@.notify() if not @._done and promises.length and not @._deferred.length
###
* 执行 then 方法
###
Promise::run = ->
@.doThen()
if @._done
@.doDone @._done
###
* then 方法
###
Promise::then = (cb)->
if @._status in [0, 3]
@._async = true
@._deferred.push cb
# resolve 已经触发, 直接执行 then 方法
if @._status is 1
@._async = false
@.handleThen cb
@
###
* 执行 then
###
Promise::doThen = ->
if not @._deferred.length
return
if @._deferred.length and @._status is 1
@handleThen @._deferred.shift(), =>
@.doThen()
###
* 执行回调方法
###
Promise::handleThen = (func, callback=->)->
_value = func.apply @, [@._value]
# promise 异步加载
if _value instanceof Promise and _value._async
@._status = 3
# promise 同步加载
else if _value instanceof Promise and not _value._async
@._value = _value._value
# 弹出最后一个
promises.pop()
callback.apply @
else
@._value = _value
callback.apply @
###
* 将对应的值同步到上一个空间
###
Promise::notify = ->
prev = getPrevPromise()
if prev
prev._value = @._value
# 删除掉最后一个
promises.pop()
prev._status = 1
prev.run()
###
* 获取上一个 promise 列表
###
getPrevPromise = ->
length = promises.length
prev = null
if length > 1
prev = promises[length - 2]
prev
Promise::doDone = (done)->
@._value = done.apply @, [@._value]
@.notify()
promises.pop()
if promises.length
prev = promises[promises.length - 1]
# 重置状态
prev._status = 1
prev.run()
###
* 结束
###
Promise::done = (cb)->
if @._status in [0, 3]
@._done = cb
if @._status is 1
@.doThen()
@.doDone cb
@
# amd
return window.Promise = Promise if not window.define
if window.define
window.define ->
Promise
) window
| 14376 | ###
* promise 信息
* @author <NAME> <<EMAIL>>
* @date 2016-3-9
###
'use strict'
((window) ->
###
* promise list
###
promises = []
Promise = (cb=->)->
@.init()
@._cb = cb
###
* 0 - pending
* 1 - fulfilled with _value
* 2 - rejected with _value
* 3 - adopted the state of another promise, _value
###
@._status = 0
@._value = null
# then list
@._deferred = []
# 是否异步加载(false: 同步)
@._async = false
# 入栈操作
promises.push @
@._cb.apply null, [@.resolve.bind(@), @.reject.bind(@)]
###
* 绑定方法到对应的上下文
###
bind = (context=@)->
throw new TypeError '绑定对象应该是一个方法' if @ isnt 'function'
aArgs = Array.prototype.slice.call arguments, 1
fToBind = @
warpperFunc = ->
fToBind.apply context, aArgs.concat Array.prototype.slice.call arguments
warpperFunc
###
* 初始化操作
###
Promise::init = ->
@._status = 0
@._value = null
Function.prototype.bind = bind if not Function.prototype.bind
###
* 执行 then 队列
###
Promise::resolve = (res)->
@._status = 1
@._value = res
@.run()
# 异步,跳转到上一个 promise
@.next()
@
Promise::reject = (res)->
@._status = 2
@._value = res
@
###
* 抛异常处理
###
Promise::fail = (cb)->
return false if @._status isnt 2
@._value = cb.apply @, [@._value]
@
###
* 下一步控制
###
Promise::next = ->
# 没有 done 结束,同步到上一层
@.notify() if not @._done and promises.length and not @._deferred.length
###
* 执行 then 方法
###
Promise::run = ->
@.doThen()
if @._done
@.doDone @._done
###
* then 方法
###
Promise::then = (cb)->
if @._status in [0, 3]
@._async = true
@._deferred.push cb
# resolve 已经触发, 直接执行 then 方法
if @._status is 1
@._async = false
@.handleThen cb
@
###
* 执行 then
###
Promise::doThen = ->
if not @._deferred.length
return
if @._deferred.length and @._status is 1
@handleThen @._deferred.shift(), =>
@.doThen()
###
* 执行回调方法
###
Promise::handleThen = (func, callback=->)->
_value = func.apply @, [@._value]
# promise 异步加载
if _value instanceof Promise and _value._async
@._status = 3
# promise 同步加载
else if _value instanceof Promise and not _value._async
@._value = _value._value
# 弹出最后一个
promises.pop()
callback.apply @
else
@._value = _value
callback.apply @
###
* 将对应的值同步到上一个空间
###
Promise::notify = ->
prev = getPrevPromise()
if prev
prev._value = @._value
# 删除掉最后一个
promises.pop()
prev._status = 1
prev.run()
###
* 获取上一个 promise 列表
###
getPrevPromise = ->
length = promises.length
prev = null
if length > 1
prev = promises[length - 2]
prev
Promise::doDone = (done)->
@._value = done.apply @, [@._value]
@.notify()
promises.pop()
if promises.length
prev = promises[promises.length - 1]
# 重置状态
prev._status = 1
prev.run()
###
* 结束
###
Promise::done = (cb)->
if @._status in [0, 3]
@._done = cb
if @._status is 1
@.doThen()
@.doDone cb
@
# amd
return window.Promise = Promise if not window.define
if window.define
window.define ->
Promise
) window
| true | ###
* promise 信息
* @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
* @date 2016-3-9
###
'use strict'
((window) ->
###
* promise list
###
promises = []
Promise = (cb=->)->
@.init()
@._cb = cb
###
* 0 - pending
* 1 - fulfilled with _value
* 2 - rejected with _value
* 3 - adopted the state of another promise, _value
###
@._status = 0
@._value = null
# then list
@._deferred = []
# 是否异步加载(false: 同步)
@._async = false
# 入栈操作
promises.push @
@._cb.apply null, [@.resolve.bind(@), @.reject.bind(@)]
###
* 绑定方法到对应的上下文
###
bind = (context=@)->
throw new TypeError '绑定对象应该是一个方法' if @ isnt 'function'
aArgs = Array.prototype.slice.call arguments, 1
fToBind = @
warpperFunc = ->
fToBind.apply context, aArgs.concat Array.prototype.slice.call arguments
warpperFunc
###
* 初始化操作
###
Promise::init = ->
@._status = 0
@._value = null
Function.prototype.bind = bind if not Function.prototype.bind
###
* 执行 then 队列
###
Promise::resolve = (res)->
@._status = 1
@._value = res
@.run()
# 异步,跳转到上一个 promise
@.next()
@
Promise::reject = (res)->
@._status = 2
@._value = res
@
###
* 抛异常处理
###
Promise::fail = (cb)->
return false if @._status isnt 2
@._value = cb.apply @, [@._value]
@
###
* 下一步控制
###
Promise::next = ->
# 没有 done 结束,同步到上一层
@.notify() if not @._done and promises.length and not @._deferred.length
###
* 执行 then 方法
###
Promise::run = ->
@.doThen()
if @._done
@.doDone @._done
###
* then 方法
###
Promise::then = (cb)->
if @._status in [0, 3]
@._async = true
@._deferred.push cb
# resolve 已经触发, 直接执行 then 方法
if @._status is 1
@._async = false
@.handleThen cb
@
###
* 执行 then
###
Promise::doThen = ->
if not @._deferred.length
return
if @._deferred.length and @._status is 1
@handleThen @._deferred.shift(), =>
@.doThen()
###
* 执行回调方法
###
Promise::handleThen = (func, callback=->)->
_value = func.apply @, [@._value]
# promise 异步加载
if _value instanceof Promise and _value._async
@._status = 3
# promise 同步加载
else if _value instanceof Promise and not _value._async
@._value = _value._value
# 弹出最后一个
promises.pop()
callback.apply @
else
@._value = _value
callback.apply @
###
* 将对应的值同步到上一个空间
###
Promise::notify = ->
prev = getPrevPromise()
if prev
prev._value = @._value
# 删除掉最后一个
promises.pop()
prev._status = 1
prev.run()
###
* 获取上一个 promise 列表
###
getPrevPromise = ->
length = promises.length
prev = null
if length > 1
prev = promises[length - 2]
prev
Promise::doDone = (done)->
@._value = done.apply @, [@._value]
@.notify()
promises.pop()
if promises.length
prev = promises[promises.length - 1]
# 重置状态
prev._status = 1
prev.run()
###
* 结束
###
Promise::done = (cb)->
if @._status in [0, 3]
@._done = cb
if @._status is 1
@.doThen()
@.doDone cb
@
# amd
return window.Promise = Promise if not window.define
if window.define
window.define ->
Promise
) window
|
[
{
"context": "_PAGE_SIZE\n 'reverse': false\n 'memberKey': 'member'\n 'rankKey': 'rank'\n 'scoreKey': 'score'\n ",
"end": 438,
"score": 0.8852642178535461,
"start": 432,
"tag": "KEY",
"value": "member"
},
{
"context": ": false\n 'memberKey': 'member'\n 'rankKey': '... | src/tie_ranking_leaderboard.coffee | KeKs0r/leaderboard-coffeescript | 22 | Leaderboard = require './leaderboard'
class TieRankingLeaderboard extends Leaderboard
###
# Default page size: 25
###
@DEFAULT_PAGE_SIZE = 25
###
# Default options when creating a leaderboard. Page size is 25 and reverse
# is set to false, meaning various methods will return results in
# highest-to-lowest order.
###
DEFAULT_OPTIONS =
'pageSize': @DEFAULT_PAGE_SIZE
'reverse': false
'memberKey': 'member'
'rankKey': 'rank'
'scoreKey': 'score'
'memberDataKey': 'member_data'
'memberDataNamespace': 'member_data'
'tiesNamespace': 'ties'
###
# Default Redis host: localhost
###
@DEFAULT_REDIS_HOST = 'localhost'
###
# Default Redis post: 6379
###
@DEFAULT_REDIS_PORT = 6379
###
# Default Redis options when creating a connection to Redis. The
# +DEFAULT_REDIS_HOST+ and +DEFAULT_REDIS_PORT+ will be passed.
###
DEFAULT_REDIS_OPTIONS =
'host': @DEFAULT_REDIS_HOST
'port': @DEFAULT_REDIS_PORT
constructor: (leaderboardName, options = DEFAULT_OPTIONS, redisOptions = DEFAULT_REDIS_OPTIONS) ->
super
@tiesNamespace = options['tiesNamespace'] || 'ties'
###
# Delete the named leaderboard.
#
# @param leaderboardName [String] Name of the leaderboard.
# @param callback Optional callback for result of call.
###
deleteLeaderboardNamed: (leaderboardName, callback) ->
transaction = @redisConnection.multi()
transaction.del(leaderboardName)
transaction.del(this.memberDataKey(leaderboardName))
transaction.del(this.tiesLeaderboardKey(leaderboardName))
transaction.exec((err, reply) ->
callback(reply) if callback)
###
# Change the score for a member in the named leaderboard by a delta which can be positive or negative.
#
# @param leaderboardName [String] Name of the leaderboard.
# @param member [String] Member name.
# @param delta [float] Score change.
# @param callback Optional callback for result of call.
###
changeScoreForMemberIn: (leaderboardName, member, delta, callback) ->
this.scoreFor(member, (score) =>
newScore = score + delta
@redisConnection.zrevrangebyscore(leaderboardName, score, score, (err, totalMembers) =>
transaction = @redisConnection.multi()
transaction.zadd(leaderboardName, newScore, member)
transaction.zadd(this.tiesLeaderboardKey(leaderboardName), newScore, newScore)
transaction.exec((err, reply) =>
if totalMembers.length == 1
@redisConnection.zrem(this.tiesLeaderboardKey(leaderboardName), score)
callback(reply) if callback
)
)
)
###
# Rank a member in the named leaderboard.
#
# @param leaderboardName [String] Name of the leaderboard.
# @param member [String] Member name.
# @param score [float] Member score.
# @param memberData [String] Optional member data.
# @param callback Optional callback for result of call.
###
rankMemberIn: (leaderboardName, member, score, memberData = null, callback) ->
transaction = @redisConnection.multi()
transaction.zadd(leaderboardName, score, member)
transaction.zadd(this.tiesLeaderboardKey(leaderboardName), score, score)
transaction.hset(this.memberDataKey(leaderboardName), member, memberData) if memberData?
transaction.exec((err, reply) ->
callback(reply) if callback)
###
# Rank a member across multiple leaderboards.
#
# @param leaderboards [Array] Leaderboard names.
# @param member [String] Member name.
# @param score [float] Member score.
# @param member_data [String] Optional member data.
###
rankMemberAcross: (leaderboardNames, member, score, memberData = null, callback) ->
transaction = @redisConnection.multi()
for leaderboardName in leaderboardNames
transaction.zadd(leaderboardName, score, member)
transaction.zadd(this.tiesLeaderboardKey(leaderboardName), score, score)
transaction.hset(this.memberDataKey(leaderboardName), member, memberData) if memberData?
transaction.exec((err, reply) ->
callback(reply) if callback)
###
# Rank an array of members in the named leaderboard.
#
# @param leaderboardName [String] Name of the leaderboard.
# @param membersAndScores [Array] Variable list of members and scores
# @param callback Optional callback for result of call.
###
rankMembersIn: (leaderboardName, membersAndScores, callback) ->
transaction = @redisConnection.multi()
for index in [0...membersAndScores.length] by 2
slice = membersAndScores[index...index + 2]
transaction.zadd(leaderboardName, slice[1], slice[0])
transaction.zadd(this.tiesLeaderboardKey(leaderboardName), slice[0], slice[0])
transaction.exec((err, reply) ->
callback(reply) if callback)
###
# Remove a member from the named leaderboard.
#
# @param leaderboardName [String] Name of the leaderboard.
# @param member [String] Member name.
# @param callback Optional callback for result of call.
###
removeMemberFrom: (leaderboardName, member, callback) ->
@redisConnection.zscore(leaderboardName, member, (err, score) =>
if score?
if @reverse
@redisConnection.zrangebyscore(leaderboardName, score, score, (err, members) =>
transaction = @redisConnection.multi()
transaction.zrem(leaderboardName, member)
transaction.zrem(this.tiesLeaderboardKey(leaderboardName), score) if members.length == 1
transaction.hdel(this.memberDataKey(leaderboardName), member)
transaction.exec((err, reply) =>
callback(reply) if callback
)
)
else
@redisConnection.zrevrangebyscore(leaderboardName, score, score, (err, members) =>
transaction = @redisConnection.multi()
transaction.zrem(leaderboardName, member)
transaction.zrem(this.tiesLeaderboardKey(leaderboardName), score) if members.length == 1
transaction.hdel(this.memberDataKey(leaderboardName), member)
transaction.exec((err, reply) =>
callback(reply) if callback
)
)
else
callback(null) if callback
)
###
# Retrieve the rank for a member in the named leaderboard.
#
# @param leaderboardName [String] Name of the leaderboard.
# @param member [String] Member name.
# @param callback Callback for result of call.
#
# @return the rank for a member in the leaderboard.
###
rankForIn: (leaderboardName, member, callback) ->
@redisConnection.zscore(leaderboardName, member, (err, score) =>
if @reverse
@redisConnection.zrank(this.tiesLeaderboardKey(leaderboardName), score, (err, rank) =>
callback(rank + 1))
else
@redisConnection.zrevrank(this.tiesLeaderboardKey(leaderboardName), score, (err, rank) =>
callback(rank + 1))
)
###
# Retrieve the score and rank for a member in the named leaderboard.
#
# @param leaderboardName [String]Name of the leaderboard.
# @param member [String] Member name.
# @param callback Callback for result of call.
#
# @return the score and rank for a member in the named leaderboard as a Hash.
###
scoreAndRankForIn: (leaderboardName, member, callback) ->
@redisConnection.zscore(leaderboardName, member, (err, memberScore) =>
transaction = @redisConnection.multi()
transaction.zscore(leaderboardName, member)
if @reverse
transaction.zrank(this.tiesLeaderboardKey(leaderboardName), memberScore)
else
transaction.zrevrank(this.tiesLeaderboardKey(leaderboardName), memberScore)
transaction.exec((err, replies) =>
if replies
scoreAndRankData = {}
if replies[0]?
scoreAndRankData[@scoreKeyOption] = parseFloat(replies[0])
else
scoreAndRankData[@scoreKeyOption] = null
if replies[1]?
scoreAndRankData[@rankKeyOption] = replies[1] + 1
else
scoreAndRankData[@rankKeyOption] = null
scoreAndRankData[@memberKeyOption] = member
callback(scoreAndRankData)))
###
# Remove members from the named leaderboard in a given score range.
#
# @param leaderboardName [String] Name of the leaderboard.
# @param minScore [float] Minimum score.
# @param maxScore [float] Maximum score.
# @param callback Optional callback for result of call.
###
removeMembersInScoreRangeIn: (leaderboardName, minScore, maxScore, callback) ->
transaction = @redisConnection.multi()
transaction.zremrangebyscore(leaderboardName, minScore, maxScore)
transaction.zremrangebyscore(this.tiesLeaderboardKey(leaderboardName), minScore, maxScore)
transaction.exec((err, replies) ->
callback(replies) if callback)
###
# Expire the given leaderboard in a set number of seconds. Do not use this with
# leaderboards that utilize member data as there is no facility to cascade the
# expiration out to the keys for the member data.
#
# @param leaderboardName [String] Name of the leaderboard.
# @param seconds [int] Number of seconds after which the leaderboard will be expired.
# @param callback Optional callback for result of call.
###
expireLeaderboardFor: (leaderboardName, seconds, callback) ->
transaction = @redisConnection.multi()
transaction.expire(leaderboardName, seconds)
transaction.expire(this.tiesLeaderboardKey(leaderboardName), seconds)
transaction.expire(this.memberDataKey(leaderboardName), seconds)
transaction.exec((err, replies) ->
callback(replies) if callback)
###
# Expire the given leaderboard at a specific UNIX timestamp. Do not use this with
# leaderboards that utilize member data as there is no facility to cascade the
# expiration out to the keys for the member data.
#
# @param leaderboardName [String] Name of the leaderboard.
# @param timestamp [int] UNIX timestamp at which the leaderboard will be expired.
# @param callback Optional callback for result of call.
###
expireLeaderboardAtFor: (leaderboardName, timestamp, callback) ->
transaction = @redisConnection.multi()
transaction.expireat(leaderboardName, timestamp)
transaction.expireat(this.tiesLeaderboardKey(leaderboardName), timestamp)
transaction.expireat(this.memberDataKey(leaderboardName), timestamp)
transaction.exec((err, replies) ->
callback(replies) if callback)
###
# Retrieve a page of leaders from the named leaderboard for a given list of members.
#
# @param leaderboardName [String] Name of the leaderboard.
# @param members [Array] Member names.
# @param options [Hash] Options to be used when retrieving the page from the named leaderboard.
# @param callback Callback for result of call.
#
# @return a page of leaders from the named leaderboard for a given list of members.
###
rankedInListIn: (leaderboardName, members, options = {}, callback) ->
if not members? or members.length == 0
return callback([])
ranksForMembers = []
transaction = @redisConnection.multi()
unless options['membersOnly']
for member in members
if @reverse
transaction.zrank(leaderboardName, member)
else
transaction.zrevrank(leaderboardName, member)
transaction.zscore(leaderboardName, member)
transaction.exec((err, replies) =>
for member, index in members
do (member) =>
data = {}
data[@memberKeyOption] = member
unless options['membersOnly']
if replies[index * 2 + 1]
data[@scoreKeyOption] = parseFloat(replies[index * 2 + 1])
else
data[@scoreKeyOption] = null
data[@rankKeyOption] = null
# Retrieve optional member data based on options['withMemberData']
if options['withMemberData']
this.memberDataForIn leaderboardName, member, (memberdata) =>
data[@memberDataKeyOption] = memberdata
if @reverse
@redisConnection.zrank(this.tiesLeaderboardKey(leaderboardName), data[@scoreKeyOption], (err, reply) =>
data[@rankKeyOption] = reply + 1
ranksForMembers.push(data)
# Sort if options['sortBy']
if ranksForMembers.length == members.length
switch options['sortBy']
when 'rank'
ranksForMembers.sort((a, b) ->
a.rank > b.rank)
when 'score'
ranksForMembers.sort((a, b) ->
a.score > b.score)
callback(ranksForMembers))
else
@redisConnection.zrevrank(this.tiesLeaderboardKey(leaderboardName), data[@scoreKeyOption], (err, reply) =>
data[@rankKeyOption] = reply + 1
ranksForMembers.push(data)
# Sort if options['sortBy']
if ranksForMembers.length == members.length
switch options['sortBy']
when 'rank'
ranksForMembers.sort((a, b) ->
a.rank > b.rank)
when 'score'
ranksForMembers.sort((a, b) ->
a.score > b.score)
callback(ranksForMembers))
else
if @reverse
@redisConnection.zrank(this.tiesLeaderboardKey(leaderboardName), data[@scoreKeyOption], (err, reply) =>
data[@rankKeyOption] = reply + 1
ranksForMembers.push(data)
# Sort if options['sortBy']
if ranksForMembers.length == members.length
switch options['sortBy']
when 'rank'
ranksForMembers.sort((a, b) ->
a.rank > b.rank)
when 'score'
ranksForMembers.sort((a, b) ->
a.score > b.score)
callback(ranksForMembers))
else
@redisConnection.zrevrank(this.tiesLeaderboardKey(leaderboardName), data[@scoreKeyOption], (err, reply) =>
data[@rankKeyOption] = reply + 1
ranksForMembers.push(data)
# Sort if options['sortBy']
if ranksForMembers.length == members.length
switch options['sortBy']
when 'rank'
ranksForMembers.sort((a, b) ->
a.rank > b.rank)
when 'score'
ranksForMembers.sort((a, b) ->
a.score > b.score)
callback(ranksForMembers))
)
tiesLeaderboardKey: (leaderboardName) ->
"#{leaderboardName}:#{@tiesNamespace}"
module.exports = TieRankingLeaderboard
| 77320 | Leaderboard = require './leaderboard'
class TieRankingLeaderboard extends Leaderboard
  ###
  # Default page size: 25
  ###
  @DEFAULT_PAGE_SIZE = 25

  ###
  # Default options when creating a leaderboard. Page size is 25 and reverse
  # is set to false, meaning various methods will return results in
  # highest-to-lowest order.
  ###
  DEFAULT_OPTIONS =
    'pageSize': @DEFAULT_PAGE_SIZE
    'reverse': false
    'memberKey': 'member'
    'rankKey': 'rank'
    'scoreKey': 'score'
    'memberDataKey': 'member_data'
    'memberDataNamespace': 'member_data'
    'tiesNamespace': 'ties'

  ###
  # Default Redis host: localhost
  ###
  @DEFAULT_REDIS_HOST = 'localhost'

  ###
  # Default Redis port: 6379
  ###
  @DEFAULT_REDIS_PORT = 6379

  ###
  # Default Redis options when creating a connection to Redis. The
  # +DEFAULT_REDIS_HOST+ and +DEFAULT_REDIS_PORT+ will be passed.
  ###
  DEFAULT_REDIS_OPTIONS =
    'host': @DEFAULT_REDIS_HOST
    'port': @DEFAULT_REDIS_PORT

  ###
  # Leaderboard in which members with the same score share the same rank.
  # A companion "ties" sorted set holds one entry per distinct score; a
  # member's rank is the rank of its score within that set, plus one.
  ###
  constructor: (leaderboardName, options = DEFAULT_OPTIONS, redisOptions = DEFAULT_REDIS_OPTIONS) ->
    super
    @tiesNamespace = options['tiesNamespace'] || 'ties'

  ###
  # Delete the named leaderboard.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param callback Optional callback for result of call.
  ###
  deleteLeaderboardNamed: (leaderboardName, callback) ->
    transaction = @redisConnection.multi()
    transaction.del(leaderboardName)
    transaction.del(this.memberDataKey(leaderboardName))
    transaction.del(this.tiesLeaderboardKey(leaderboardName))
    transaction.exec((err, reply) ->
      callback(reply) if callback)

  ###
  # Change the score for a member in the named leaderboard by a delta which can be positive or negative.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param member [String] Member name.
  # @param delta [float] Score change.
  # @param callback Optional callback for result of call.
  ###
  changeScoreForMemberIn: (leaderboardName, member, delta, callback) ->
    this.scoreFor(member, (score) =>
      newScore = score + delta
      # Count how many members currently hold the old score; if this member
      # was the only one, the old score must be removed from the ties set.
      @redisConnection.zrevrangebyscore(leaderboardName, score, score, (err, totalMembers) =>
        transaction = @redisConnection.multi()
        transaction.zadd(leaderboardName, newScore, member)
        transaction.zadd(this.tiesLeaderboardKey(leaderboardName), newScore, newScore)
        transaction.exec((err, reply) =>
          if totalMembers.length == 1
            @redisConnection.zrem(this.tiesLeaderboardKey(leaderboardName), score)
          callback(reply) if callback
        )
      )
    )

  ###
  # Rank a member in the named leaderboard.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param member [String] Member name.
  # @param score [float] Member score.
  # @param memberData [String] Optional member data.
  # @param callback Optional callback for result of call.
  ###
  rankMemberIn: (leaderboardName, member, score, memberData = null, callback) ->
    transaction = @redisConnection.multi()
    transaction.zadd(leaderboardName, score, member)
    # The ties set stores each distinct score once, keyed by itself.
    transaction.zadd(this.tiesLeaderboardKey(leaderboardName), score, score)
    transaction.hset(this.memberDataKey(leaderboardName), member, memberData) if memberData?
    transaction.exec((err, reply) ->
      callback(reply) if callback)

  ###
  # Rank a member across multiple leaderboards.
  #
  # @param leaderboards [Array] Leaderboard names.
  # @param member [String] Member name.
  # @param score [float] Member score.
  # @param member_data [String] Optional member data.
  ###
  rankMemberAcross: (leaderboardNames, member, score, memberData = null, callback) ->
    transaction = @redisConnection.multi()
    for leaderboardName in leaderboardNames
      transaction.zadd(leaderboardName, score, member)
      transaction.zadd(this.tiesLeaderboardKey(leaderboardName), score, score)
      transaction.hset(this.memberDataKey(leaderboardName), member, memberData) if memberData?
    transaction.exec((err, reply) ->
      callback(reply) if callback)

  ###
  # Rank an array of members in the named leaderboard.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param membersAndScores [Array] Variable list of members and scores
  # @param callback Optional callback for result of call.
  ###
  rankMembersIn: (leaderboardName, membersAndScores, callback) ->
    transaction = @redisConnection.multi()
    for index in [0...membersAndScores.length] by 2
      slice = membersAndScores[index...index + 2]
      transaction.zadd(leaderboardName, slice[1], slice[0])
      # Fix: the ties set must receive the score (slice[1]) as both score and
      # member, matching rankMemberIn; the original wrote the member name.
      transaction.zadd(this.tiesLeaderboardKey(leaderboardName), slice[1], slice[1])
    transaction.exec((err, reply) ->
      callback(reply) if callback)

  ###
  # Remove a member from the named leaderboard.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param member [String] Member name.
  # @param callback Optional callback for result of call.
  ###
  removeMemberFrom: (leaderboardName, member, callback) ->
    @redisConnection.zscore(leaderboardName, member, (err, score) =>
      if score?
        if @reverse
          @redisConnection.zrangebyscore(leaderboardName, score, score, (err, members) =>
            transaction = @redisConnection.multi()
            transaction.zrem(leaderboardName, member)
            # Drop the score from the ties set only when no other member holds it.
            transaction.zrem(this.tiesLeaderboardKey(leaderboardName), score) if members.length == 1
            transaction.hdel(this.memberDataKey(leaderboardName), member)
            transaction.exec((err, reply) =>
              callback(reply) if callback
            )
          )
        else
          @redisConnection.zrevrangebyscore(leaderboardName, score, score, (err, members) =>
            transaction = @redisConnection.multi()
            transaction.zrem(leaderboardName, member)
            transaction.zrem(this.tiesLeaderboardKey(leaderboardName), score) if members.length == 1
            transaction.hdel(this.memberDataKey(leaderboardName), member)
            transaction.exec((err, reply) =>
              callback(reply) if callback
            )
          )
      else
        callback(null) if callback
    )

  ###
  # Retrieve the rank for a member in the named leaderboard.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param member [String] Member name.
  # @param callback Callback for result of call.
  #
  # @return the rank for a member in the leaderboard.
  ###
  rankForIn: (leaderboardName, member, callback) ->
    @redisConnection.zscore(leaderboardName, member, (err, score) =>
      if @reverse
        @redisConnection.zrank(this.tiesLeaderboardKey(leaderboardName), score, (err, rank) =>
          callback(rank + 1))
      else
        @redisConnection.zrevrank(this.tiesLeaderboardKey(leaderboardName), score, (err, rank) =>
          callback(rank + 1))
    )

  ###
  # Retrieve the score and rank for a member in the named leaderboard.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param member [String] Member name.
  # @param callback Callback for result of call.
  #
  # @return the score and rank for a member in the named leaderboard as a Hash.
  ###
  scoreAndRankForIn: (leaderboardName, member, callback) ->
    @redisConnection.zscore(leaderboardName, member, (err, memberScore) =>
      transaction = @redisConnection.multi()
      transaction.zscore(leaderboardName, member)
      if @reverse
        transaction.zrank(this.tiesLeaderboardKey(leaderboardName), memberScore)
      else
        transaction.zrevrank(this.tiesLeaderboardKey(leaderboardName), memberScore)
      transaction.exec((err, replies) =>
        if replies
          scoreAndRankData = {}
          if replies[0]?
            scoreAndRankData[@scoreKeyOption] = parseFloat(replies[0])
          else
            scoreAndRankData[@scoreKeyOption] = null
          if replies[1]?
            scoreAndRankData[@rankKeyOption] = replies[1] + 1
          else
            scoreAndRankData[@rankKeyOption] = null
          scoreAndRankData[@memberKeyOption] = member
          callback(scoreAndRankData)))

  ###
  # Remove members from the named leaderboard in a given score range.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param minScore [float] Minimum score.
  # @param maxScore [float] Maximum score.
  # @param callback Optional callback for result of call.
  ###
  removeMembersInScoreRangeIn: (leaderboardName, minScore, maxScore, callback) ->
    transaction = @redisConnection.multi()
    transaction.zremrangebyscore(leaderboardName, minScore, maxScore)
    transaction.zremrangebyscore(this.tiesLeaderboardKey(leaderboardName), minScore, maxScore)
    transaction.exec((err, replies) ->
      callback(replies) if callback)

  ###
  # Expire the given leaderboard in a set number of seconds. Do not use this with
  # leaderboards that utilize member data as there is no facility to cascade the
  # expiration out to the keys for the member data.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param seconds [int] Number of seconds after which the leaderboard will be expired.
  # @param callback Optional callback for result of call.
  ###
  expireLeaderboardFor: (leaderboardName, seconds, callback) ->
    transaction = @redisConnection.multi()
    transaction.expire(leaderboardName, seconds)
    transaction.expire(this.tiesLeaderboardKey(leaderboardName), seconds)
    transaction.expire(this.memberDataKey(leaderboardName), seconds)
    transaction.exec((err, replies) ->
      callback(replies) if callback)

  ###
  # Expire the given leaderboard at a specific UNIX timestamp. Do not use this with
  # leaderboards that utilize member data as there is no facility to cascade the
  # expiration out to the keys for the member data.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param timestamp [int] UNIX timestamp at which the leaderboard will be expired.
  # @param callback Optional callback for result of call.
  ###
  expireLeaderboardAtFor: (leaderboardName, timestamp, callback) ->
    transaction = @redisConnection.multi()
    transaction.expireat(leaderboardName, timestamp)
    transaction.expireat(this.tiesLeaderboardKey(leaderboardName), timestamp)
    transaction.expireat(this.memberDataKey(leaderboardName), timestamp)
    transaction.exec((err, replies) ->
      callback(replies) if callback)

  ###
  # Retrieve a page of leaders from the named leaderboard for a given list of members.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param members [Array] Member names.
  # @param options [Hash] Options to be used when retrieving the page from the named leaderboard.
  # @param callback Callback for result of call.
  #
  # @return a page of leaders from the named leaderboard for a given list of members.
  ###
  rankedInListIn: (leaderboardName, members, options = {}, callback) ->
    if not members? or members.length == 0
      return callback([])

    ranksForMembers = []
    transaction = @redisConnection.multi()
    unless options['membersOnly']
      for member in members
        if @reverse
          transaction.zrank(leaderboardName, member)
        else
          transaction.zrevrank(leaderboardName, member)
        transaction.zscore(leaderboardName, member)

    transaction.exec((err, replies) =>
      for member, index in members
        do (member) =>
          data = {}
          data[@memberKeyOption] = member
          unless options['membersOnly']
            if replies[index * 2 + 1]
              data[@scoreKeyOption] = parseFloat(replies[index * 2 + 1])
            else
              data[@scoreKeyOption] = null
              data[@rankKeyOption] = null

          # Collect one resolved member; once every member is in, apply the
          # optional sort and deliver the page.
          finishMember = (err, reply) =>
            data[@rankKeyOption] = reply + 1
            ranksForMembers.push(data)
            if ranksForMembers.length == members.length
              switch options['sortBy']
                # Fix: sort comparators must return a number; the original
                # returned booleans, which Array.prototype.sort mishandles.
                when 'rank'
                  ranksForMembers.sort((a, b) -> a.rank - b.rank)
                when 'score'
                  ranksForMembers.sort((a, b) -> a.score - b.score)
              callback(ranksForMembers)

          # Tie-aware rank = rank of the member's score inside the ties set.
          # NOTE(review): a missing member reaches here with a null score,
          # mirroring the original behavior — confirm upstream handling.
          resolveRank = =>
            if @reverse
              @redisConnection.zrank(this.tiesLeaderboardKey(leaderboardName), data[@scoreKeyOption], finishMember)
            else
              @redisConnection.zrevrank(this.tiesLeaderboardKey(leaderboardName), data[@scoreKeyOption], finishMember)

          # Retrieve optional member data based on options['withMemberData']
          if options['withMemberData']
            this.memberDataForIn leaderboardName, member, (memberdata) =>
              data[@memberDataKeyOption] = memberdata
              resolveRank()
          else
            resolveRank()
    )

  ###
  # Key for the sorted set that holds one entry per distinct score.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  #
  # @return the ties leaderboard key.
  ###
  tiesLeaderboardKey: (leaderboardName) ->
    "#{leaderboardName}:#{@tiesNamespace}"

module.exports = TieRankingLeaderboard
| true | Leaderboard = require './leaderboard'
class TieRankingLeaderboard extends Leaderboard
  ###
  # Default page size: 25
  ###
  @DEFAULT_PAGE_SIZE = 25

  ###
  # Default options when creating a leaderboard. Page size is 25 and reverse
  # is set to false, meaning various methods will return results in
  # highest-to-lowest order.
  ###
  DEFAULT_OPTIONS =
    'pageSize': @DEFAULT_PAGE_SIZE
    'reverse': false
    'memberKey': 'member'
    'rankKey': 'rank'
    'scoreKey': 'score'
    'memberDataKey': 'member_data'
    'memberDataNamespace': 'member_data'
    'tiesNamespace': 'ties'

  ###
  # Default Redis host: localhost
  ###
  @DEFAULT_REDIS_HOST = 'localhost'

  ###
  # Default Redis port: 6379
  ###
  @DEFAULT_REDIS_PORT = 6379

  ###
  # Default Redis options when creating a connection to Redis. The
  # +DEFAULT_REDIS_HOST+ and +DEFAULT_REDIS_PORT+ will be passed.
  ###
  DEFAULT_REDIS_OPTIONS =
    'host': @DEFAULT_REDIS_HOST
    'port': @DEFAULT_REDIS_PORT

  ###
  # Leaderboard in which members with the same score share the same rank.
  # A companion "ties" sorted set holds one entry per distinct score; a
  # member's rank is the rank of its score within that set, plus one.
  ###
  constructor: (leaderboardName, options = DEFAULT_OPTIONS, redisOptions = DEFAULT_REDIS_OPTIONS) ->
    super
    @tiesNamespace = options['tiesNamespace'] || 'ties'

  ###
  # Delete the named leaderboard.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param callback Optional callback for result of call.
  ###
  deleteLeaderboardNamed: (leaderboardName, callback) ->
    transaction = @redisConnection.multi()
    transaction.del(leaderboardName)
    transaction.del(this.memberDataKey(leaderboardName))
    transaction.del(this.tiesLeaderboardKey(leaderboardName))
    transaction.exec((err, reply) ->
      callback(reply) if callback)

  ###
  # Change the score for a member in the named leaderboard by a delta which can be positive or negative.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param member [String] Member name.
  # @param delta [float] Score change.
  # @param callback Optional callback for result of call.
  ###
  changeScoreForMemberIn: (leaderboardName, member, delta, callback) ->
    this.scoreFor(member, (score) =>
      newScore = score + delta
      # Count how many members currently hold the old score; if this member
      # was the only one, the old score must be removed from the ties set.
      @redisConnection.zrevrangebyscore(leaderboardName, score, score, (err, totalMembers) =>
        transaction = @redisConnection.multi()
        transaction.zadd(leaderboardName, newScore, member)
        transaction.zadd(this.tiesLeaderboardKey(leaderboardName), newScore, newScore)
        transaction.exec((err, reply) =>
          if totalMembers.length == 1
            @redisConnection.zrem(this.tiesLeaderboardKey(leaderboardName), score)
          callback(reply) if callback
        )
      )
    )

  ###
  # Rank a member in the named leaderboard.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param member [String] Member name.
  # @param score [float] Member score.
  # @param memberData [String] Optional member data.
  # @param callback Optional callback for result of call.
  ###
  rankMemberIn: (leaderboardName, member, score, memberData = null, callback) ->
    transaction = @redisConnection.multi()
    transaction.zadd(leaderboardName, score, member)
    # The ties set stores each distinct score once, keyed by itself.
    transaction.zadd(this.tiesLeaderboardKey(leaderboardName), score, score)
    transaction.hset(this.memberDataKey(leaderboardName), member, memberData) if memberData?
    transaction.exec((err, reply) ->
      callback(reply) if callback)

  ###
  # Rank a member across multiple leaderboards.
  #
  # @param leaderboards [Array] Leaderboard names.
  # @param member [String] Member name.
  # @param score [float] Member score.
  # @param member_data [String] Optional member data.
  ###
  rankMemberAcross: (leaderboardNames, member, score, memberData = null, callback) ->
    transaction = @redisConnection.multi()
    for leaderboardName in leaderboardNames
      transaction.zadd(leaderboardName, score, member)
      transaction.zadd(this.tiesLeaderboardKey(leaderboardName), score, score)
      transaction.hset(this.memberDataKey(leaderboardName), member, memberData) if memberData?
    transaction.exec((err, reply) ->
      callback(reply) if callback)

  ###
  # Rank an array of members in the named leaderboard.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param membersAndScores [Array] Variable list of members and scores
  # @param callback Optional callback for result of call.
  ###
  rankMembersIn: (leaderboardName, membersAndScores, callback) ->
    transaction = @redisConnection.multi()
    for index in [0...membersAndScores.length] by 2
      slice = membersAndScores[index...index + 2]
      transaction.zadd(leaderboardName, slice[1], slice[0])
      # Fix: the ties set must receive the score (slice[1]) as both score and
      # member, matching rankMemberIn; the original wrote the member name.
      transaction.zadd(this.tiesLeaderboardKey(leaderboardName), slice[1], slice[1])
    transaction.exec((err, reply) ->
      callback(reply) if callback)

  ###
  # Remove a member from the named leaderboard.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param member [String] Member name.
  # @param callback Optional callback for result of call.
  ###
  removeMemberFrom: (leaderboardName, member, callback) ->
    @redisConnection.zscore(leaderboardName, member, (err, score) =>
      if score?
        if @reverse
          @redisConnection.zrangebyscore(leaderboardName, score, score, (err, members) =>
            transaction = @redisConnection.multi()
            transaction.zrem(leaderboardName, member)
            # Drop the score from the ties set only when no other member holds it.
            transaction.zrem(this.tiesLeaderboardKey(leaderboardName), score) if members.length == 1
            transaction.hdel(this.memberDataKey(leaderboardName), member)
            transaction.exec((err, reply) =>
              callback(reply) if callback
            )
          )
        else
          @redisConnection.zrevrangebyscore(leaderboardName, score, score, (err, members) =>
            transaction = @redisConnection.multi()
            transaction.zrem(leaderboardName, member)
            transaction.zrem(this.tiesLeaderboardKey(leaderboardName), score) if members.length == 1
            transaction.hdel(this.memberDataKey(leaderboardName), member)
            transaction.exec((err, reply) =>
              callback(reply) if callback
            )
          )
      else
        callback(null) if callback
    )

  ###
  # Retrieve the rank for a member in the named leaderboard.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param member [String] Member name.
  # @param callback Callback for result of call.
  #
  # @return the rank for a member in the leaderboard.
  ###
  rankForIn: (leaderboardName, member, callback) ->
    @redisConnection.zscore(leaderboardName, member, (err, score) =>
      if @reverse
        @redisConnection.zrank(this.tiesLeaderboardKey(leaderboardName), score, (err, rank) =>
          callback(rank + 1))
      else
        @redisConnection.zrevrank(this.tiesLeaderboardKey(leaderboardName), score, (err, rank) =>
          callback(rank + 1))
    )

  ###
  # Retrieve the score and rank for a member in the named leaderboard.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param member [String] Member name.
  # @param callback Callback for result of call.
  #
  # @return the score and rank for a member in the named leaderboard as a Hash.
  ###
  scoreAndRankForIn: (leaderboardName, member, callback) ->
    @redisConnection.zscore(leaderboardName, member, (err, memberScore) =>
      transaction = @redisConnection.multi()
      transaction.zscore(leaderboardName, member)
      if @reverse
        transaction.zrank(this.tiesLeaderboardKey(leaderboardName), memberScore)
      else
        transaction.zrevrank(this.tiesLeaderboardKey(leaderboardName), memberScore)
      transaction.exec((err, replies) =>
        if replies
          scoreAndRankData = {}
          if replies[0]?
            scoreAndRankData[@scoreKeyOption] = parseFloat(replies[0])
          else
            scoreAndRankData[@scoreKeyOption] = null
          if replies[1]?
            scoreAndRankData[@rankKeyOption] = replies[1] + 1
          else
            scoreAndRankData[@rankKeyOption] = null
          scoreAndRankData[@memberKeyOption] = member
          callback(scoreAndRankData)))

  ###
  # Remove members from the named leaderboard in a given score range.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param minScore [float] Minimum score.
  # @param maxScore [float] Maximum score.
  # @param callback Optional callback for result of call.
  ###
  removeMembersInScoreRangeIn: (leaderboardName, minScore, maxScore, callback) ->
    transaction = @redisConnection.multi()
    transaction.zremrangebyscore(leaderboardName, minScore, maxScore)
    transaction.zremrangebyscore(this.tiesLeaderboardKey(leaderboardName), minScore, maxScore)
    transaction.exec((err, replies) ->
      callback(replies) if callback)

  ###
  # Expire the given leaderboard in a set number of seconds. Do not use this with
  # leaderboards that utilize member data as there is no facility to cascade the
  # expiration out to the keys for the member data.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param seconds [int] Number of seconds after which the leaderboard will be expired.
  # @param callback Optional callback for result of call.
  ###
  expireLeaderboardFor: (leaderboardName, seconds, callback) ->
    transaction = @redisConnection.multi()
    transaction.expire(leaderboardName, seconds)
    transaction.expire(this.tiesLeaderboardKey(leaderboardName), seconds)
    transaction.expire(this.memberDataKey(leaderboardName), seconds)
    transaction.exec((err, replies) ->
      callback(replies) if callback)

  ###
  # Expire the given leaderboard at a specific UNIX timestamp. Do not use this with
  # leaderboards that utilize member data as there is no facility to cascade the
  # expiration out to the keys for the member data.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param timestamp [int] UNIX timestamp at which the leaderboard will be expired.
  # @param callback Optional callback for result of call.
  ###
  expireLeaderboardAtFor: (leaderboardName, timestamp, callback) ->
    transaction = @redisConnection.multi()
    transaction.expireat(leaderboardName, timestamp)
    transaction.expireat(this.tiesLeaderboardKey(leaderboardName), timestamp)
    transaction.expireat(this.memberDataKey(leaderboardName), timestamp)
    transaction.exec((err, replies) ->
      callback(replies) if callback)

  ###
  # Retrieve a page of leaders from the named leaderboard for a given list of members.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  # @param members [Array] Member names.
  # @param options [Hash] Options to be used when retrieving the page from the named leaderboard.
  # @param callback Callback for result of call.
  #
  # @return a page of leaders from the named leaderboard for a given list of members.
  ###
  rankedInListIn: (leaderboardName, members, options = {}, callback) ->
    if not members? or members.length == 0
      return callback([])

    ranksForMembers = []
    transaction = @redisConnection.multi()
    unless options['membersOnly']
      for member in members
        if @reverse
          transaction.zrank(leaderboardName, member)
        else
          transaction.zrevrank(leaderboardName, member)
        transaction.zscore(leaderboardName, member)

    transaction.exec((err, replies) =>
      for member, index in members
        do (member) =>
          data = {}
          data[@memberKeyOption] = member
          unless options['membersOnly']
            if replies[index * 2 + 1]
              data[@scoreKeyOption] = parseFloat(replies[index * 2 + 1])
            else
              data[@scoreKeyOption] = null
              data[@rankKeyOption] = null

          # Collect one resolved member; once every member is in, apply the
          # optional sort and deliver the page.
          finishMember = (err, reply) =>
            data[@rankKeyOption] = reply + 1
            ranksForMembers.push(data)
            if ranksForMembers.length == members.length
              switch options['sortBy']
                # Fix: sort comparators must return a number; the original
                # returned booleans, which Array.prototype.sort mishandles.
                when 'rank'
                  ranksForMembers.sort((a, b) -> a.rank - b.rank)
                when 'score'
                  ranksForMembers.sort((a, b) -> a.score - b.score)
              callback(ranksForMembers)

          # Tie-aware rank = rank of the member's score inside the ties set.
          # NOTE(review): a missing member reaches here with a null score,
          # mirroring the original behavior — confirm upstream handling.
          resolveRank = =>
            if @reverse
              @redisConnection.zrank(this.tiesLeaderboardKey(leaderboardName), data[@scoreKeyOption], finishMember)
            else
              @redisConnection.zrevrank(this.tiesLeaderboardKey(leaderboardName), data[@scoreKeyOption], finishMember)

          # Retrieve optional member data based on options['withMemberData']
          if options['withMemberData']
            this.memberDataForIn leaderboardName, member, (memberdata) =>
              data[@memberDataKeyOption] = memberdata
              resolveRank()
          else
            resolveRank()
    )

  ###
  # Key for the sorted set that holds one entry per distinct score.
  #
  # @param leaderboardName [String] Name of the leaderboard.
  #
  # @return the ties leaderboard key.
  ###
  tiesLeaderboardKey: (leaderboardName) ->
    "#{leaderboardName}:#{@tiesNamespace}"

module.exports = TieRankingLeaderboard
|
[
{
"context": "ription) ->\n return el Checkbox, {\n key: \"settings.#{name}\"\n checked: Settings.getBool(name)\n label",
"end": 737,
"score": 0.8504312634468079,
"start": 720,
"tag": "KEY",
"value": "settings.#{name}\""
},
{
"context": ">\n selectField = el Sele... | src/ui/views/SettingsView.coffee | joedrago/node-crackers | 3 | # React
React = require 'react'
DOM = require 'react-dom'
Loader = require 'react-loader'
# Material UI components
Checkbox = require 'material-ui/lib/checkbox'
MenuItem = require 'material-ui/lib/menus/menu-item'
SelectField = require 'material-ui/lib/select-field'
# Local requires
Settings = require '../Settings'
tags = require '../tags'
{el} = require '../tags'
# Settings screen: a column of checkboxes and dropdowns backed by the
# persistent Settings store. State changes are persisted immediately and the
# view re-renders itself via a dummy "kick" counter.
class SettingsView extends React.Component
  constructor: (props) ->
    super props
    @state =
      kick: 0

  # Bump the dummy counter so React re-renders after a setting changes.
  kick: ->
    @setState(kick: @state.kick + 1)
    return

  # Flip a boolean setting and refresh the view.
  toggle: (name) ->
    Settings.set(name, not Settings.getBool(name))
    @kick()
    return

  # A labeled checkbox bound to the boolean setting `name`.
  createCheckbox: (name, description) ->
    props =
      key: "settings.#{name}"
      checked: Settings.getBool(name)
      label: description
      onCheck: => @toggle(name)
    el Checkbox, props

  # A zoom-factor dropdown bound to the float setting `name`.
  createZoombox: (name, value, enabled, description) ->
    items = for level in [0, 1.5, 2, 2.5, 3]
      el MenuItem, { value: level, primaryText: if level then "#{level}x" else 'Disabled' }
    dropdown = el SelectField, {
      key: "zoombox.#{name}"
      value: value
      disabled: not enabled
      onChange: (event, index, value) =>
        console.log "changing #{name} to #{value}"
        Settings.set(name, value)
        @kick()
    }, items
    tags.div {}, [dropdown]

  # Dropdown for the autoread scale on touch devices.
  createAutotouch: ->
    choices = [
      el MenuItem, { value: 0, primaryText: 'Disabled' }
      el MenuItem, { value: 1.5, primaryText: '1.5x' }
      el MenuItem, { value: 2, primaryText: '2x' }
    ]
    dropdown = el SelectField, {
      value: Settings.getFloat('comic.autotouch')
      onChange: (event, index, value) =>
        Settings.set('comic.autotouch', value)
        @kick()
    }, choices
    tags.div {}, [dropdown]

  # Shared style for the small section headings.
  miniTitleStyle: ->
    color: '#aaaaaa'
    fontSize: '1.1em'
    fontStyle: 'italic'
    marginTop: '20px'
    marginBottom: '5px'

  render: ->
    titleStyle =
      color: '#aaaaaa'
      fontSize: '1.2em'
      fontStyle: 'italic'
      marginBottom: '15px'

    generalToggles = [
      ['comic.animation', "Enable comic animation"]
      ['comic.showPageNumber', "Display the page number when switching pages"]
      ['comic.confirmBinge', "Display confirmation dialog when auto-switching to the next/previous issue"]
      ['comic.spaceAutoRead', "Space bar performs Autoread (vs simply 'next page')"]
      ['updates.detailed', "Show detailed view when listing all dates in Updates"]
      ['fakebackbutton.force', "Always display the fake back button"]
    ]

    touchToggles = [
      ['fullscreen.overlay', "Force fullscreen mode with fullscreen overlay (when not in fullscreen)"]
      ['comic.autoZoomOut', "Automatically unzoom when you aren't touching the screen"]
      ['comic.zoomgrid', "Use zoomgrid"]
    ]

    # A zoom level only makes sense when the previous one is enabled.
    zoom1 = Settings.getFloat("comic.dblzoom1")
    zoom2 = Settings.getFloat("comic.dblzoom2")
    zoom3 = Settings.getFloat("comic.dblzoom3")
    if zoom1 == 0
      zoom2 = 0
      zoom3 = 0
    if zoom2 == 0
      zoom3 = 0

    elements = []
    elements.push tags.div { key: 'settings.title', style: titleStyle }, "Settings"
    elements.push @createCheckbox(name, label) for [name, label] in generalToggles
    elements.push tags.div { key: 'settings.zoomlevelstitle', style: @miniTitleStyle() }, "Zoom levels on double click/tap:"
    elements.push @createZoombox('comic.dblzoom1', zoom1, true, "zoom1")
    elements.push @createZoombox('comic.dblzoom2', zoom2, (zoom1 > 0), "zoom2")
    elements.push @createZoombox('comic.dblzoom3', zoom3, (zoom2 > 0), "zoom3")
    elements.push tags.hr {
      key: "hr.touchonly"
      size: 1
      style:
        marginTop: '20px'
        marginBottom: '20px'
        borderColor: '#777777'
    }
    elements.push tags.div { key: 'settings.touchonly', style: @miniTitleStyle() }, "Touch devices only (tablet / phone):"
    elements.push @createCheckbox(name, label) for [name, label] in touchToggles
    elements.push tags.div { key: 'settings.autotouchtitle', style: @miniTitleStyle() }, "Enable autoread in landscape mode (touch devices only, choose scale):"
    elements.push @createAutotouch()

    tags.div {
      style:
        marginTop: '10px'
        marginLeft: '60px'
    }, elements

module.exports = SettingsView
| 196280 | # React
React = require 'react'
DOM = require 'react-dom'
Loader = require 'react-loader'
# Material UI components
Checkbox = require 'material-ui/lib/checkbox'
MenuItem = require 'material-ui/lib/menus/menu-item'
SelectField = require 'material-ui/lib/select-field'
# Local requires
Settings = require '../Settings'
tags = require '../tags'
{el} = require '../tags'
class SettingsView extends React.Component
constructor: (props) ->
super props
@state =
kick: 0
kick: ->
@setState { kick: @state.kick + 1 }
return
toggle: (name) ->
Settings.set(name, not Settings.getBool(name))
@kick()
return
createCheckbox: (name, description) ->
return el Checkbox, {
key: "<KEY>
checked: Settings.getBool(name)
label: description
onCheck: => @toggle(name)
}
createZoombox: (name, value, enabled, description) ->
selectField = el SelectField, {
key: "<KEY>
value: value
disabled: !enabled
onChange: (event, index, value) =>
console.log "changing #{name} to #{value}"
Settings.set(name, value)
@kick()
}, [
el MenuItem, { value: 0, primaryText: 'Disabled' }
el MenuItem, { value: 1.5, primaryText: '1.5x' }
el MenuItem, { value: 2, primaryText: '2x' }
el MenuItem, { value: 2.5, primaryText: '2.5x' }
el MenuItem, { value: 3, primaryText: '3x' }
]
return tags.div {}, [selectField]
createAutotouch: ->
selectField = el SelectField, {
value: Settings.getFloat('comic.autotouch')
onChange: (event, index, value) =>
Settings.set('comic.autotouch', value)
@kick()
}, [
el MenuItem, { value: 0, primaryText: 'Disabled' }
el MenuItem, { value: 1.5, primaryText: '1.5x' }
el MenuItem, { value: 2, primaryText: '2x' }
]
return tags.div {}, [selectField]
miniTitleStyle: ->
return {
color: '#aaaaaa'
fontSize: '1.1em'
fontStyle: 'italic'
marginTop: '20px'
marginBottom: '5px'
}
render: ->
elements = []
elements.push tags.div {
key: 'settings.title'
style:
color: '#aaaaaa'
fontSize: '1.2em'
fontStyle: 'italic'
marginBottom: '15px'
}, "Settings"
# ------------------------------------------------------------------------
elements.push @createCheckbox('comic.animation', "Enable comic animation")
elements.push @createCheckbox('comic.showPageNumber', "Display the page number when switching pages")
elements.push @createCheckbox('comic.confirmBinge', "Display confirmation dialog when auto-switching to the next/previous issue")
elements.push @createCheckbox('comic.spaceAutoRead', "Space bar performs Autoread (vs simply 'next page')")
elements.push @createCheckbox('updates.detailed', "Show detailed view when listing all dates in Updates")
elements.push @createCheckbox('fakebackbutton.force', "Always display the fake back button")
# ------------------------------------------------------------------------
elements.push tags.div {
key: 'settings.zoomlevelstitle'
style: @miniTitleStyle()
}, "Zoom levels on double click/tap:"
zoom1 = Settings.getFloat("comic.dblzoom1")
zoom2 = Settings.getFloat("comic.dblzoom2")
zoom3 = Settings.getFloat("comic.dblzoom3")
if zoom1 == 0
zoom2 = 0
zoom3 = 0
if zoom2 == 0
zoom3 = 0
elements.push @createZoombox('comic.dblzoom1', zoom1, true, "zoom1")
elements.push @createZoombox('comic.dblzoom2', zoom2, (zoom1 > 0), "zoom2")
elements.push @createZoombox('comic.dblzoom3', zoom3, (zoom2 > 0), "zoom3")
# ------------------------------------------------------------------------
elements.push tags.hr {
key: "hr.touchonly"
size: 1
style:
marginTop: '20px'
marginBottom: '20px'
borderColor: '#777777'
}
# ------------------------------------------------------------------------
elements.push tags.div {
key: 'settings.touchonly'
style: @miniTitleStyle()
}, "Touch devices only (tablet / phone):"
elements.push @createCheckbox('fullscreen.overlay', "Force fullscreen mode with fullscreen overlay (when not in fullscreen)")
elements.push @createCheckbox('comic.autoZoomOut', "Automatically unzoom when you aren't touching the screen")
elements.push @createCheckbox('comic.zoomgrid', "Use zoomgrid")
elements.push tags.div {
key: 'settings.autotouchtitle'
style: @miniTitleStyle()
}, "Enable autoread in landscape mode (touch devices only, choose scale):"
elements.push @createAutotouch()
# ------------------------------------------------------------------------
view = tags.div {
style:
marginTop: '10px'
marginLeft: '60px'
}, elements
return view
module.exports = SettingsView
| true | # React
React = require 'react'
DOM = require 'react-dom'
Loader = require 'react-loader'
# Material UI components
Checkbox = require 'material-ui/lib/checkbox'
MenuItem = require 'material-ui/lib/menus/menu-item'
SelectField = require 'material-ui/lib/select-field'
# Local requires
Settings = require '../Settings'
tags = require '../tags'
{el} = require '../tags'
class SettingsView extends React.Component
constructor: (props) ->
super props
@state =
kick: 0
kick: ->
@setState { kick: @state.kick + 1 }
return
toggle: (name) ->
Settings.set(name, not Settings.getBool(name))
@kick()
return
createCheckbox: (name, description) ->
return el Checkbox, {
key: "PI:KEY:<KEY>END_PI
checked: Settings.getBool(name)
label: description
onCheck: => @toggle(name)
}
createZoombox: (name, value, enabled, description) ->
selectField = el SelectField, {
key: "PI:KEY:<KEY>END_PI
value: value
disabled: !enabled
onChange: (event, index, value) =>
console.log "changing #{name} to #{value}"
Settings.set(name, value)
@kick()
}, [
el MenuItem, { value: 0, primaryText: 'Disabled' }
el MenuItem, { value: 1.5, primaryText: '1.5x' }
el MenuItem, { value: 2, primaryText: '2x' }
el MenuItem, { value: 2.5, primaryText: '2.5x' }
el MenuItem, { value: 3, primaryText: '3x' }
]
return tags.div {}, [selectField]
createAutotouch: ->
selectField = el SelectField, {
value: Settings.getFloat('comic.autotouch')
onChange: (event, index, value) =>
Settings.set('comic.autotouch', value)
@kick()
}, [
el MenuItem, { value: 0, primaryText: 'Disabled' }
el MenuItem, { value: 1.5, primaryText: '1.5x' }
el MenuItem, { value: 2, primaryText: '2x' }
]
return tags.div {}, [selectField]
miniTitleStyle: ->
return {
color: '#aaaaaa'
fontSize: '1.1em'
fontStyle: 'italic'
marginTop: '20px'
marginBottom: '5px'
}
render: ->
elements = []
elements.push tags.div {
key: 'settings.title'
style:
color: '#aaaaaa'
fontSize: '1.2em'
fontStyle: 'italic'
marginBottom: '15px'
}, "Settings"
# ------------------------------------------------------------------------
elements.push @createCheckbox('comic.animation', "Enable comic animation")
elements.push @createCheckbox('comic.showPageNumber', "Display the page number when switching pages")
elements.push @createCheckbox('comic.confirmBinge', "Display confirmation dialog when auto-switching to the next/previous issue")
elements.push @createCheckbox('comic.spaceAutoRead', "Space bar performs Autoread (vs simply 'next page')")
elements.push @createCheckbox('updates.detailed', "Show detailed view when listing all dates in Updates")
elements.push @createCheckbox('fakebackbutton.force', "Always display the fake back button")
# ------------------------------------------------------------------------
elements.push tags.div {
key: 'settings.zoomlevelstitle'
style: @miniTitleStyle()
}, "Zoom levels on double click/tap:"
zoom1 = Settings.getFloat("comic.dblzoom1")
zoom2 = Settings.getFloat("comic.dblzoom2")
zoom3 = Settings.getFloat("comic.dblzoom3")
if zoom1 == 0
zoom2 = 0
zoom3 = 0
if zoom2 == 0
zoom3 = 0
elements.push @createZoombox('comic.dblzoom1', zoom1, true, "zoom1")
elements.push @createZoombox('comic.dblzoom2', zoom2, (zoom1 > 0), "zoom2")
elements.push @createZoombox('comic.dblzoom3', zoom3, (zoom2 > 0), "zoom3")
# ------------------------------------------------------------------------
elements.push tags.hr {
key: "hr.touchonly"
size: 1
style:
marginTop: '20px'
marginBottom: '20px'
borderColor: '#777777'
}
# ------------------------------------------------------------------------
elements.push tags.div {
key: 'settings.touchonly'
style: @miniTitleStyle()
}, "Touch devices only (tablet / phone):"
elements.push @createCheckbox('fullscreen.overlay', "Force fullscreen mode with fullscreen overlay (when not in fullscreen)")
elements.push @createCheckbox('comic.autoZoomOut', "Automatically unzoom when you aren't touching the screen")
elements.push @createCheckbox('comic.zoomgrid', "Use zoomgrid")
elements.push tags.div {
key: 'settings.autotouchtitle'
style: @miniTitleStyle()
}, "Enable autoread in landscape mode (touch devices only, choose scale):"
elements.push @createAutotouch()
# ------------------------------------------------------------------------
view = tags.div {
style:
marginTop: '10px'
marginLeft: '60px'
}, elements
return view
module.exports = SettingsView
|
[
{
"context": "in. If the error keeps occurring, <a href='mailto:pdfy@cryto.net'>send us an e-mail</a>!\"\n\n\tif errorMessage?\n\t\ttri",
"end": 912,
"score": 0.9999294281005859,
"start": 898,
"tag": "EMAIL",
"value": "pdfy@cryto.net"
}
] | frontend/lib/upload.coffee | rallias/pdfy2 | 0 | $ = require "jquery"
prettyUnits = require "pretty-units"
# The AMD loader for this package doesn't work for some reason - so we explicitly disable it. This will force it to fall back to the CommonJS API.
require "blueimp-file-upload/js/jquery.fileupload"
data_object = null
uploadDone = (response) ->
switch response.status
when 415
errorHeader = "Oops! That's not a PDF file."
errorMessage = "The file you tried to upload is not a valid PDF file. Currently, only PDF files are accepted."
when 413
errorHeader = "Oops! That file is too big."
errorMessage = "The file you tried to upload is too big. Currently, you can only upload PDF files up to 150MB in size."
when 200 # Nothing, success!
else
errorHeader = "Oops! Something went wrong."
errorMessage = "An unknown error occurred. Please reload the page and try again. If the error keeps occurring, <a href='mailto:pdfy@cryto.net'>send us an e-mail</a>!"
if errorMessage?
triggerError errorHeader, errorMessage
reinitializeUploader()
else
if response.responseJSON.redirect?
window.location = response.responseJSON.redirect
else
# TODO: Wat do?
triggerError = (header, message) ->
$(".upload-form .privacySettings, .upload-form .progress, .upload-form .button-submit").hide()
$(".upload").removeClass("faded")
errorBox = $("#uploadError")
.show()
errorBox.find "h3"
.html header
errorBox.find ".message"
.html message
data_object = null
reinitializeUploader = ->
$("#upload_element").replaceWith($("#upload_element").clone(true))
filePicked = (data) ->
$(".upload-form .privacySettings, .upload-form .button-submit").show()
$("#uploadError").hide()
$(".upload-form .fileinfo").removeClass("faded")
fileinfo = $(".fileinfo")
filesize = data.files[0].size
# TODO: Use filesize limit from configuration file!
if filesize > (150 * 1024 * 1024)
reinitializeUploader()
triggerError("Oops! That file is too big.", "The file you tried to upload is too big. Currently, you can only upload PDF files up to 150MB in size.")
return
filesize_text = prettyUnits(filesize) + "B"
fileinfo.find ".filename"
.text data.files[0].name
fileinfo.find ".filesize"
.text filesize_text
$ ".info"
.hide()
fileinfo
.show()
$ ".upload"
.addClass "faded"
updateUploadProgress = (event) ->
if event.lengthComputable
percentage = event.loaded / event.total * 100
done_text = prettyUnits(event.loaded) + "B"
total_text = prettyUnits(event.total) + "B"
progress = $ ".progress"
progress.find ".done"
.text done_text
progress.find ".total"
.text total_text
progress.find ".percentage"
.text (Math.ceil(percentage * 100) / 100)
progress.find ".bar-inner"
.css width: "#{percentage}%"
if event.loaded >= event.total
# Completed!
progress.find ".numbers"
.hide()
progress.find ".wait"
.show()
$ ->
if $().fileupload?
# Only run this if the fileupload plugin is loaded; we don't need all this on eg. the 'view' page.
$ "#upload_form"
.fileupload
fileInput: null
type: "POST"
url: "/upload"
paramName: "file"
autoUpload: false
maxNumberOfFiles: 1
formData: (form) ->
form = $ "#upload_form"
form.serializeArray()
progressall: (e, data) ->
updateUploadProgress
lengthComputable: true
loaded: data.loaded
total: data.total
add: (e, data) ->
data_object = data
filePicked(data)
always: (e, data) ->
uploadDone(data.jqXHR)
$ "#upload_activator"
.on "click", (event) ->
$("#upload_element").click()
$ "#upload_element"
.on "change", (event) ->
filePicked(this)
$ "#upload_form"
.on "submit", (event) ->
event.stopPropagation()
event.preventDefault()
$ ".fileinfo"
.addClass "faded"
$ ".progress"
.show()
if data_object == null
# Only do this if the drag-and-drop dropzone hasn't been used.
formData = new FormData(this)
$.ajax
method: "POST"
url: "/upload"
data: formData
cache: false
contentType: false
processData: false
xhr: ->
customHandler = $.ajaxSettings.xhr()
if customHandler.upload?
customHandler.upload.addEventListener "progress", updateUploadProgress, false
return customHandler
complete: (result) ->
uploadDone(result)
else
# If the dropzone was used...
data_object.submit()
| 125474 | $ = require "jquery"
prettyUnits = require "pretty-units"
# The AMD loader for this package doesn't work for some reason - so we explicitly disable it. This will force it to fall back to the CommonJS API.
require "blueimp-file-upload/js/jquery.fileupload"
data_object = null
uploadDone = (response) ->
switch response.status
when 415
errorHeader = "Oops! That's not a PDF file."
errorMessage = "The file you tried to upload is not a valid PDF file. Currently, only PDF files are accepted."
when 413
errorHeader = "Oops! That file is too big."
errorMessage = "The file you tried to upload is too big. Currently, you can only upload PDF files up to 150MB in size."
when 200 # Nothing, success!
else
errorHeader = "Oops! Something went wrong."
errorMessage = "An unknown error occurred. Please reload the page and try again. If the error keeps occurring, <a href='mailto:<EMAIL>'>send us an e-mail</a>!"
if errorMessage?
triggerError errorHeader, errorMessage
reinitializeUploader()
else
if response.responseJSON.redirect?
window.location = response.responseJSON.redirect
else
# TODO: Wat do?
triggerError = (header, message) ->
$(".upload-form .privacySettings, .upload-form .progress, .upload-form .button-submit").hide()
$(".upload").removeClass("faded")
errorBox = $("#uploadError")
.show()
errorBox.find "h3"
.html header
errorBox.find ".message"
.html message
data_object = null
reinitializeUploader = ->
$("#upload_element").replaceWith($("#upload_element").clone(true))
filePicked = (data) ->
$(".upload-form .privacySettings, .upload-form .button-submit").show()
$("#uploadError").hide()
$(".upload-form .fileinfo").removeClass("faded")
fileinfo = $(".fileinfo")
filesize = data.files[0].size
# TODO: Use filesize limit from configuration file!
if filesize > (150 * 1024 * 1024)
reinitializeUploader()
triggerError("Oops! That file is too big.", "The file you tried to upload is too big. Currently, you can only upload PDF files up to 150MB in size.")
return
filesize_text = prettyUnits(filesize) + "B"
fileinfo.find ".filename"
.text data.files[0].name
fileinfo.find ".filesize"
.text filesize_text
$ ".info"
.hide()
fileinfo
.show()
$ ".upload"
.addClass "faded"
updateUploadProgress = (event) ->
if event.lengthComputable
percentage = event.loaded / event.total * 100
done_text = prettyUnits(event.loaded) + "B"
total_text = prettyUnits(event.total) + "B"
progress = $ ".progress"
progress.find ".done"
.text done_text
progress.find ".total"
.text total_text
progress.find ".percentage"
.text (Math.ceil(percentage * 100) / 100)
progress.find ".bar-inner"
.css width: "#{percentage}%"
if event.loaded >= event.total
# Completed!
progress.find ".numbers"
.hide()
progress.find ".wait"
.show()
$ ->
if $().fileupload?
# Only run this if the fileupload plugin is loaded; we don't need all this on eg. the 'view' page.
$ "#upload_form"
.fileupload
fileInput: null
type: "POST"
url: "/upload"
paramName: "file"
autoUpload: false
maxNumberOfFiles: 1
formData: (form) ->
form = $ "#upload_form"
form.serializeArray()
progressall: (e, data) ->
updateUploadProgress
lengthComputable: true
loaded: data.loaded
total: data.total
add: (e, data) ->
data_object = data
filePicked(data)
always: (e, data) ->
uploadDone(data.jqXHR)
$ "#upload_activator"
.on "click", (event) ->
$("#upload_element").click()
$ "#upload_element"
.on "change", (event) ->
filePicked(this)
$ "#upload_form"
.on "submit", (event) ->
event.stopPropagation()
event.preventDefault()
$ ".fileinfo"
.addClass "faded"
$ ".progress"
.show()
if data_object == null
# Only do this if the drag-and-drop dropzone hasn't been used.
formData = new FormData(this)
$.ajax
method: "POST"
url: "/upload"
data: formData
cache: false
contentType: false
processData: false
xhr: ->
customHandler = $.ajaxSettings.xhr()
if customHandler.upload?
customHandler.upload.addEventListener "progress", updateUploadProgress, false
return customHandler
complete: (result) ->
uploadDone(result)
else
# If the dropzone was used...
data_object.submit()
| true | $ = require "jquery"
prettyUnits = require "pretty-units"
# The AMD loader for this package doesn't work for some reason - so we explicitly disable it. This will force it to fall back to the CommonJS API.
require "blueimp-file-upload/js/jquery.fileupload"
data_object = null
uploadDone = (response) ->
switch response.status
when 415
errorHeader = "Oops! That's not a PDF file."
errorMessage = "The file you tried to upload is not a valid PDF file. Currently, only PDF files are accepted."
when 413
errorHeader = "Oops! That file is too big."
errorMessage = "The file you tried to upload is too big. Currently, you can only upload PDF files up to 150MB in size."
when 200 # Nothing, success!
else
errorHeader = "Oops! Something went wrong."
errorMessage = "An unknown error occurred. Please reload the page and try again. If the error keeps occurring, <a href='mailto:PI:EMAIL:<EMAIL>END_PI'>send us an e-mail</a>!"
if errorMessage?
triggerError errorHeader, errorMessage
reinitializeUploader()
else
if response.responseJSON.redirect?
window.location = response.responseJSON.redirect
else
# TODO: Wat do?
triggerError = (header, message) ->
$(".upload-form .privacySettings, .upload-form .progress, .upload-form .button-submit").hide()
$(".upload").removeClass("faded")
errorBox = $("#uploadError")
.show()
errorBox.find "h3"
.html header
errorBox.find ".message"
.html message
data_object = null
reinitializeUploader = ->
$("#upload_element").replaceWith($("#upload_element").clone(true))
filePicked = (data) ->
$(".upload-form .privacySettings, .upload-form .button-submit").show()
$("#uploadError").hide()
$(".upload-form .fileinfo").removeClass("faded")
fileinfo = $(".fileinfo")
filesize = data.files[0].size
# TODO: Use filesize limit from configuration file!
if filesize > (150 * 1024 * 1024)
reinitializeUploader()
triggerError("Oops! That file is too big.", "The file you tried to upload is too big. Currently, you can only upload PDF files up to 150MB in size.")
return
filesize_text = prettyUnits(filesize) + "B"
fileinfo.find ".filename"
.text data.files[0].name
fileinfo.find ".filesize"
.text filesize_text
$ ".info"
.hide()
fileinfo
.show()
$ ".upload"
.addClass "faded"
updateUploadProgress = (event) ->
if event.lengthComputable
percentage = event.loaded / event.total * 100
done_text = prettyUnits(event.loaded) + "B"
total_text = prettyUnits(event.total) + "B"
progress = $ ".progress"
progress.find ".done"
.text done_text
progress.find ".total"
.text total_text
progress.find ".percentage"
.text (Math.ceil(percentage * 100) / 100)
progress.find ".bar-inner"
.css width: "#{percentage}%"
if event.loaded >= event.total
# Completed!
progress.find ".numbers"
.hide()
progress.find ".wait"
.show()
$ ->
if $().fileupload?
# Only run this if the fileupload plugin is loaded; we don't need all this on eg. the 'view' page.
$ "#upload_form"
.fileupload
fileInput: null
type: "POST"
url: "/upload"
paramName: "file"
autoUpload: false
maxNumberOfFiles: 1
formData: (form) ->
form = $ "#upload_form"
form.serializeArray()
progressall: (e, data) ->
updateUploadProgress
lengthComputable: true
loaded: data.loaded
total: data.total
add: (e, data) ->
data_object = data
filePicked(data)
always: (e, data) ->
uploadDone(data.jqXHR)
$ "#upload_activator"
.on "click", (event) ->
$("#upload_element").click()
$ "#upload_element"
.on "change", (event) ->
filePicked(this)
$ "#upload_form"
.on "submit", (event) ->
event.stopPropagation()
event.preventDefault()
$ ".fileinfo"
.addClass "faded"
$ ".progress"
.show()
if data_object == null
# Only do this if the drag-and-drop dropzone hasn't been used.
formData = new FormData(this)
$.ajax
method: "POST"
url: "/upload"
data: formData
cache: false
contentType: false
processData: false
xhr: ->
customHandler = $.ajaxSettings.xhr()
if customHandler.upload?
customHandler.upload.addEventListener "progress", updateUploadProgress, false
return customHandler
complete: (result) ->
uploadDone(result)
else
# If the dropzone was used...
data_object.submit()
|
[
{
"context": "s.gamer_id, creds.gamer_secret\n\t\t\t.send {password:newPassword}\n\t\t\t.end (err, res)->\n\t\t\t\tif err? then cb(err)\n\t\t",
"end": 6718,
"score": 0.9989533424377441,
"start": 6707,
"tag": "PASSWORD",
"value": "newPassword"
}
] | src/Clan.coffee | clanofthecloud/javascript-client | 1 | agent = require 'superagent'
unless agent.Request.prototype.use?
agent.Request.prototype.use = (fn)->
fn(@)
@
Endpoints = require './endpoints.coffee'
prefixer = require './prefixer.coffee'
ClanError = require './ClanError.coffee'
Clan = module.exports = (apikey, apisecret, endpoint=null)->
Endpoints.set endpoint if endpoint?
appCredentials = {'x-apikey': apikey, 'x-apisecret': apisecret}
createGamerCredentials: (gamer)->
{gamer_id: gamer.gamer_id, gamer_secret: gamer.gamer_secret}
loginAnonymous: (options, cb)->
agent.post '/v1/login/anonymous'
.use prefixer
.send {options}
.set appCredentials
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.error
else cb null, res.body, true
login: (network, id, secret, options, cb)->
if typeof options is "function"
cb = options
options = null
if network?
agent
.post '/v1/login'
.use prefixer
.send {network, id, secret, options}
.set appCredentials
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body, false
else
cb = id
agent
.post '/v1/login/anonymous'
.use prefixer
.set appCredentials
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body, true
resumeSession: (gamer_id, gamer_secret, cb)->
@login "anonymous", gamer_id, gamer_secret, {preventRegistration :true}, cb
loginWithShortCode: (shortcode, cb)->
@login "restore", "", shortcode, {preventRegistration :true}, cb
runBatch: (domain, batchName, params, cb)->
agent
.post "/v1/batch/#{domain}/#{batchName}"
.use prefixer
.set appCredentials
.send params
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
vfs: (domain='private')->
require('./gamevfs.coffee')(appCredentials, domain)
indexes: (domain='private')->
require('./indexes.coffee')(appCredentials, domain)
# the mailBody will be parsed to replace occurences of [[SHORTCODE]] by actual shortcode
sendResetMailPassword: ( userEmail, mailSender, mailTitle, mailBody, cb)->
textBody = "[[SHORTCODE]]"
htmlBody = null
if typeof mailBody == "object"
if mailBody.html == true
htmlBody = mailBody.body
else
textBody = mailBody.body
else
textBody = mailBody
body =
from: mailSender
title: mailTitle
body: textBody
if htmlBody != null then body.html = htmlBody
agent
.post "/v1/login/#{userEmail}"
.use prefixer
.set appCredentials
.send body
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
userExists: (network, id, cb)->
agent
.get "/v1/users/#{network}/#{encodeURIComponent(id)}"
.use prefixer
.set appCredentials
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
withGamer: (gamer)->
creds = this.createGamerCredentials gamer
transactions: (domain='private')->
require('./transactions.coffee')(appCredentials, creds, domain)
gamervfs: (domain='private')->
require('./gamervfs.coffee')(appCredentials, creds, domain)
friends: (domain='private')->
require('./friends.coffee')(appCredentials, creds, domain)
profile: ()->
require('./profile.coffee')(appCredentials, creds)
properties: (domain='private')->
require('./properties.coffee')(appCredentials, creds, domain)
leaderboards: (domain='private')->
require('./leaderboards.coffee')(appCredentials, creds, domain)
matches: (domain='private')->
require('./matches.coffee')(appCredentials, creds, domain)
kv: (domain='private')->
require('./kvstore.coffee')(appCredentials, creds, domain)
events: (domain='private')->
require('./event.coffee')(appCredentials, creds, domain)
achievements: (domain='private')->
require('./achievements.coffee')(appCredentials, creds, domain)
referral: (domain='private')->
require('./referral.coffee')(appCredentials, creds, domain)
store: ()->
require('./store.coffee')(appCredentials, creds)
runBatch: (domain, batchName, params, cb)->
agent
.post "/v1/gamer/batch/#{domain}/#{batchName}"
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.send params
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
outline: (cb)->
agent
.get '/v1/gamer/outline'
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
convertTo: (network, id, secret, cb)->
agent
.post '/v1/gamer/convert'
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.send {network, id, secret}
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
link: (network, id, secret, cb)->
agent
.post 'v1/gamer/link'
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.send {network, id, secret}
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
unlink: (network, cb)->
agent
.post 'v1/gamer/unlink'
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.send {network}
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
logout: (cb)->
agent
.post '/v1/gamer/logout'
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
changeEmail: (newEmailAddress, cb)->
agent
.post '/v1/gamer/email'
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.send {email:newEmailAddress}
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
changePassword: (newPassword, cb)->
agent
.post '/v1/gamer/password'
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.send {password:newPassword}
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
listUsers: (filter, limit, skip, cb)->
agent
.get "/v1/gamer?q=#{encodeURIComponent(filter)}&limit=#{limit}&skip=#{skip}"
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
privateDomain: 'private'
| 108679 | agent = require 'superagent'
unless agent.Request.prototype.use?
agent.Request.prototype.use = (fn)->
fn(@)
@
Endpoints = require './endpoints.coffee'
prefixer = require './prefixer.coffee'
ClanError = require './ClanError.coffee'
Clan = module.exports = (apikey, apisecret, endpoint=null)->
Endpoints.set endpoint if endpoint?
appCredentials = {'x-apikey': apikey, 'x-apisecret': apisecret}
createGamerCredentials: (gamer)->
{gamer_id: gamer.gamer_id, gamer_secret: gamer.gamer_secret}
loginAnonymous: (options, cb)->
agent.post '/v1/login/anonymous'
.use prefixer
.send {options}
.set appCredentials
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.error
else cb null, res.body, true
login: (network, id, secret, options, cb)->
if typeof options is "function"
cb = options
options = null
if network?
agent
.post '/v1/login'
.use prefixer
.send {network, id, secret, options}
.set appCredentials
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body, false
else
cb = id
agent
.post '/v1/login/anonymous'
.use prefixer
.set appCredentials
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body, true
resumeSession: (gamer_id, gamer_secret, cb)->
@login "anonymous", gamer_id, gamer_secret, {preventRegistration :true}, cb
loginWithShortCode: (shortcode, cb)->
@login "restore", "", shortcode, {preventRegistration :true}, cb
runBatch: (domain, batchName, params, cb)->
agent
.post "/v1/batch/#{domain}/#{batchName}"
.use prefixer
.set appCredentials
.send params
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
vfs: (domain='private')->
require('./gamevfs.coffee')(appCredentials, domain)
indexes: (domain='private')->
require('./indexes.coffee')(appCredentials, domain)
# the mailBody will be parsed to replace occurences of [[SHORTCODE]] by actual shortcode
sendResetMailPassword: ( userEmail, mailSender, mailTitle, mailBody, cb)->
textBody = "[[SHORTCODE]]"
htmlBody = null
if typeof mailBody == "object"
if mailBody.html == true
htmlBody = mailBody.body
else
textBody = mailBody.body
else
textBody = mailBody
body =
from: mailSender
title: mailTitle
body: textBody
if htmlBody != null then body.html = htmlBody
agent
.post "/v1/login/#{userEmail}"
.use prefixer
.set appCredentials
.send body
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
userExists: (network, id, cb)->
agent
.get "/v1/users/#{network}/#{encodeURIComponent(id)}"
.use prefixer
.set appCredentials
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
withGamer: (gamer)->
creds = this.createGamerCredentials gamer
transactions: (domain='private')->
require('./transactions.coffee')(appCredentials, creds, domain)
gamervfs: (domain='private')->
require('./gamervfs.coffee')(appCredentials, creds, domain)
friends: (domain='private')->
require('./friends.coffee')(appCredentials, creds, domain)
profile: ()->
require('./profile.coffee')(appCredentials, creds)
properties: (domain='private')->
require('./properties.coffee')(appCredentials, creds, domain)
leaderboards: (domain='private')->
require('./leaderboards.coffee')(appCredentials, creds, domain)
matches: (domain='private')->
require('./matches.coffee')(appCredentials, creds, domain)
kv: (domain='private')->
require('./kvstore.coffee')(appCredentials, creds, domain)
events: (domain='private')->
require('./event.coffee')(appCredentials, creds, domain)
achievements: (domain='private')->
require('./achievements.coffee')(appCredentials, creds, domain)
referral: (domain='private')->
require('./referral.coffee')(appCredentials, creds, domain)
store: ()->
require('./store.coffee')(appCredentials, creds)
runBatch: (domain, batchName, params, cb)->
agent
.post "/v1/gamer/batch/#{domain}/#{batchName}"
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.send params
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
outline: (cb)->
agent
.get '/v1/gamer/outline'
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
convertTo: (network, id, secret, cb)->
agent
.post '/v1/gamer/convert'
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.send {network, id, secret}
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
link: (network, id, secret, cb)->
agent
.post 'v1/gamer/link'
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.send {network, id, secret}
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
unlink: (network, cb)->
agent
.post 'v1/gamer/unlink'
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.send {network}
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
logout: (cb)->
agent
.post '/v1/gamer/logout'
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
changeEmail: (newEmailAddress, cb)->
agent
.post '/v1/gamer/email'
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.send {email:newEmailAddress}
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
changePassword: (newPassword, cb)->
agent
.post '/v1/gamer/password'
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.send {password:<PASSWORD>}
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
listUsers: (filter, limit, skip, cb)->
agent
.get "/v1/gamer?q=#{encodeURIComponent(filter)}&limit=#{limit}&skip=#{skip}"
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
privateDomain: 'private'
| true | agent = require 'superagent'
unless agent.Request.prototype.use?
agent.Request.prototype.use = (fn)->
fn(@)
@
Endpoints = require './endpoints.coffee'
prefixer = require './prefixer.coffee'
ClanError = require './ClanError.coffee'
Clan = module.exports = (apikey, apisecret, endpoint=null)->
Endpoints.set endpoint if endpoint?
appCredentials = {'x-apikey': apikey, 'x-apisecret': apisecret}
createGamerCredentials: (gamer)->
{gamer_id: gamer.gamer_id, gamer_secret: gamer.gamer_secret}
loginAnonymous: (options, cb)->
agent.post '/v1/login/anonymous'
.use prefixer
.send {options}
.set appCredentials
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.error
else cb null, res.body, true
login: (network, id, secret, options, cb)->
if typeof options is "function"
cb = options
options = null
if network?
agent
.post '/v1/login'
.use prefixer
.send {network, id, secret, options}
.set appCredentials
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body, false
else
cb = id
agent
.post '/v1/login/anonymous'
.use prefixer
.set appCredentials
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body, true
resumeSession: (gamer_id, gamer_secret, cb)->
@login "anonymous", gamer_id, gamer_secret, {preventRegistration :true}, cb
loginWithShortCode: (shortcode, cb)->
@login "restore", "", shortcode, {preventRegistration :true}, cb
runBatch: (domain, batchName, params, cb)->
agent
.post "/v1/batch/#{domain}/#{batchName}"
.use prefixer
.set appCredentials
.send params
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
vfs: (domain='private')->
require('./gamevfs.coffee')(appCredentials, domain)
indexes: (domain='private')->
require('./indexes.coffee')(appCredentials, domain)
# the mailBody will be parsed to replace occurences of [[SHORTCODE]] by actual shortcode
sendResetMailPassword: ( userEmail, mailSender, mailTitle, mailBody, cb)->
textBody = "[[SHORTCODE]]"
htmlBody = null
if typeof mailBody == "object"
if mailBody.html == true
htmlBody = mailBody.body
else
textBody = mailBody.body
else
textBody = mailBody
body =
from: mailSender
title: mailTitle
body: textBody
if htmlBody != null then body.html = htmlBody
agent
.post "/v1/login/#{userEmail}"
.use prefixer
.set appCredentials
.send body
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
userExists: (network, id, cb)->
agent
.get "/v1/users/#{network}/#{encodeURIComponent(id)}"
.use prefixer
.set appCredentials
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
withGamer: (gamer)->
creds = this.createGamerCredentials gamer
transactions: (domain='private')->
require('./transactions.coffee')(appCredentials, creds, domain)
gamervfs: (domain='private')->
require('./gamervfs.coffee')(appCredentials, creds, domain)
friends: (domain='private')->
require('./friends.coffee')(appCredentials, creds, domain)
profile: ()->
require('./profile.coffee')(appCredentials, creds)
properties: (domain='private')->
require('./properties.coffee')(appCredentials, creds, domain)
leaderboards: (domain='private')->
require('./leaderboards.coffee')(appCredentials, creds, domain)
matches: (domain='private')->
require('./matches.coffee')(appCredentials, creds, domain)
kv: (domain='private')->
require('./kvstore.coffee')(appCredentials, creds, domain)
events: (domain='private')->
require('./event.coffee')(appCredentials, creds, domain)
achievements: (domain='private')->
require('./achievements.coffee')(appCredentials, creds, domain)
referral: (domain='private')->
require('./referral.coffee')(appCredentials, creds, domain)
store: ()->
require('./store.coffee')(appCredentials, creds)
runBatch: (domain, batchName, params, cb)->
agent
.post "/v1/gamer/batch/#{domain}/#{batchName}"
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.send params
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
outline: (cb)->
agent
.get '/v1/gamer/outline'
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
convertTo: (network, id, secret, cb)->
agent
.post '/v1/gamer/convert'
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.send {network, id, secret}
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
link: (network, id, secret, cb)->
agent
.post 'v1/gamer/link'
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.send {network, id, secret}
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
unlink: (network, cb)->
agent
.post 'v1/gamer/unlink'
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.send {network}
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
logout: (cb)->
agent
.post '/v1/gamer/logout'
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
changeEmail: (newEmailAddress, cb)->
agent
.post '/v1/gamer/email'
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.send {email:newEmailAddress}
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
changePassword: (newPassword, cb)->
agent
.post '/v1/gamer/password'
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.send {password:PI:PASSWORD:<PASSWORD>END_PI}
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
listUsers: (filter, limit, skip, cb)->
agent
.get "/v1/gamer?q=#{encodeURIComponent(filter)}&limit=#{limit}&skip=#{skip}"
.use prefixer
.set appCredentials
.auth creds.gamer_id, creds.gamer_secret
.end (err, res)->
if err? then cb(err)
else
if res.error then cb new ClanError res.status, res.body
else cb null, res.body
privateDomain: 'private'
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9994080066680908,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-vm-function-declaration.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
vm = require("vm")
o = vm.createContext(console: console)
# This triggers the setter callback in node_contextify.cc
code = "var a = function() {};\n"
# but this does not, since function decls are defineProperties,
# not simple sets.
code += "function b(){}\n"
# Grab the global b function as the completion value, to ensure that
# we are getting the global function, and not some other thing
code += "(function(){return this})().b;\n"
res = vm.runInContext(code, o, "test")
assert.equal typeof res, "function", "result should be function"
assert.equal res.name, "b", "res should be named b"
assert.equal typeof o.a, "function", "a should be function"
assert.equal typeof o.b, "function", "b should be function"
assert.equal res, o.b, "result should be global b function"
console.log "ok"
| 64794 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
vm = require("vm")
o = vm.createContext(console: console)
# This triggers the setter callback in node_contextify.cc
code = "var a = function() {};\n"
# but this does not, since function decls are defineProperties,
# not simple sets.
code += "function b(){}\n"
# Grab the global b function as the completion value, to ensure that
# we are getting the global function, and not some other thing
code += "(function(){return this})().b;\n"
res = vm.runInContext(code, o, "test")
assert.equal typeof res, "function", "result should be function"
assert.equal res.name, "b", "res should be named b"
assert.equal typeof o.a, "function", "a should be function"
assert.equal typeof o.b, "function", "b should be function"
assert.equal res, o.b, "result should be global b function"
console.log "ok"
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
vm = require("vm")
o = vm.createContext(console: console)
# This triggers the setter callback in node_contextify.cc
code = "var a = function() {};\n"
# but this does not, since function decls are defineProperties,
# not simple sets.
code += "function b(){}\n"
# Grab the global b function as the completion value, to ensure that
# we are getting the global function, and not some other thing
code += "(function(){return this})().b;\n"
res = vm.runInContext(code, o, "test")
assert.equal typeof res, "function", "result should be function"
assert.equal res.name, "b", "res should be named b"
assert.equal typeof o.a, "function", "a should be function"
assert.equal typeof o.b, "function", "b should be function"
assert.equal res, o.b, "result should be global b function"
console.log "ok"
|
[
{
"context": "MemoryDataSource(@records)\n @let 'records', -> ['Larry', 'Curly', 'Moe']\n\n describe 'without any filter",
"end": 854,
"score": 0.9489606618881226,
"start": 849,
"tag": "NAME",
"value": "Larry"
},
{
"context": "aSource(@records)\n @let 'records', -> ['Larry', 'Curl... | spec/javascripts/filtering_data_source_spec.js.coffee | beeflamian/shuttle | 327 | # Copyright 2014 Square Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#= require filtering_data_source
#= require memory_data_source
describe 'FilteringDataSource', ->
@let 'dataSource', -> new FilteringDataSource(@baseDataSource)
@let 'baseDataSource', -> new MemoryDataSource(@records)
@let 'records', -> ['Larry', 'Curly', 'Moe']
describe 'without any filters', ->
it 'returns the same records returned by the underlying data source', ->
expectPromise(@dataSource.fetch(2)).toResolveWith(['Larry', 'Curly'])
it 'returns no more records when there are no more to be had', ->
@dataSource.fetch(3)
expectPromise(@dataSource.fetch(3)).toResolveWith([])
it 'does not rewind past the first record', ->
@dataSource.fetch(3)
@dataSource.rewind(1000)
expectPromise(@dataSource.fetch(3)).toResolveWith(['Larry', 'Curly', 'Moe'])
it 'fetches the lesser of the given limit and what is available', ->
expectPromise(@dataSource.fetch(100)).toResolveWith(['Larry', 'Curly', 'Moe'])
describe 'with a filter', ->
beforeEach ->
@dataSource.applyFilter 'short names', (name) -> name.length <= 3
it 'returns only those records matching the filter', ->
expectPromise(@dataSource.fetch(3)).toResolveWith(['Moe'])
it 'fetches multiple times if needed to get enough matching records', ->
spyOn(@baseDataSource, 'fetch').andCallThrough()
expectPromise(@dataSource.fetch(1)).toResolveWith(['Moe'])
expect(@baseDataSource.fetch.callCount).toEqual(3)
it 'removing the filter rewinds to the beginning', ->
@dataSource.fetch(2)
@dataSource.removeFilter 'short names'
expectPromise(@dataSource.fetch(1)).toResolveWith(['Moe'])
it 'can rewind a specific number of records', ->
@dataSource.fetch(1)
@dataSource.rewind(1)
expectPromise(@dataSource.fetch(1)).toResolveWith(['Moe'])
it 'does not rewind past the first record', ->
@dataSource.fetch(1)
@dataSource.rewind(1000)
expectPromise(@dataSource.fetch(1)).toResolveWith(['Moe'])
describe 'when it would fetch more than it needs from the base data source', ->
@let 'records', -> ['Moe', 'John', 'Sam', 'Abe']
it 'fetches multiple times from the base data store', ->
spyOn(@baseDataSource, 'fetch').andCallThrough()
expectPromise(@dataSource.fetch(2)).toResolveWith(['Moe', 'Sam'])
expect(@baseDataSource.fetch.callCount).toEqual(2)
it 'rewinds to the appropriate offset after having fetched too many', ->
spyOn(@dataSource, 'rewind').andCallThrough()
@dataSource.fetch(2)
expectPromise(@dataSource.fetch(1)).toResolveWith(['Abe'])
runs => expect(@dataSource.rewind).toHaveBeenCalledWith(1)
| 60830 | # Copyright 2014 Square Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#= require filtering_data_source
#= require memory_data_source
describe 'FilteringDataSource', ->
@let 'dataSource', -> new FilteringDataSource(@baseDataSource)
@let 'baseDataSource', -> new MemoryDataSource(@records)
@let 'records', -> ['<NAME>', '<NAME>', '<NAME>']
describe 'without any filters', ->
it 'returns the same records returned by the underlying data source', ->
expectPromise(@dataSource.fetch(2)).toResolveWith(['<NAME>', '<NAME>'])
it 'returns no more records when there are no more to be had', ->
@dataSource.fetch(3)
expectPromise(@dataSource.fetch(3)).toResolveWith([])
it 'does not rewind past the first record', ->
@dataSource.fetch(3)
@dataSource.rewind(1000)
expectPromise(@dataSource.fetch(3)).toResolveWith(['<NAME>', '<NAME>', '<NAME>'])
it 'fetches the lesser of the given limit and what is available', ->
expectPromise(@dataSource.fetch(100)).toResolveWith(['<NAME>', '<NAME>', '<NAME>'])
describe 'with a filter', ->
beforeEach ->
@dataSource.applyFilter 'short names', (name) -> name.length <= 3
it 'returns only those records matching the filter', ->
expectPromise(@dataSource.fetch(3)).toResolveWith(['Moe'])
it 'fetches multiple times if needed to get enough matching records', ->
spyOn(@baseDataSource, 'fetch').andCallThrough()
expectPromise(@dataSource.fetch(1)).toResolveWith(['<NAME>'])
expect(@baseDataSource.fetch.callCount).toEqual(3)
it 'removing the filter rewinds to the beginning', ->
@dataSource.fetch(2)
@dataSource.removeFilter 'short names'
expectPromise(@dataSource.fetch(1)).toResolveWith(['<NAME>'])
it 'can rewind a specific number of records', ->
@dataSource.fetch(1)
@dataSource.rewind(1)
expectPromise(@dataSource.fetch(1)).toResolveWith(['<NAME>'])
it 'does not rewind past the first record', ->
@dataSource.fetch(1)
@dataSource.rewind(1000)
expectPromise(@dataSource.fetch(1)).toResolveWith(['<NAME>'])
describe 'when it would fetch more than it needs from the base data source', ->
@let 'records', -> ['<NAME>', '<NAME>', '<NAME>', '<NAME>']
it 'fetches multiple times from the base data store', ->
spyOn(@baseDataSource, 'fetch').andCallThrough()
expectPromise(@dataSource.fetch(2)).toResolveWith(['<NAME>', '<NAME>'])
expect(@baseDataSource.fetch.callCount).toEqual(2)
it 'rewinds to the appropriate offset after having fetched too many', ->
spyOn(@dataSource, 'rewind').andCallThrough()
@dataSource.fetch(2)
expectPromise(@dataSource.fetch(1)).toResolveWith(['<NAME>'])
runs => expect(@dataSource.rewind).toHaveBeenCalledWith(1)
| true | # Copyright 2014 Square Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#= require filtering_data_source
#= require memory_data_source
describe 'FilteringDataSource', ->
@let 'dataSource', -> new FilteringDataSource(@baseDataSource)
@let 'baseDataSource', -> new MemoryDataSource(@records)
@let 'records', -> ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI']
describe 'without any filters', ->
it 'returns the same records returned by the underlying data source', ->
expectPromise(@dataSource.fetch(2)).toResolveWith(['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI'])
it 'returns no more records when there are no more to be had', ->
@dataSource.fetch(3)
expectPromise(@dataSource.fetch(3)).toResolveWith([])
it 'does not rewind past the first record', ->
@dataSource.fetch(3)
@dataSource.rewind(1000)
expectPromise(@dataSource.fetch(3)).toResolveWith(['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI'])
it 'fetches the lesser of the given limit and what is available', ->
expectPromise(@dataSource.fetch(100)).toResolveWith(['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI'])
describe 'with a filter', ->
beforeEach ->
@dataSource.applyFilter 'short names', (name) -> name.length <= 3
it 'returns only those records matching the filter', ->
expectPromise(@dataSource.fetch(3)).toResolveWith(['Moe'])
it 'fetches multiple times if needed to get enough matching records', ->
spyOn(@baseDataSource, 'fetch').andCallThrough()
expectPromise(@dataSource.fetch(1)).toResolveWith(['PI:NAME:<NAME>END_PI'])
expect(@baseDataSource.fetch.callCount).toEqual(3)
it 'removing the filter rewinds to the beginning', ->
@dataSource.fetch(2)
@dataSource.removeFilter 'short names'
expectPromise(@dataSource.fetch(1)).toResolveWith(['PI:NAME:<NAME>END_PI'])
it 'can rewind a specific number of records', ->
@dataSource.fetch(1)
@dataSource.rewind(1)
expectPromise(@dataSource.fetch(1)).toResolveWith(['PI:NAME:<NAME>END_PI'])
it 'does not rewind past the first record', ->
@dataSource.fetch(1)
@dataSource.rewind(1000)
expectPromise(@dataSource.fetch(1)).toResolveWith(['PI:NAME:<NAME>END_PI'])
describe 'when it would fetch more than it needs from the base data source', ->
@let 'records', -> ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI']
it 'fetches multiple times from the base data store', ->
spyOn(@baseDataSource, 'fetch').andCallThrough()
expectPromise(@dataSource.fetch(2)).toResolveWith(['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI'])
expect(@baseDataSource.fetch.callCount).toEqual(2)
it 'rewinds to the appropriate offset after having fetched too many', ->
spyOn(@dataSource, 'rewind').andCallThrough()
@dataSource.fetch(2)
expectPromise(@dataSource.fetch(1)).toResolveWith(['PI:NAME:<NAME>END_PI'])
runs => expect(@dataSource.rewind).toHaveBeenCalledWith(1)
|
[
{
"context": "= yield utils.initUser()\n yield utils.loginUser(@user)\n prepaid = new Prepaid({creator: @user.id, ty",
"end": 2218,
"score": 0.9984169006347656,
"start": 2213,
"tag": "USERNAME",
"value": "@user"
},
{
"context": "ginUser(@user)\n prepaid = new Prepaid({creato... | spec/server/functional/prepaid.spec.coffee | johanvl/codecombat | 2 | require '../common'
config = require '../../../server_config'
moment = require 'moment'
{findStripeSubscription} = require '../../../server/lib/utils'
async = require 'async'
nockUtils = require '../nock-utils'
utils = require '../utils'
Promise = require 'bluebird'
Payment = require '../../../server/models/Payment'
Prepaid = require '../../../server/models/Prepaid'
User = require '../../../server/models/User'
Course = require '../../../server/models/Course'
CourseInstance = require '../../../server/models/CourseInstance'
request = require '../request'
describe 'POST /db/prepaid', ->
beforeEach utils.wrap (done) ->
yield utils.clearModels([User, Prepaid])
admin = yield utils.initAdmin()
yield utils.loginUser(admin)
done()
it 'creates a new prepaid for type "course"', utils.wrap (done) ->
user = yield utils.initUser()
[res, body] = yield request.postAsync({url: getURL('/db/prepaid'), json: {
type: 'course'
creator: user.id
}})
expect(res.statusCode).toBe(201)
prepaid = yield Prepaid.findById(res.body._id)
expect(prepaid).toBeDefined()
expect(prepaid.get('creator').equals(user._id)).toBe(true)
expect(prepaid.get('code')).toBeDefined()
done()
it 'does not work for non-admins', utils.wrap (done) ->
user = yield utils.initUser()
yield utils.loginUser(user)
[res, body] = yield request.postAsync({url: getURL('/db/prepaid'), json: {
type: 'course'
creator: user.id
}})
expect(res.statusCode).toBe(403)
done()
it 'accepts start and end dates', utils.wrap (done) ->
user = yield utils.initUser()
[res, body] = yield request.postAsync({url: getURL('/db/prepaid'), json: {
type: 'course'
creator: user.id
startDate: new Date().toISOString(2001,1,1)
endDate: new Date().toISOString(2010,1,1)
}})
expect(res.statusCode).toBe(201)
prepaid = yield Prepaid.findById(res.body._id)
expect(prepaid).toBeDefined()
expect(prepaid.get('startDate')).toBeDefined()
expect(prepaid.get('endDate')).toBeDefined()
done()
describe 'GET /db/prepaid', ->
beforeEach utils.wrap (done) ->
@user = yield utils.initUser()
yield utils.loginUser(@user)
prepaid = new Prepaid({creator: @user.id, type: 'course'})
yield prepaid.save()
prepaid = new Prepaid({creator: @user.id, type: 'starter_license'})
yield prepaid.save()
prepaid = new Prepaid({creator: @user.id, type: 'terminal_subscription'})
yield prepaid.save()
prepaid = new Prepaid({creator: @user.id, type: 'subscription'})
yield prepaid.save()
done()
describe 'when creator param', ->
it 'returns only course and starter_license prepaids for creator', utils.wrap (done) ->
[res, body] = yield request.getAsync({url: getURL("/db/prepaid?creator=#{@user.id}"), json: true})
expect(body.length).toEqual(2)
done()
describe 'when creator and allTypes=true', ->
it 'returns all for creator', utils.wrap (done) ->
[res, body] = yield request.getAsync({url: getURL("/db/prepaid?creator=#{@user.id}&allTypes=true"), json: true})
expect(body.length).toEqual(4)
done()
describe 'GET /db/prepaid/:handle/creator', ->
beforeEach utils.wrap (done) ->
yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
@creator = yield utils.initUser({role: 'teacher'})
@joiner = yield utils.initUser({role: 'teacher'})
@admin = yield utils.initAdmin()
yield utils.loginUser(@admin)
@prepaid = yield utils.makePrepaid({ creator: @creator.id })
yield utils.loginUser(@creator)
yield utils.addJoinerToPrepaid(@prepaid, @joiner)
@url = getURL("/db/prepaid/#{@prepaid.id}/creator")
done()
describe 'when the prepaid ID is wrong', ->
beforeEach utils.wrap (done) ->
yield utils.loginUser(@creator)
@url = getURL("/db/prepaid/123456789012345678901234/creator")
done()
it 'returns a NotFound error', utils.wrap (done) ->
[res, body] = yield request.getAsync({url: @url, json: true})
expect(res.statusCode).toBe(404)
done()
describe 'when user is the creator', ->
beforeEach utils.wrap (done) ->
yield utils.loginUser(@creator)
done()
it 'returns only course and starter_license prepaids for creator', utils.wrap (done) ->
[res, body] = yield request.getAsync({url: @url, json: true})
expect(res.statusCode).toBe(200)
expect(body.email).toEqual(@creator.email)
expect(body.name).toEqual(@creator.name)
expect(body.firstName).toEqual(@creator.firstName)
expect(body.lastName).toEqual(@creator.lastName)
done()
describe 'when user is a joiner', ->
beforeEach utils.wrap (done) ->
yield utils.loginUser(@joiner)
done()
it 'returns only course and starter_license prepaids for creator', utils.wrap (done) ->
[res, body] = yield request.getAsync({url: @url, json: true})
expect(res.statusCode).toBe(200)
expect(body.email).toEqual(@creator.email)
expect(body.name).toEqual(@creator.name)
expect(body.firstName).toEqual(@creator.firstName)
expect(body.lastName).toEqual(@creator.lastName)
done()
describe 'when user is not a teacher', ->
beforeEach utils.wrap (done) ->
@user = yield utils.initUser()
yield utils.loginUser(@user)
done()
it 'returns a Forbidden Error', utils.wrap (done) ->
[res, body] = yield request.getAsync({url: @url, json: true})
expect(res.statusCode).toBe(403)
expect(body.email).toBeUndefined()
done()
describe 'when user is neither the creator nor joiner', ->
beforeEach utils.wrap (done) ->
@user = yield utils.initUser({role: 'teacher'})
yield utils.loginUser(@user)
done()
it 'returns a Forbidden Error', utils.wrap (done) ->
[res, body] = yield request.getAsync({url: @url, json: true})
expect(res.statusCode).toBe(403)
expect(body.email).toBeUndefined()
done()
describe 'GET /db/prepaid/:handle/joiners', ->
beforeEach utils.wrap (done) ->
yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
@creator = yield utils.initUser({role: 'teacher'})
@joiner = yield utils.initUser({role: 'teacher', firstName: 'joiner', lastName: 'one'})
@joiner2 = yield utils.initUser({role: 'teacher', firstName: 'joiner', lastName: 'two'})
@admin = yield utils.initAdmin()
yield utils.loginUser(@admin)
@prepaid = yield utils.makePrepaid({ creator: @creator.id })
yield utils.loginUser(@creator)
yield utils.addJoinerToPrepaid(@prepaid, @joiner)
yield utils.addJoinerToPrepaid(@prepaid, @joiner2)
@url = getURL("/db/prepaid/#{@prepaid.id}/joiners")
done()
describe 'when user is the creator', ->
beforeEach utils.wrap (done) ->
yield utils.loginUser(@creator)
done()
it 'returns an array of users', utils.wrap (done) ->
[res, body] = yield request.getAsync({url: @url, json: true})
expect(res.statusCode).toBe(200)
expect(body.length).toBe(2)
expect(body[0]._id).toEqual(@joiner._id+'')
expect(_.omit(body[0], '_id')).toEqual(_.pick(@joiner.toObject(), 'name', 'email', 'firstName', 'lastName'))
expect(_.omit(body[1], '_id')).toEqual(_.pick(@joiner2.toObject(), 'name', 'email', 'firstName', 'lastName'))
done()
describe 'when user is not a teacher', ->
beforeEach utils.wrap (done) ->
@user = yield utils.initUser()
yield utils.loginUser(@user)
done()
it 'returns a Forbidden Error', utils.wrap (done) ->
[res, body] = yield request.getAsync({url: @url, json: true})
expect(res.statusCode).toBe(403)
expect(body.email).toBeUndefined()
done()
describe 'when user is not the creator', ->
beforeEach utils.wrap (done) ->
yield utils.loginUser(@joiner)
done()
it 'returns a Forbidden Error', utils.wrap (done) ->
[res, body] = yield request.getAsync({url: @url, json: true})
expect(res.statusCode).toBe(403)
expect(body.email).toBeUndefined()
done()
describe 'when user is neither the creator nor joiner', ->
beforeEach utils.wrap (done) ->
@user = yield utils.initUser({role: 'teacher'})
yield utils.loginUser(@user)
done()
it 'returns a Forbidden Error', utils.wrap (done) ->
[res, body] = yield request.getAsync({url: @url, json: true})
expect(res.statusCode).toBe(403)
expect(body.email).toBeUndefined()
done()
describe 'GET /db/prepaid/:handle', ->
it 'populates startDate and endDate with default values', utils.wrap (done) ->
prepaid = new Prepaid({type: 'course' })
yield prepaid.save()
[res, body] = yield request.getAsync({url: getURL("/db/prepaid/#{prepaid.id}"), json: true})
expect(body.endDate).toBe(Prepaid.DEFAULT_END_DATE)
expect(body.startDate).toBe(Prepaid.DEFAULT_START_DATE)
done()
describe 'POST /db/prepaid/:handle/redeemers', ->
beforeEach utils.wrap (done) ->
yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
@teacher = yield utils.initUser({role: 'teacher'})
@admin = yield utils.initAdmin()
yield utils.loginUser(@admin)
@prepaid = yield utils.makePrepaid({ creator: @teacher.id })
yield utils.loginUser(@teacher)
@student = yield utils.initUser()
@url = getURL("/db/prepaid/#{@prepaid.id}/redeemers")
done()
it 'adds a given user to the redeemers property', utils.wrap (done) ->
[res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
expect(body.redeemers.length).toBe(1)
expect(res.statusCode).toBe(201)
prepaid = yield Prepaid.findById(body._id)
expect(prepaid.get('redeemers').length).toBe(1)
@student = yield User.findById(@student.id)
expect(@student.get('coursePrepaid')._id.equals(@prepaid._id)).toBe(true)
expect(@student.get('role')).toBe('student')
done()
describe 'when user is a joiner', ->
  # FIX(review): this hook was `beforeEach ->` containing `yield`, which
  # CoffeeScript compiles to a generator function. Jasmine calls the hook and
  # receives a generator object it never iterates, so none of this setup ran
  # (no joiner created, no share, no login) and the hook never synchronized.
  # Wrap it with utils.wrap and signal completion, like every other async
  # hook in this file.
  beforeEach utils.wrap (done) ->
    @joiner = yield utils.initUser({role: 'teacher', firstName: 'joiner', lastName: 'one'})
    yield utils.loginUser(@admin)
    # The creator (@teacher) must be the active session to share the license.
    yield utils.loginUser(@teacher)
    yield utils.addJoinerToPrepaid(@prepaid, @joiner)
    yield utils.loginUser(@joiner)
    done()

  it 'adds a given user to the redeemers property', utils.wrap (done) ->
    # A joiner may redeem seats exactly like the creator.
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
    expect(body.redeemers.length).toBe(1)
    expect(res.statusCode).toBe(201)
    prepaid = yield Prepaid.findById(body._id)
    expect(prepaid.get('redeemers').length).toBe(1)
    @student = yield User.findById(@student.id)
    expect(@student.get('coursePrepaid')._id.equals(@prepaid._id)).toBe(true)
    expect(@student.get('role')).toBe('student')
    done()
it 'returns 403 if maxRedeemers is reached', utils.wrap (done) ->
  # A prepaid with zero seats cannot be redeemed at all.
  admin = yield utils.initAdmin()
  yield utils.loginUser(admin)
  prepaid = yield utils.makePrepaid({ creator: @teacher.id, maxRedeemers: 0 })
  url = getURL("/db/prepaid/#{prepaid.id}/redeemers")
  yield utils.loginUser(@teacher)
  [res, body] = yield request.postAsync({uri: url, json: { userID: @student.id } })
  expect(res.statusCode).toBe(403)
  expect(res.body.message).toBe('Too many redeemers')
  done()

it 'returns 403 unless the user is the "creator" or a joiner', utils.wrap (done) ->
  @otherTeacher = yield utils.initUser({role: 'teacher'})
  yield utils.loginUser(@otherTeacher)
  [res, body] = yield request.postAsync({uri: @url, json: { userID: @student.id } })
  expect(res.statusCode).toBe(403)
  expect(res.body.message).toBe('You may not redeem licenses from this prepaid')
  done()

it 'returns 403 if the prepaid is expired', utils.wrap (done) ->
  admin = yield utils.initAdmin()
  yield utils.loginUser(admin)
  prepaid = yield utils.makePrepaid({ creator: @teacher.id, endDate: moment().subtract(1, 'month').toISOString() })
  url = getURL("/db/prepaid/#{prepaid.id}/redeemers")
  yield utils.loginUser(@teacher)
  [res, body] = yield request.postAsync({uri: url, json: { userID: @student.id } })
  expect(res.statusCode).toBe(403)
  expect(res.body.message).toBe('This prepaid is expired')
  done()

it 'is idempotent across prepaids collection', utils.wrap (done) ->
  # A student already holding some other coursePrepaid is not added again;
  # the server answers 200 without touching this prepaid's redeemers.
  student = yield utils.initUser({ coursePrepaid: { _id: new Prepaid()._id } })
  [res, body] = yield request.postAsync({uri: @url, json: { userID: student.id } })
  expect(res.statusCode).toBe(200)
  expect(body.redeemers.length).toBe(0)
  done()

it 'is idempotent to itself', utils.wrap (done) ->
  # First POST creates (201); an identical second POST is a no-op (200).
  [res, body] = yield request.postAsync({uri: @url, json: { userID: @student.id } })
  expect(body.redeemers?.length).toBe(1)
  expect(res.statusCode).toBe(201)
  [res, body] = yield request.postAsync({uri: @url, json: { userID: @student.id } })
  expect(body.redeemers?.length).toBe(1)
  expect(res.statusCode).toBe(200)
  prepaid = yield Prepaid.findById(body._id)
  expect(prepaid.get('redeemers').length).toBe(1)
  student = yield User.findById(@student.id)
  expect(student.get('coursePrepaid')._id.equals(@prepaid._id)).toBe(true)
  done()

it 'updates the user if their license is expired', utils.wrap (done) ->
  # A student whose current license already ended can be moved to @prepaid.
  yield utils.loginUser(@admin)
  prepaid = yield utils.makePrepaid({
    creator: @teacher.id
    startDate: moment().subtract(2, 'month').toISOString()
    endDate: moment().subtract(1, 'month').toISOString()
  })
  @student.set('coursePrepaid', _.pick(prepaid.toObject(), '_id', 'startDate', 'endDate'))
  yield @student.save()
  yield utils.loginUser(@teacher)
  [res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
  expect(body.redeemers.length).toBe(1)
  expect(res.statusCode).toBe(201)
  student = yield User.findById(@student.id)
  expect(student.get('coursePrepaid')._id.equals(@prepaid._id)).toBe(true)
  done()

it 'replaces a starter license with a full license', utils.wrap (done) ->
  # Upgrading: a live starter_license is superseded by the full course prepaid.
  yield utils.loginUser(@admin)
  oldPrepaid = yield utils.makePrepaid({
    creator: @teacher.id
    startDate: moment().subtract(2, 'month').toISOString()
    endDate: moment().add(4, 'month').toISOString()
    type: 'starter_license'
  })
  @student.set('coursePrepaid', _.pick(oldPrepaid.toObject(), '_id', 'startDate', 'endDate', 'type'))
  yield @student.save()
  yield utils.loginUser(@teacher)
  [res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
  expect(body.redeemers.length).toBe(1)
  expect(res.statusCode).toBe(201)
  prepaid = yield Prepaid.findById(@prepaid._id)
  expect(prepaid.get('redeemers').length).toBe(1)
  student = yield User.findById(@student.id)
  expect(student.get('coursePrepaid')._id.equals(@prepaid._id)).toBe(true)
  done()

it 'does NOT replace a full license with a starter license', utils.wrap (done) ->
  # Downgrading is refused: the student keeps the full-course prepaid and
  # the starter license gains no redeemer (200 no-op, not 201).
  yield utils.loginUser(@admin)
  @prepaid.set({
    creator: @teacher.id
    startDate: moment().subtract(2, 'month').toISOString()
    endDate: moment().add(4, 'month').toISOString()
    type: 'starter_license'
  })
  yield @prepaid.save()
  oldPrepaid = yield utils.makePrepaid({
    creator: @teacher.id
    startDate: moment().subtract(2, 'month').toISOString()
    endDate: moment().add(10, 'month').toISOString()
    type: 'course'
  })
  yield oldPrepaid.redeem(@student)
  yield utils.loginUser(@teacher)
  student = yield User.findById(@student.id)
  expect(student.get('coursePrepaid')._id.equals(oldPrepaid._id)).toBe(true)
  expect(student.get('coursePrepaid')._id.toString()).toBe(oldPrepaid._id.toString())
  [res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
  expect(body.redeemers.length).toBe(0)
  expect(res.statusCode).toBe(200)
  student = yield User.findById(@student.id)
  expect(student.get('coursePrepaid')._id.equals(oldPrepaid._id)).toBe(true)
  expect(student.get('coursePrepaid')._id.toString()).toBe(oldPrepaid._id.toString())
  expect((yield Prepaid.findById(oldPrepaid._id)).get('redeemers').length).toBe(1)
  done()

it 'adds includedCourseIDs to the user when redeeming', utils.wrap (done) ->
  # starter_license prepaids carry includedCourseIDs through to the user doc.
  yield utils.loginUser(@admin)
  @prepaid.set({
    type: 'starter_license'
    includedCourseIDs: ['course_1', 'course_2']
  })
  yield @prepaid.save()
  yield utils.loginUser(@teacher)
  [res, body] = yield request.postAsync { uri: @url, json: { userID: @student.id } }
  expect(body.redeemers.length).toBe(1)
  expect(res.statusCode).toBe(201)
  student = yield User.findById(@student.id)
  expect(student.get('coursePrepaid')?.includedCourseIDs).toEqual(['course_1', 'course_2'])
  expect(student.get('coursePrepaid')?.type).toEqual('starter_license')
  done()
describe 'when user is a joiner on a shared license', ->
  beforeEach utils.wrap (done) ->
    # Self-contained setup: creator shares the prepaid with @joiner, and the
    # joiner is the active session for the request below.
    yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
    @creator = yield utils.initUser({role: 'teacher'})
    @joiner = yield utils.initUser({role: 'teacher'})
    @admin = yield utils.initAdmin()
    yield utils.loginUser(@admin)
    @prepaid = yield utils.makePrepaid({ creator: @creator.id })
    yield utils.loginUser(@creator)
    yield utils.addJoinerToPrepaid(@prepaid, @joiner)
    yield utils.loginUser(@joiner)
    @student = yield utils.initUser()
    @url = getURL("/db/prepaid/#{@prepaid.id}/redeemers")
    done()

  it 'allows teachers with shared licenses to redeem', utils.wrap (done) ->
    prepaid = yield Prepaid.findById(@prepaid.id)
    expect(prepaid.get('redeemers').length).toBe(0)
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
    expect(body.redeemers.length).toBe(1)
    expect(res.statusCode).toBe(201)
    prepaid = yield Prepaid.findById(body._id)
    expect(prepaid.get('redeemers').length).toBe(1)
    @student = yield User.findById(@student.id)
    expect(@student.get('coursePrepaid')._id.equals(@prepaid._id)).toBe(true)
    expect(@student.get('role')).toBe('student')
    done()
describe 'DELETE /db/prepaid/:handle/redeemers', ->
  beforeEach utils.wrap (done) ->
    # Set up a prepaid and redeem one seat for @student so each test starts
    # with exactly one redeemer to revoke.
    yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
    @teacher = yield utils.initUser({role: 'teacher'})
    @admin = yield utils.initAdmin()
    yield utils.loginUser(@admin)
    @prepaid = yield utils.makePrepaid({ creator: @teacher.id })
    yield utils.loginUser(@teacher)
    @student = yield utils.initUser()
    @url = getURL("/db/prepaid/#{@prepaid.id}/redeemers")
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
    expect(res.statusCode).toBe(201)
    done()

  it 'removes a given user to the redeemers property', utils.wrap (done) ->
    # Revoking clears both the prepaid's redeemers list and the student's
    # coursePrepaid pointer.
    prepaid = yield Prepaid.findById(@prepaid.id)
    expect(prepaid.get('redeemers').length).toBe(1)
    [res, body] = yield request.delAsync {uri: @url, json: { userID: @student.id } }
    expect(body.redeemers.length).toBe(0)
    expect(res.statusCode).toBe(200)
    prepaid = yield Prepaid.findById(body._id)
    expect(prepaid.get('redeemers').length).toBe(0)
    student = yield User.findById(@student.id)
    expect(student.get('coursePrepaid')).toBeUndefined()
    done()

  it 'returns 403 unless the user is the "creator"', utils.wrap (done) ->
    otherTeacher = yield utils.initUser({role: 'teacher'})
    yield utils.loginUser(otherTeacher)
    [res, body] = yield request.delAsync {uri: @url, json: { userID: @student.id } }
    expect(res.statusCode).toBe(403)
    done()

  it 'returns 422 unless the target user is in "redeemers"', utils.wrap (done) ->
    otherStudent = yield utils.initUser({role: 'student'})
    [res, body] = yield request.delAsync {uri: @url, json: { userID: otherStudent.id } }
    expect(res.statusCode).toBe(422)
    done()

  # NOTE(review): this spec passes utils.wrap a generator with no `done`
  # parameter — presumably utils.wrap also supports promise-style completion;
  # confirm against its implementation (other hooks here use the done form).
  it 'returns 403 if the prepaid is a starter license', utils.wrap ->
    yield @prepaid.update({$set: {type: 'starter_license'}})
    [res, body] = yield request.delAsync {uri: @url, json: { userID: @student.id } }
    expect(res.statusCode).toBe(403)

  describe 'when user is a joiner on a shared license', ->
    beforeEach utils.wrap (done) ->
      yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
      @creator = yield utils.initUser({role: 'teacher'})
      @joiner = yield utils.initUser({role: 'teacher'})
      @admin = yield utils.initAdmin()
      yield utils.loginUser(@admin)
      @prepaid = yield utils.makePrepaid({ creator: @creator.id })
      yield utils.loginUser(@creator)
      yield utils.addJoinerToPrepaid(@prepaid, @joiner)
      yield utils.loginUser(@joiner)
      @student = yield utils.initUser()
      @url = getURL("/db/prepaid/#{@prepaid.id}/redeemers")
      [res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
      expect(res.statusCode).toBe(201)
      done()

    it 'allows teachers with shared licenses to revoke', utils.wrap (done) ->
      prepaid = yield Prepaid.findById(@prepaid.id)
      expect(prepaid.get('redeemers').length).toBe(1)
      [res, body] = yield request.delAsync {uri: @url, json: { userID: @student.id } }
      expect(body.redeemers.length).toBe(0)
      expect(res.statusCode).toBe(200)
      prepaid = yield Prepaid.findById(body._id)
      expect(prepaid.get('redeemers').length).toBe(0)
      student = yield User.findById(@student.id)
      expect(student.get('coursePrepaid')).toBeUndefined()
      done()
describe 'POST /db/prepaid/:handle/joiners', ->
  beforeEach utils.wrap (done) ->
    # The creator (@teacher) is the active session; @joiner is a teacher to share with.
    yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
    @teacher = yield utils.initUser({role: 'teacher'})
    @admin = yield utils.initAdmin()
    yield utils.loginUser(@admin)
    @prepaid = yield utils.makePrepaid({ creator: @teacher.id })
    yield utils.loginUser(@teacher)
    @joiner = yield utils.initUser({role: 'teacher'})
    @url = getURL("/db/prepaid/#{@prepaid.id}/joiners")
    done()

  it 'adds a given user to the joiners property', utils.wrap (done) ->
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @joiner.id } }
    expect(res.statusCode).toBe(201)
    prepaid = yield Prepaid.findById(body._id)
    expect(prepaid.get('joiners').length).toBe(1)
    expect(prepaid.get('joiners')[0].userID + '').toBe(@joiner.id)
    done()

  describe 'when a user has already been added to joiners', ->
    it "doesn't add a user twice", utils.wrap (done) ->
      # Second identical share returns 422 with an i18n key and leaves one entry.
      [res, body] = yield request.postAsync {uri: @url, json: { userID: @joiner.id } }
      expect(res.statusCode).toBe(201)
      [res, body] = yield request.postAsync {uri: @url, json: { userID: @joiner.id } }
      expect(res.statusCode).toBe(422)
      expect(body.i18n).toBe('share_licenses.already_shared')
      prepaid = yield Prepaid.findById(@prepaid.id)
      expect(prepaid.get('joiners').length).toBe(1)
      expect(prepaid.get('joiners')[0].userID + '').toBe(@joiner.id)
      done()

  it 'returns 403 if user is not the creator', utils.wrap (done) ->
    yield utils.loginUser(@joiner)
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @joiner.id } }
    expect(res.statusCode).toBe(403)
    done()

  it 'returns 403 if user is not a teacher', utils.wrap (done) ->
    @user = yield utils.initUser()
    yield utils.loginUser(@user)
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @joiner.id } }
    expect(res.statusCode).toBe(403)
    done()

  it 'returns 422 if joiner is not a teacher', utils.wrap (done) ->
    @nonteacher = yield utils.initUser()
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @nonteacher.id } }
    expect(res.statusCode).toBe(422)
    done()

  it 'returns 404 if prepaid is not found', utils.wrap (done) ->
    @url = getURL("/db/prepaid/123456789012345678901234/joiners")
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @joiner.id } }
    expect(res.statusCode).toBe(404)
    done()
describe 'GET /db/prepaid?creator=:id', ->
  beforeEach utils.wrap (done) ->
    # Four prepaids: one live, one owned by the admin (must not be returned),
    # one expired, and one with its dates unset to simulate unmigrated data.
    yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
    @teacher = yield utils.initUser({role: 'teacher'})
    @admin = yield utils.initAdmin()
    yield utils.loginUser(@admin)
    @prepaid = yield utils.makePrepaid({ creator: @teacher.id })
    @otherPrepaid = yield utils.makePrepaid({ creator: @admin.id })
    @expiredPrepaid = yield utils.makePrepaid({ creator: @teacher.id, endDate: moment().subtract(1, 'month').toISOString() })
    @unmigratedPrepaid = yield utils.makePrepaid({ creator: @teacher.id })
    yield @unmigratedPrepaid.update({$unset: { endDate: '', startDate: '' }})
    yield utils.loginUser(@teacher)
    done()

  it 'return all prepaids for the creator', utils.wrap (done) ->
    url = getURL("/db/prepaid?creator=#{@teacher.id}")
    [res, body] = yield request.getAsync({uri: url, json: true})
    expect(res.statusCode).toBe(200)
    expect(res.body.length).toEqual(3)
    if _.any((prepaid._id is @otherPrepaid.id for prepaid in res.body))
      fail('Found the admin prepaid in response')
    # Even the unmigrated prepaid must be served with dates filled in.
    for prepaid in res.body
      unless prepaid.startDate and prepaid.endDate
        fail('All prepaids should have start and end dates')
    expect(res.body[0]._id).toBe(@prepaid.id)
    done()

  it 'returns 403 if the user tries to view another user\'s prepaids', utils.wrap (done) ->
    anotherUser = yield utils.initUser()
    url = getURL("/db/prepaid?creator=#{anotherUser.id}")
    [res, body] = yield request.getAsync({uri: url, json: true})
    expect(res.statusCode).toBe(403)
    done()

  describe 'when includeShared is set to true', ->
    beforeEach utils.wrap (done) ->
      yield utils.loginUser(@admin)
      @joiner = yield utils.initUser({role: 'teacher'})
      @joinersPrepaid = yield utils.makePrepaid({ creator: @joiner.id })
      # NOTE(review): `joiners` is $set to a single object here, not an array
      # of entries as POST /joiners produces — verify this matches the schema.
      yield @prepaid.update({$set: { joiners: { userID: @joiner._id }}})
      yield utils.loginUser(@joiner)
      done()

    it 'returns licenses that have been shared with the user', utils.wrap (done) ->
      url = getURL("/db/prepaid?creator=#{@joiner.id}&includeShared=true")
      [res, body] = yield request.getAsync({uri: url, json: true})
      expect(res.statusCode).toBe(200)
      expect(res.body.length).toEqual(2)
      if _.any((prepaid._id is @otherPrepaid.id for prepaid in res.body))
        fail('Found the admin prepaid in response')
      for prepaid in res.body
        unless prepaid.startDate and prepaid.endDate
          fail('All prepaids should have start and end dates')
      expect(res.body[0]._id).toBe(@prepaid.id)
      done()
describe '/db/prepaid', ->
  beforeEach utils.wrap (done) ->
    yield utils.populateProducts()
    done()

  # Shared mutable state for this legacy suite: later tests read joeData and
  # joeCode that earlier tests write (see the TODO below about test coupling).
  prepaidURL = getURL('/db/prepaid')
  headers = {'X-Change-Plan': 'true'}
  joeData = null
  stripe = require('stripe')(config.stripe.secretKey)
  joeCode = null

  # Assert the shape of a freshly purchased course prepaid; for non-admin
  # purchasers, also require a matching Payment record.
  verifyCoursePrepaid = (user, prepaid, done) ->
    expect(prepaid.creator).toEqual(user.id)
    expect(prepaid.type).toEqual('course')
    expect(prepaid.maxRedeemers).toBeGreaterThan(0)
    expect(prepaid.code).toMatch(/^\w{8}$/)
    return done() if user.isAdmin()
    Payment.findOne {prepaidID: new ObjectId(prepaid._id)}, (err, payment) ->
      expect(err).toBeNull()
      expect(payment).not.toBeNull()
      expect(payment?.get('purchaser')).toEqual(user._id)
      done()

  # Same checks for subscription prepaids, plus the 'free' coupon property.
  verifySubscriptionPrepaid = (user, prepaid, done) ->
    expect(prepaid.creator).toEqual(user.id)
    expect(prepaid.type).toEqual('subscription')
    expect(prepaid.maxRedeemers).toBeGreaterThan(0)
    expect(prepaid.code).toMatch(/^\w{8}$/)
    expect(prepaid.properties?.couponID).toEqual('free')
    return done() if user.isAdmin()
    Payment.findOne {prepaidID: new ObjectId(prepaid._id)}, (err, payment) ->
      expect(err).toBeNull()
      expect(payment).not.toBeNull()
      expect(payment?.get('purchaser')).toEqual(user._id)
      done()
# NOTE(review): a test used as setup — presumably kept for ordering reasons
# in this legacy suite.
it 'Clear database', (done) ->
  clearModels [Course, CourseInstance, Payment, Prepaid, User], (err) ->
    throw err if err
    done()

it 'Anonymous creates prepaid code', (done) ->
  createPrepaid 'subscription', 1, 0, (err, res, body) ->
    expect(err).toBeNull()
    expect(res.statusCode).toBe(401)
    done()

it 'Non-admin creates prepaid code', (done) ->
  loginNewUser (user1) ->
    expect(user1.isAdmin()).toEqual(false)
    createPrepaid 'subscription', 4, 0, (err, res, body) ->
      expect(err).toBeNull()
      expect(res.statusCode).toBe(403)
      done()

it 'Admin creates prepaid code with type subscription', (done) ->
  loginNewUser (user1) ->
    user1.set('permissions', ['admin'])
    user1.save (err, user1) ->
      expect(err).toBeNull()
      expect(user1.isAdmin()).toEqual(true)
      createPrepaid 'subscription', 1, 0, (err, res, body) ->
        expect(err).toBeNull()
        expect(res.statusCode).toBe(200)
        verifySubscriptionPrepaid user1, body, done

it 'Admin creates prepaid code with type terminal_subscription', (done) ->
  loginNewUser (user1) ->
    user1.set('permissions', ['admin'])
    user1.save (err, user1) ->
      expect(err).toBeNull()
      expect(user1.isAdmin()).toEqual(true)
      createPrepaid 'terminal_subscription', 2, 3, (err, res, body) ->
        expect(err).toBeNull()
        expect(res.statusCode).toBe(200)
        expect(body.creator).toEqual(user1.id)
        expect(body.type).toEqual('terminal_subscription')
        expect(body.maxRedeemers).toEqual(2)
        expect(body.properties?.months).toEqual(3)
        expect(body.code).toMatch(/^\w{8}$/)
        done()

it 'Admin creates prepaid code with invalid type', (done) ->
  loginNewUser (user1) ->
    user1.set('permissions', ['admin'])
    user1.save (err, user1) ->
      expect(err).toBeNull()
      expect(user1.isAdmin()).toEqual(true)
      createPrepaid 'bulldozer', 1, 0, (err, res, body) ->
        expect(err).toBeNull()
        expect(res.statusCode).toBe(403)
        done()

it 'Admin creates prepaid code with no type specified', (done) ->
  loginNewUser (user1) ->
    user1.set('permissions', ['admin'])
    user1.save (err, user1) ->
      expect(err).toBeNull()
      expect(user1.isAdmin()).toEqual(true)
      createPrepaid null, 1, 0, (err, res, body) ->
        expect(err).toBeNull()
        expect(res.statusCode).toBe(403)
        done()

it 'Admin creates prepaid code with invalid maxRedeemers', (done) ->
  loginNewUser (user1) ->
    user1.set('permissions', ['admin'])
    user1.save (err, user1) ->
      expect(err).toBeNull()
      expect(user1.isAdmin()).toEqual(true)
      createPrepaid 'subscription', 0, 0, (err, res, body) ->
        expect(err).toBeNull()
        expect(res.statusCode).toBe(403)
        done()

it 'Non-admin requests /db/prepaid', (done) ->
  loginNewUser (user1) ->
    expect(user1.isAdmin()).toEqual(false)
    request.get {uri: prepaidURL}, (err, res, body) ->
      expect(err).toBeNull()
      expect(res.statusCode).toBe(403)
      done()

it 'Admin requests /db/prepaid', (done) ->
  loginNewUser (user1) ->
    user1.set('permissions', ['admin'])
    user1.save (err, user1) ->
      expect(err).toBeNull()
      expect(user1.isAdmin()).toEqual(true)
      createPrepaid 'subscription', 1, 0, (err, res, prepaid) ->
        expect(err).toBeNull()
        expect(res.statusCode).toBe(200)
        request.get {uri: prepaidURL}, (err, res, body) ->
          expect(err).toBeNull()
          expect(res.statusCode).toBe(200)
          prepaids = JSON.parse(body)
          # The freshly created prepaid must appear in the admin listing;
          # done() is called either by the verifier (found) or directly (not found).
          found = false
          for p in prepaids
            if p._id is prepaid._id
              found = true
              verifySubscriptionPrepaid user1, p, done
              break
          expect(found).toEqual(true)
          done() unless found
describe 'Purchase course', ->
  afterEach nockUtils.teardownNock
  it 'Standard user purchases a prepaid for 0 seats', (done) ->
    # Zero seats is rejected as unprocessable.
    nockUtils.setupNock 'db-prepaid-purchase-course-test-1.json', (err, nockDone) ->
      stripe.tokens.create {
        card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
      }, (err, token) ->
        loginNewUser (user1) ->
          purchasePrepaid 'course', {}, 0, token.id, (err, res, prepaid) ->
            expect(err).toBeNull()
            expect(res.statusCode).toBe(422)
            nockDone()
            done()
  it 'Standard user purchases a prepaid for 1 seat', (done) ->
    nockUtils.setupNock 'db-prepaid-purchase-course-test-2.json', (err, nockDone) ->
      stripe.tokens.create {
        card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
      }, (err, token) ->
        loginNewUser (user1) ->
          purchasePrepaid 'course', {}, 1, token.id, (err, res, prepaid) ->
            expect(err).toBeNull()
            expect(res.statusCode).toBe(200)
            verifyCoursePrepaid user1, prepaid, ->
              nockDone()
              done()
  it 'Standard user purchases a prepaid for 3 seats', (done) ->
    nockUtils.setupNock 'db-prepaid-purchase-course-test-3.json', (err, nockDone) ->
      stripe.tokens.create {
        card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
      }, (err, token) ->
        loginNewUser (user1) ->
          purchasePrepaid 'course', {}, 3, token.id, (err, res, prepaid) ->
            expect(err).toBeNull()
            expect(res.statusCode).toBe(200)
            verifyCoursePrepaid user1, prepaid, ->
              nockDone()
              done()
describe 'Purchase terminal_subscription', ->
  afterEach nockUtils.teardownNock
  it 'Anonymous submits a prepaid purchase', (done) ->
    nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-1.json', (err, nockDone) ->
      stripe.tokens.create {
        card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
      }, (err, token) ->
        logoutUser () ->
          purchasePrepaid 'terminal_subscription', months: 3, 3, token.id, (err, res, prepaid) ->
            expect(err).toBeNull()
            expect(res.statusCode).toBe(401)
            nockDone()
            done()
  it 'Should error if type isnt terminal_subscription', (done) ->
    nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-2.json', (err, nockDone) ->
      stripe.tokens.create {
        card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
      }, (err, token) ->
        loginNewUser (user1) ->
          purchasePrepaid 'subscription', months: 3, 3, token.id, (err, res, prepaid) ->
            expect(err).toBeNull()
            expect(res.statusCode).toBe(403)
            nockDone()
            done()
  it 'Should error if maxRedeemers is -1', (done) ->
    nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-3.json', (err, nockDone) ->
      stripe.tokens.create {
        card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
      }, (err, token) ->
        loginNewUser (user1) ->
          purchasePrepaid 'terminal_subscription', months: 3, -1, token.id, (err, res, prepaid) ->
            expect(err).toBeNull()
            expect(res.statusCode).toBe(422)
            nockDone()
            done()
  it 'Should error if maxRedeemers is foo', (done) ->
    nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-4.json', (err, nockDone) ->
      stripe.tokens.create {
        card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
      }, (err, token) ->
        loginNewUser (user1) ->
          purchasePrepaid 'terminal_subscription', months: 3, 'foo', token.id, (err, res, prepaid) ->
            expect(err).toBeNull()
            expect(res.statusCode).toBe(422)
            nockDone()
            done()
  it 'Should error if months is -1', (done) ->
    nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-5.json', (err, nockDone) ->
      stripe.tokens.create {
        card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
      }, (err, token) ->
        loginNewUser (user1) ->
          purchasePrepaid 'terminal_subscription', months: -1, 3, token.id, (err, res, prepaid) ->
            expect(err).toBeNull()
            expect(res.statusCode).toBe(422)
            nockDone()
            done()
  it 'Should error if months is foo', (done) ->
    nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-6.json', (err, nockDone) ->
      stripe.tokens.create {
        card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
      }, (err, token) ->
        loginNewUser (user1) ->
          purchasePrepaid 'terminal_subscription', months: 'foo', 3, token.id, (err, res, prepaid) ->
            expect(err).toBeNull()
            expect(res.statusCode).toBe(422)
            nockDone()
            done()
  it 'Should error if maxRedeemers and months are less than 3', (done) ->
    nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-7.json', (err, nockDone) ->
      stripe.tokens.create {
        card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
      }, (err, token) ->
        loginNewUser (user1) ->
          purchasePrepaid 'terminal_subscription', months: 1, 1, token.id, (err, res, prepaid) ->
            expect(err).toBeNull()
            expect(res.statusCode).toBe(403)
            nockDone()
            done()
  it 'User submits valid prepaid code purchase', (done) ->
    # Happy path: subscribe joe, then purchase a 3-seat / 3-month code.
    # Writes joeData and joeCode for the later redeem tests.
    nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-8.json', (err, nockDone) ->
      stripe.tokens.create {
        card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
      }, (err, token) ->
        stripeTokenID = token.id
        loginJoe (joe) ->
          joeData = joe.toObject()
          joeData.stripe = {
            token: stripeTokenID
            planID: 'basic'
          }
          request.put {uri: getURL('/db/user'), json: joeData, headers: headers }, (err, res, body) ->
            joeData = body
            expect(res.statusCode).toBe(200)
            expect(joeData.stripe.customerID).toBeDefined()
            expect(firstSubscriptionID = joeData.stripe.subscriptionID).toBeDefined()
            expect(joeData.stripe.planID).toBe('basic')
            expect(joeData.stripe.token).toBeUndefined()
            # TODO: is this test still valid after new token?
            stripe.tokens.create {
              card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
            }, (err, token) ->
              purchasePrepaid 'terminal_subscription', months: 3, 3, token.id, (err, res, prepaid) ->
                expect(err).toBeNull()
                expect(res.statusCode).toBe(200)
                expect(prepaid.type).toEqual('terminal_subscription')
                expect(prepaid.code).toBeDefined()
                # Saving this code for later tests
                # TODO: don't make tests dependent on each other
                joeCode = prepaid.code
                expect(prepaid.creator).toBeDefined()
                expect(prepaid.maxRedeemers).toEqual(3)
                expect(prepaid.exhausted).toBe(false)
                expect(prepaid.properties).toBeDefined()
                expect(prepaid.properties.months).toEqual(3)
                nockDone()
                done()
  it 'Should have logged a Payment with the correct amount', (done) ->
    loginJoe (joe) ->
      query =
        purchaser: joe._id
      Payment.find query, (err, payments) ->
        expect(err).toBeNull()
        expect(payments).not.toBeNull()
        expect(payments.length).toEqual(1)
        expect(payments[0].get('amount')).toEqual(900)
        done()
  it 'Anonymous cant redeem a prepaid code', (done) ->
    logoutUser () ->
      subscribeWithPrepaid joeCode, (err, res) ->
        expect(err).toBeNull()
        expect(res?.statusCode).toEqual(401)
        done()
  it 'User cant redeem a nonexistant prepaid code', (done) ->
    loginJoe (joe) ->
      subscribeWithPrepaid 'abc123', (err, res) ->
        expect(err).toBeNull()
        expect(res.statusCode).toEqual(404)
        done()
  it 'User cant redeem empty code', (done) ->
    loginJoe (joe) ->
      subscribeWithPrepaid '', (err, res) ->
        expect(err).toBeNull()
        expect(res.statusCode).toEqual(422)
        done()
  it 'Anonymous cant fetch a prepaid code', (done) ->
    expect(joeCode).not.toBeNull()
    logoutUser () ->
      fetchPrepaid joeCode, (err, res) ->
        expect(err).toBeNull()
        expect(res.statusCode).toEqual(403)
        done()
  it 'User can fetch a prepaid code', (done) ->
    expect(joeCode).not.toBeNull()
    loginJoe (joe) ->
      fetchPrepaid joeCode, (err, res, body) ->
        expect(err).toBeNull()
        expect(res.statusCode).toEqual(200)
        expect(body).toBeDefined()
        return done() unless body
        prepaid = JSON.parse(body)
        expect(prepaid.code).toEqual(joeCode)
        expect(prepaid.maxRedeemers).toEqual(3)
        expect(prepaid.properties?.months).toEqual(3)
        done()
# TODO: Move redeem subscription prepaid code tests to subscription tests file
describe 'Subscription redeem tests', ->
  afterEach nockUtils.teardownNock
  it 'Creator can redeeem a prepaid code', (done) ->
    # Depends on joeCode/joeData written by the earlier purchase test.
    nockUtils.setupNock 'db-sub-redeem-test-1.json', (err, nockDone) ->
      loginJoe (joe) ->
        expect(joeCode).not.toBeNull()
        expect(joeData.stripe?.customerID).toBeDefined()
        expect(joeData.stripe?.subscriptionID).toBeDefined()
        return done() unless joeData.stripe?.customerID
        # joe has a stripe subscription, so test if the months are added to the end of it.
        stripe.customers.retrieve joeData.stripe.customerID, (err, customer) =>
          expect(err).toBeNull()
          findStripeSubscription customer.id, subscriptionID: joeData.stripe?.subscriptionID, (err, subscription) =>
            if subscription
              stripeSubscriptionPeriodEndDate = new moment(subscription.current_period_end * 1000)
            else
              # Intentionally failing expectation: the variable is undefined here.
              expect(stripeSubscriptionPeriodEndDate).toBeDefined()
              return done()
            subscribeWithPrepaid joeCode, (err, res, result) =>
              expect(err).toBeNull()
              expect(res.statusCode).toEqual(200)
              endDate = stripeSubscriptionPeriodEndDate.add(3, 'months').toISOString().substring(0, 10)
              expect(result?.stripe?.free.substring(0,10)).toEqual(endDate)
              expect(result?.purchased?.gems).toEqual(14000)
              findStripeSubscription customer.id, subscriptionID: joeData.stripe?.subscriptionID, (err, subscription) =>
                expect(subscription).toBeNull()
                nockDone()
                done()
  it 'User can redeem a prepaid code', (done) ->
    loginSam (sam) ->
      subscribeWithPrepaid joeCode, (err, res, result) ->
        expect(err).toBeNull()
        expect(res.statusCode).toEqual(200)
        endDate = new moment().add(3, 'months').toISOString().substring(0, 10)
        expect(result?.stripe?.free.substring(0,10)).toEqual(endDate)
        expect(result?.purchased?.gems).toEqual(10500)
        done()
  it 'Wont allow the same person to redeem twice', (done) ->
    loginSam (sam) ->
      subscribeWithPrepaid joeCode, (err, res, result) ->
        expect(err).toBeNull()
        expect(res.statusCode).toEqual(403)
        done()
  it 'Will return redeemed code as part of codes list', (done) ->
    loginSam (sam) ->
      request.get "#{getURL('/db/user')}/#{sam.id}/prepaid_codes", (err, res) ->
        expect(err).toBeNull()
        expect(res.statusCode).toEqual(200)
        codes = JSON.parse res.body
        expect(codes.length).toEqual(1)
        done()
  it 'Third user can redeem a prepaid code', (done) ->
    loginNewUser (user) ->
      subscribeWithPrepaid joeCode, (err, res, result) ->
        expect(err).toBeNull()
        expect(res.statusCode).toEqual(200)
        endDate = new moment().add(3, 'months').toISOString().substring(0, 10)
        expect(result?.stripe?.free.substring(0,10)).toEqual(endDate)
        expect(result?.purchased?.gems).toEqual(10500)
        done()
  it 'Fourth user cannot redeem code', (done) ->
    # joeCode's maxRedeemers is 3, so the fourth redemption is refused.
    loginNewUser (user) ->
      subscribeWithPrepaid joeCode, (err, res, result) ->
        expect(err).toBeNull()
        expect(res.statusCode).toEqual(403)
        done()
  it 'Can fetch a list of purchased and redeemed prepaid codes', (done) ->
    nockUtils.setupNock 'db-sub-redeem-test-2.json', (err, nockDone) ->
      stripe.tokens.create {
        card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
      }, (err, token) ->
        loginNewUser (user) ->
          purchasePrepaid 'terminal_subscription', months: 1, 3, token.id, (err, res, prepaid) ->
            request.get "#{getURL('/db/user')}/#{user.id}/prepaid_codes", (err, res) ->
              expect(err).toBeNull()
              expect(res.statusCode).toEqual(200);
              codes = JSON.parse res.body
              expect(codes.length).toEqual(1)
              expect(codes[0].maxRedeemers).toEqual(3)
              expect(codes[0].properties).toBeDefined()
              expect(codes[0].properties.months).toEqual(1)
              nockDone()
              done()
it 'thwarts query injections', utils.wrap (done) ->
  user = yield utils.initUser()
  yield utils.loginUser(user)
  # A Mongo operator object must be rejected outright, never used as a query.
  code = { $exists: true }
  subscribeWithPrepaidAsync = Promise.promisify(subscribeWithPrepaid)
  res = yield subscribeWithPrepaidAsync(code)
  expect(res.statusCode).toBe(422)
  expect(res.body.message).toBe('You must provide a valid prepaid code.')
  done()

it 'enforces the maximum number of redeemers in a race condition', utils.wrap (done) ->
  # 51 concurrent users race for a 50-seat code: exactly 50 must succeed.
  nockDone = yield nockUtils.setupNockAsync 'db-sub-redeem-test-3.json'
  stripe.tokens.createAsync = Promise.promisify(stripe.tokens.create, {context: stripe.tokens})
  token = yield stripe.tokens.createAsync({
    card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
  })
  user = yield utils.initUser()
  yield utils.loginUser(user)
  codeRedeemers = 50
  codeMonths = 3
  redeemers = 51
  purchasePrepaidAsync = Promise.promisify(purchasePrepaid, {multiArgs: true})
  [res, prepaid] = yield purchasePrepaidAsync('terminal_subscription', months: codeMonths, codeRedeemers, token.id)
  expect(prepaid).toBeDefined()
  expect(prepaid.code).toBeDefined()
  # Make 'threads', which are objects that encapsulate each user and their cookies
  threads = []
  for index in [0...redeemers]
    thread = {}
    # Each thread gets its own cookie jar so its session is independent.
    thread.request = request.defaults({jar: request.jar()})
    thread.request.postAsync = Promise.promisify(thread.request.post, { context: thread.request })
    thread.user = yield utils.initUser()
    yield utils.loginUser(thread.user, {request: thread.request})
    threads.push(thread)
  # Spawn all requests at once!
  requests = []
  options = {
    url: getURL('/db/subscription/-/subscribe_prepaid')
    json: { ppc: prepaid.code }
  }
  for thread in threads
    requests.push(thread.request.postAsync(options))
  # Wait until all requests finish, make sure all but one succeeded
  responses = yield requests
  redeemed = _.size(_.where(responses, {statusCode: 200}))
  errors = _.size(_.where(responses, {statusCode: 403}))
  expect(redeemed).toEqual(codeRedeemers)
  expect(errors).toEqual(redeemers - codeRedeemers)
  nockDone()
  done()
| 76739 | require '../common'
config = require '../../../server_config'
moment = require 'moment'
{findStripeSubscription} = require '../../../server/lib/utils'
async = require 'async'
nockUtils = require '../nock-utils'
utils = require '../utils'
Promise = require 'bluebird'
Payment = require '../../../server/models/Payment'
Prepaid = require '../../../server/models/Prepaid'
User = require '../../../server/models/User'
Course = require '../../../server/models/Course'
CourseInstance = require '../../../server/models/CourseInstance'
request = require '../request'
# Prepaid creation is admin-only; the creator may be any user, and optional
# start/end dates bound the license validity window.
describe 'POST /db/prepaid', ->
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([User, Prepaid])
    admin = yield utils.initAdmin()
    yield utils.loginUser(admin)
    done()

  it 'creates a new prepaid for type "course"', utils.wrap (done) ->
    user = yield utils.initUser()
    [res, body] = yield request.postAsync({url: getURL('/db/prepaid'), json: {
      type: 'course'
      creator: user.id
    }})
    expect(res.statusCode).toBe(201)
    prepaid = yield Prepaid.findById(res.body._id)
    expect(prepaid).toBeDefined()
    expect(prepaid.get('creator').equals(user._id)).toBe(true)
    # A redemption code is generated automatically on creation
    expect(prepaid.get('code')).toBeDefined()
    done()

  it 'does not work for non-admins', utils.wrap (done) ->
    user = yield utils.initUser()
    yield utils.loginUser(user)
    [res, body] = yield request.postAsync({url: getURL('/db/prepaid'), json: {
      type: 'course'
      creator: user.id
    }})
    expect(res.statusCode).toBe(403)
    done()

  it 'accepts start and end dates', utils.wrap (done) ->
    user = yield utils.initUser()
    # Bug fix: Date#toISOString takes no arguments, so the previous
    # `new Date().toISOString(2001,1,1)` silently ignored them and always sent
    # the current time. Construct the intended date first, then serialize it.
    [res, body] = yield request.postAsync({url: getURL('/db/prepaid'), json: {
      type: 'course'
      creator: user.id
      startDate: new Date(2001, 1, 1).toISOString()
      endDate: new Date(2010, 1, 1).toISOString()
    }})
    expect(res.statusCode).toBe(201)
    prepaid = yield Prepaid.findById(res.body._id)
    expect(prepaid).toBeDefined()
    expect(prepaid.get('startDate')).toBeDefined()
    expect(prepaid.get('endDate')).toBeDefined()
    done()
describe 'GET /db/prepaid', ->
  beforeEach utils.wrap (done) ->
    # Seed one prepaid of each type for the same creator; the default query
    # should surface only the classroom-related ones (course, starter_license).
    @user = yield utils.initUser()
    yield utils.loginUser(@user)
    prepaid = new Prepaid({creator: @user.id, type: 'course'})
    yield prepaid.save()
    prepaid = new Prepaid({creator: @user.id, type: 'starter_license'})
    yield prepaid.save()
    prepaid = new Prepaid({creator: @user.id, type: 'terminal_subscription'})
    yield prepaid.save()
    prepaid = new Prepaid({creator: @user.id, type: 'subscription'})
    yield prepaid.save()
    done()
  describe 'when creator param', ->
    it 'returns only course and starter_license prepaids for creator', utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: getURL("/db/prepaid?creator=#{@user.id}"), json: true})
      expect(body.length).toEqual(2)
      done()
  describe 'when creator and allTypes=true', ->
    # allTypes=true lifts the type filter and returns all four seeded prepaids
    it 'returns all for creator', utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: getURL("/db/prepaid?creator=#{@user.id}&allTypes=true"), json: true})
      expect(body.length).toEqual(4)
      done()
# The /creator endpoint exposes the license creator's contact info to teachers
# who own the license or have joined it; everyone else is forbidden.
describe 'GET /db/prepaid/:handle/creator', ->
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
    @creator = yield utils.initUser({role: 'teacher'})
    @joiner = yield utils.initUser({role: 'teacher'})
    @admin = yield utils.initAdmin()
    yield utils.loginUser(@admin)
    @prepaid = yield utils.makePrepaid({ creator: @creator.id })
    yield utils.loginUser(@creator)
    yield utils.addJoinerToPrepaid(@prepaid, @joiner)
    @url = getURL("/db/prepaid/#{@prepaid.id}/creator")
    done()
  describe 'when the prepaid ID is wrong', ->
    beforeEach utils.wrap (done) ->
      yield utils.loginUser(@creator)
      @url = getURL("/db/prepaid/123456789012345678901234/creator")
      done()
    it 'returns a NotFound error', utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: @url, json: true})
      expect(res.statusCode).toBe(404)
      done()
  describe 'when user is the creator', ->
    beforeEach utils.wrap (done) ->
      yield utils.loginUser(@creator)
      done()
    # Description fixed: this suite exercises the creator-info endpoint, not
    # the prepaid listing (the old name was copy-pasted from another spec).
    it "returns the creator's name and email", utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: @url, json: true})
      expect(res.statusCode).toBe(200)
      expect(body.email).toEqual(@creator.email)
      expect(body.name).toEqual(@creator.name)
      expect(body.firstName).toEqual(@creator.firstName)
      expect(body.lastName).toEqual(@creator.lastName)
      done()
  describe 'when user is a joiner', ->
    beforeEach utils.wrap (done) ->
      yield utils.loginUser(@joiner)
      done()
    # Description fixed: same copy-paste as above; joiners may also view the
    # creator's contact info.
    it "returns the creator's name and email", utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: @url, json: true})
      expect(res.statusCode).toBe(200)
      expect(body.email).toEqual(@creator.email)
      expect(body.name).toEqual(@creator.name)
      expect(body.firstName).toEqual(@creator.firstName)
      expect(body.lastName).toEqual(@creator.lastName)
      done()
  describe 'when user is not a teacher', ->
    beforeEach utils.wrap (done) ->
      @user = yield utils.initUser()
      yield utils.loginUser(@user)
      done()
    it 'returns a Forbidden Error', utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: @url, json: true})
      expect(res.statusCode).toBe(403)
      expect(body.email).toBeUndefined()
      done()
  describe 'when user is neither the creator nor joiner', ->
    beforeEach utils.wrap (done) ->
      @user = yield utils.initUser({role: 'teacher'})
      yield utils.loginUser(@user)
      done()
    it 'returns a Forbidden Error', utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: @url, json: true})
      expect(res.statusCode).toBe(403)
      expect(body.email).toBeUndefined()
      done()
# Listing a license's joiners is restricted to the license creator.
describe 'GET /db/prepaid/:handle/joiners', ->
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
    @creator = yield utils.initUser({role: 'teacher'})
    @joiner = yield utils.initUser({role: 'teacher', firstName: '<NAME>', lastName: '<NAME>'})
    @joiner2 = yield utils.initUser({role: 'teacher', firstName: '<NAME>', lastName: '<NAME>'})
    @admin = yield utils.initAdmin()
    yield utils.loginUser(@admin)
    @prepaid = yield utils.makePrepaid({ creator: @creator.id })
    yield utils.loginUser(@creator)
    yield utils.addJoinerToPrepaid(@prepaid, @joiner)
    yield utils.addJoinerToPrepaid(@prepaid, @joiner2)
    @url = getURL("/db/prepaid/#{@prepaid.id}/joiners")
    done()
  describe 'when user is the creator', ->
    beforeEach utils.wrap (done) ->
      yield utils.loginUser(@creator)
      done()
    it 'returns an array of users', utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: @url, json: true})
      expect(res.statusCode).toBe(200)
      expect(body.length).toBe(2)
      # Only identifying fields are exposed, in join order
      expect(body[0]._id).toEqual(@joiner._id+'')
      expect(_.omit(body[0], '_id')).toEqual(_.pick(@joiner.toObject(), 'name', 'email', 'firstName', 'lastName'))
      expect(_.omit(body[1], '_id')).toEqual(_.pick(@joiner2.toObject(), 'name', 'email', 'firstName', 'lastName'))
      done()
  describe 'when user is not a teacher', ->
    beforeEach utils.wrap (done) ->
      @user = yield utils.initUser()
      yield utils.loginUser(@user)
      done()
    it 'returns a Forbidden Error', utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: @url, json: true})
      expect(res.statusCode).toBe(403)
      expect(body.email).toBeUndefined()
      done()
  describe 'when user is not the creator', ->
    # Even a joiner may not list the other joiners
    beforeEach utils.wrap (done) ->
      yield utils.loginUser(@joiner)
      done()
    it 'returns a Forbidden Error', utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: @url, json: true})
      expect(res.statusCode).toBe(403)
      expect(body.email).toBeUndefined()
      done()
  describe 'when user is neither the creator nor joiner', ->
    beforeEach utils.wrap (done) ->
      @user = yield utils.initUser({role: 'teacher'})
      yield utils.loginUser(@user)
      done()
    it 'returns a Forbidden Error', utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: @url, json: true})
      expect(res.statusCode).toBe(403)
      expect(body.email).toBeUndefined()
      done()
describe 'GET /db/prepaid/:handle', ->
  it 'populates startDate and endDate with default values', utils.wrap (done) ->
    # A prepaid saved without explicit dates should come back from the API
    # with the model's default validity window filled in.
    undatedPrepaid = new Prepaid({type: 'course' })
    yield undatedPrepaid.save()
    url = getURL("/db/prepaid/#{undatedPrepaid.id}")
    [res, body] = yield request.getAsync({url: url, json: true})
    expect(body.startDate).toBe(Prepaid.DEFAULT_START_DATE)
    expect(body.endDate).toBe(Prepaid.DEFAULT_END_DATE)
    done()
describe 'POST /db/prepaid/:handle/redeemers', ->
  # Shared fixtures: a teacher-owned prepaid (created while logged in as an
  # admin, since creation is admin-only) and a fresh user to redeem it on.
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
    @teacher = yield utils.initUser({role: 'teacher'})
    @admin = yield utils.initAdmin()
    yield utils.loginUser(@admin)
    @prepaid = yield utils.makePrepaid({ creator: @teacher.id })
    yield utils.loginUser(@teacher)
    @student = yield utils.initUser()
    @url = getURL("/db/prepaid/#{@prepaid.id}/redeemers")
    done()
  it 'adds a given user to the redeemers property', utils.wrap (done) ->
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
    expect(body.redeemers.length).toBe(1)
    expect(res.statusCode).toBe(201)
    prepaid = yield Prepaid.findById(body._id)
    expect(prepaid.get('redeemers').length).toBe(1)
    # Redeeming attaches the prepaid to the user and sets the student role
    @student = yield User.findById(@student.id)
    expect(@student.get('coursePrepaid')._id.equals(@prepaid._id)).toBe(true)
    expect(@student.get('role')).toBe('student')
    done()
describe 'when user is a joiner', ->
beforeEach ->
@joiner = yield utils.initUser({role: 'teacher', firstName: '<NAME>', lastName: '<NAME>'})
yield utils.loginUser(@admin)
yield utils.loginUser(@teacher)
yield utils.addJoinerToPrepaid(@prepaid, @joiner)
yield utils.loginUser(@joiner)
it 'adds a given user to the redeemers property', utils.wrap (done) ->
[res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
expect(body.redeemers.length).toBe(1)
expect(res.statusCode).toBe(201)
prepaid = yield Prepaid.findById(body._id)
expect(prepaid.get('redeemers').length).toBe(1)
@student = yield User.findById(@student.id)
expect(@student.get('coursePrepaid')._id.equals(@prepaid._id)).toBe(true)
expect(@student.get('role')).toBe('student')
done()
  # A prepaid with maxRedeemers: 0 can never be redeemed
  it 'returns 403 if maxRedeemers is reached', utils.wrap (done) ->
    admin = yield utils.initAdmin()
    yield utils.loginUser(admin)
    prepaid = yield utils.makePrepaid({ creator: @teacher.id, maxRedeemers: 0 })
    url = getURL("/db/prepaid/#{prepaid.id}/redeemers")
    yield utils.loginUser(@teacher)
    [res, body] = yield request.postAsync({uri: url, json: { userID: @student.id } })
    expect(res.statusCode).toBe(403)
    expect(res.body.message).toBe('Too many redeemers')
    done()
  # A teacher unrelated to the license may not redeem from it
  it 'returns 403 unless the user is the "creator" or a joiner', utils.wrap (done) ->
    @otherTeacher = yield utils.initUser({role: 'teacher'})
    yield utils.loginUser(@otherTeacher)
    [res, body] = yield request.postAsync({uri: @url, json: { userID: @student.id } })
    expect(res.statusCode).toBe(403)
    expect(res.body.message).toBe('You may not redeem licenses from this prepaid')
    done()
  it 'returns 403 if the prepaid is expired', utils.wrap (done) ->
    admin = yield utils.initAdmin()
    yield utils.loginUser(admin)
    prepaid = yield utils.makePrepaid({ creator: @teacher.id, endDate: moment().subtract(1, 'month').toISOString() })
    url = getURL("/db/prepaid/#{prepaid.id}/redeemers")
    yield utils.loginUser(@teacher)
    [res, body] = yield request.postAsync({uri: url, json: { userID: @student.id } })
    expect(res.statusCode).toBe(403)
    expect(res.body.message).toBe('This prepaid is expired')
    done()
  # A user already holding some other prepaid is left alone: 200, no new redeemer
  it 'is idempotent across prepaids collection', utils.wrap (done) ->
    student = yield utils.initUser({ coursePrepaid: { _id: new Prepaid()._id } })
    [res, body] = yield request.postAsync({uri: @url, json: { userID: student.id } })
    expect(res.statusCode).toBe(200)
    expect(body.redeemers.length).toBe(0)
    done()
  # Redeeming the same user twice does not duplicate the redeemer entry;
  # the second call answers 200 instead of 201
  it 'is idempotent to itself', utils.wrap (done) ->
    [res, body] = yield request.postAsync({uri: @url, json: { userID: @student.id } })
    expect(body.redeemers?.length).toBe(1)
    expect(res.statusCode).toBe(201)
    [res, body] = yield request.postAsync({uri: @url, json: { userID: @student.id } })
    expect(body.redeemers?.length).toBe(1)
    expect(res.statusCode).toBe(200)
    prepaid = yield Prepaid.findById(body._id)
    expect(prepaid.get('redeemers').length).toBe(1)
    student = yield User.findById(@student.id)
    expect(student.get('coursePrepaid')._id.equals(@prepaid._id)).toBe(true)
    done()
  # An expired license on the user is replaced by the current one
  it 'updates the user if their license is expired', utils.wrap (done) ->
    yield utils.loginUser(@admin)
    prepaid = yield utils.makePrepaid({
      creator: @teacher.id
      startDate: moment().subtract(2, 'month').toISOString()
      endDate: moment().subtract(1, 'month').toISOString()
    })
    @student.set('coursePrepaid', _.pick(prepaid.toObject(), '_id', 'startDate', 'endDate'))
    yield @student.save()
    yield utils.loginUser(@teacher)
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
    expect(body.redeemers.length).toBe(1)
    expect(res.statusCode).toBe(201)
    student = yield User.findById(@student.id)
    expect(student.get('coursePrepaid')._id.equals(@prepaid._id)).toBe(true)
    done()
  # Upgrades are allowed: a still-valid starter license yields to a full one
  it 'replaces a starter license with a full license', utils.wrap (done) ->
    yield utils.loginUser(@admin)
    oldPrepaid = yield utils.makePrepaid({
      creator: @teacher.id
      startDate: moment().subtract(2, 'month').toISOString()
      endDate: moment().add(4, 'month').toISOString()
      type: 'starter_license'
    })
    @student.set('coursePrepaid', _.pick(oldPrepaid.toObject(), '_id', 'startDate', 'endDate', 'type'))
    yield @student.save()
    yield utils.loginUser(@teacher)
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
    expect(body.redeemers.length).toBe(1)
    expect(res.statusCode).toBe(201)
    prepaid = yield Prepaid.findById(@prepaid._id)
    expect(prepaid.get('redeemers').length).toBe(1)
    student = yield User.findById(@student.id)
    expect(student.get('coursePrepaid')._id.equals(@prepaid._id)).toBe(true)
    done()
  # Downgrades are not: a valid full license is kept over a starter license
  it 'does NOT replace a full license with a starter license', utils.wrap (done) ->
    yield utils.loginUser(@admin)
    # Turn the suite's prepaid into the (weaker) starter license
    @prepaid.set({
      creator: @teacher.id
      startDate: moment().subtract(2, 'month').toISOString()
      endDate: moment().add(4, 'month').toISOString()
      type: 'starter_license'
    })
    yield @prepaid.save()
    oldPrepaid = yield utils.makePrepaid({
      creator: @teacher.id
      startDate: moment().subtract(2, 'month').toISOString()
      endDate: moment().add(10, 'month').toISOString()
      type: 'course'
    })
    yield oldPrepaid.redeem(@student)
    yield utils.loginUser(@teacher)
    student = yield User.findById(@student.id)
    expect(student.get('coursePrepaid')._id.equals(oldPrepaid._id)).toBe(true)
    expect(student.get('coursePrepaid')._id.toString()).toBe(oldPrepaid._id.toString())
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
    expect(body.redeemers.length).toBe(0)
    expect(res.statusCode).toBe(200)
    # The student still holds the original full license
    student = yield User.findById(@student.id)
    expect(student.get('coursePrepaid')._id.equals(oldPrepaid._id)).toBe(true)
    expect(student.get('coursePrepaid')._id.toString()).toBe(oldPrepaid._id.toString())
    expect((yield Prepaid.findById(oldPrepaid._id)).get('redeemers').length).toBe(1)
    done()
  # Starter licenses carry a course whitelist that is copied onto the user
  it 'adds includedCourseIDs to the user when redeeming', utils.wrap (done) ->
    yield utils.loginUser(@admin)
    @prepaid.set({
      type: 'starter_license'
      includedCourseIDs: ['course_1', 'course_2']
    })
    yield @prepaid.save()
    yield utils.loginUser(@teacher)
    [res, body] = yield request.postAsync { uri: @url, json: { userID: @student.id } }
    expect(body.redeemers.length).toBe(1)
    expect(res.statusCode).toBe(201)
    student = yield User.findById(@student.id)
    expect(student.get('coursePrepaid')?.includedCourseIDs).toEqual(['course_1', 'course_2'])
    expect(student.get('coursePrepaid')?.type).toEqual('starter_license')
    done()
  describe 'when user is a joiner on a shared license', ->
    # Build a fresh creator/joiner pair; the joiner stays logged in
    beforeEach utils.wrap (done) ->
      yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
      @creator = yield utils.initUser({role: 'teacher'})
      @joiner = yield utils.initUser({role: 'teacher'})
      @admin = yield utils.initAdmin()
      yield utils.loginUser(@admin)
      @prepaid = yield utils.makePrepaid({ creator: @creator.id })
      yield utils.loginUser(@creator)
      yield utils.addJoinerToPrepaid(@prepaid, @joiner)
      yield utils.loginUser(@joiner)
      @student = yield utils.initUser()
      @url = getURL("/db/prepaid/#{@prepaid.id}/redeemers")
      done()
    it 'allows teachers with shared licenses to redeem', utils.wrap (done) ->
      prepaid = yield Prepaid.findById(@prepaid.id)
      expect(prepaid.get('redeemers').length).toBe(0)
      [res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
      expect(body.redeemers.length).toBe(1)
      expect(res.statusCode).toBe(201)
      prepaid = yield Prepaid.findById(body._id)
      expect(prepaid.get('redeemers').length).toBe(1)
      @student = yield User.findById(@student.id)
      expect(@student.get('coursePrepaid')._id.equals(@prepaid._id)).toBe(true)
      expect(@student.get('role')).toBe('student')
      done()
# Revoking a license seat: removes the user from redeemers and clears the
# user's coursePrepaid.
describe 'DELETE /db/prepaid/:handle/redeemers', ->
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
    @teacher = yield utils.initUser({role: 'teacher'})
    @admin = yield utils.initAdmin()
    yield utils.loginUser(@admin)
    @prepaid = yield utils.makePrepaid({ creator: @teacher.id })
    yield utils.loginUser(@teacher)
    @student = yield utils.initUser()
    @url = getURL("/db/prepaid/#{@prepaid.id}/redeemers")
    # Redeem first so each spec starts with one seat taken
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
    expect(res.statusCode).toBe(201)
    done()
  it 'removes a given user to the redeemers property', utils.wrap (done) ->
    prepaid = yield Prepaid.findById(@prepaid.id)
    expect(prepaid.get('redeemers').length).toBe(1)
    [res, body] = yield request.delAsync {uri: @url, json: { userID: @student.id } }
    expect(body.redeemers.length).toBe(0)
    expect(res.statusCode).toBe(200)
    prepaid = yield Prepaid.findById(body._id)
    expect(prepaid.get('redeemers').length).toBe(0)
    student = yield User.findById(@student.id)
    expect(student.get('coursePrepaid')).toBeUndefined()
    done()
  it 'returns 403 unless the user is the "creator"', utils.wrap (done) ->
    otherTeacher = yield utils.initUser({role: 'teacher'})
    yield utils.loginUser(otherTeacher)
    [res, body] = yield request.delAsync {uri: @url, json: { userID: @student.id } }
    expect(res.statusCode).toBe(403)
    done()
  it 'returns 422 unless the target user is in "redeemers"', utils.wrap (done) ->
    otherStudent = yield utils.initUser({role: 'student'})
    [res, body] = yield request.delAsync {uri: @url, json: { userID: otherStudent.id } }
    expect(res.statusCode).toBe(422)
    done()
  # NOTE: this spec uses the no-done form of utils.wrap -- presumably the
  # wrapper finishes when the generator returns; confirm against utils.wrap.
  it 'returns 403 if the prepaid is a starter license', utils.wrap ->
    yield @prepaid.update({$set: {type: 'starter_license'}})
    [res, body] = yield request.delAsync {uri: @url, json: { userID: @student.id } }
    expect(res.statusCode).toBe(403)
  describe 'when user is a joiner on a shared license', ->
    beforeEach utils.wrap (done) ->
      yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
      @creator = yield utils.initUser({role: 'teacher'})
      @joiner = yield utils.initUser({role: 'teacher'})
      @admin = yield utils.initAdmin()
      yield utils.loginUser(@admin)
      @prepaid = yield utils.makePrepaid({ creator: @creator.id })
      yield utils.loginUser(@creator)
      yield utils.addJoinerToPrepaid(@prepaid, @joiner)
      yield utils.loginUser(@joiner)
      @student = yield utils.initUser()
      @url = getURL("/db/prepaid/#{@prepaid.id}/redeemers")
      [res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
      expect(res.statusCode).toBe(201)
      done()
    it 'allows teachers with shared licenses to revoke', utils.wrap (done) ->
      prepaid = yield Prepaid.findById(@prepaid.id)
      expect(prepaid.get('redeemers').length).toBe(1)
      [res, body] = yield request.delAsync {uri: @url, json: { userID: @student.id } }
      expect(body.redeemers.length).toBe(0)
      expect(res.statusCode).toBe(200)
      prepaid = yield Prepaid.findById(body._id)
      expect(prepaid.get('redeemers').length).toBe(0)
      student = yield User.findById(@student.id)
      expect(student.get('coursePrepaid')).toBeUndefined()
      done()
# Sharing a license: only the creator may add other teachers as joiners.
describe 'POST /db/prepaid/:handle/joiners', ->
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
    @teacher = yield utils.initUser({role: 'teacher'})
    @admin = yield utils.initAdmin()
    yield utils.loginUser(@admin)
    @prepaid = yield utils.makePrepaid({ creator: @teacher.id })
    yield utils.loginUser(@teacher)
    @joiner = yield utils.initUser({role: 'teacher'})
    @url = getURL("/db/prepaid/#{@prepaid.id}/joiners")
    done()
  it 'adds a given user to the joiners property', utils.wrap (done) ->
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @joiner.id } }
    expect(res.statusCode).toBe(201)
    prepaid = yield Prepaid.findById(body._id)
    expect(prepaid.get('joiners').length).toBe(1)
    expect(prepaid.get('joiners')[0].userID + '').toBe(@joiner.id)
    done()
  describe 'when a user has already been added to joiners', ->
    # Second add is rejected with a translated error and no duplicate entry
    it "doesn't add a user twice", utils.wrap (done) ->
      [res, body] = yield request.postAsync {uri: @url, json: { userID: @joiner.id } }
      expect(res.statusCode).toBe(201)
      [res, body] = yield request.postAsync {uri: @url, json: { userID: @joiner.id } }
      expect(res.statusCode).toBe(422)
      expect(body.i18n).toBe('share_licenses.already_shared')
      prepaid = yield Prepaid.findById(@prepaid.id)
      expect(prepaid.get('joiners').length).toBe(1)
      expect(prepaid.get('joiners')[0].userID + '').toBe(@joiner.id)
      done()
  it 'returns 403 if user is not the creator', utils.wrap (done) ->
    yield utils.loginUser(@joiner)
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @joiner.id } }
    expect(res.statusCode).toBe(403)
    done()
  it 'returns 403 if user is not a teacher', utils.wrap (done) ->
    @user = yield utils.initUser()
    yield utils.loginUser(@user)
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @joiner.id } }
    expect(res.statusCode).toBe(403)
    done()
  # The target of the share must also be a teacher
  it 'returns 422 if joiner is not a teacher', utils.wrap (done) ->
    @nonteacher = yield utils.initUser()
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @nonteacher.id } }
    expect(res.statusCode).toBe(422)
    done()
  it 'returns 404 if prepaid is not found', utils.wrap (done) ->
    @url = getURL("/db/prepaid/123456789012345678901234/joiners")
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @joiner.id } }
    expect(res.statusCode).toBe(404)
    done()
describe 'GET /db/prepaid?creator=:id', ->
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
    @teacher = yield utils.initUser({role: 'teacher'})
    @admin = yield utils.initAdmin()
    yield utils.loginUser(@admin)
    @prepaid = yield utils.makePrepaid({ creator: @teacher.id })
    @otherPrepaid = yield utils.makePrepaid({ creator: @admin.id })
    @expiredPrepaid = yield utils.makePrepaid({ creator: @teacher.id, endDate: moment().subtract(1, 'month').toISOString() })
    # Simulate a pre-migration document that has no start/end dates stored
    @unmigratedPrepaid = yield utils.makePrepaid({ creator: @teacher.id })
    yield @unmigratedPrepaid.update({$unset: { endDate: '', startDate: '' }})
    yield utils.loginUser(@teacher)
    done()
  it 'return all prepaids for the creator', utils.wrap (done) ->
    url = getURL("/db/prepaid?creator=#{@teacher.id}")
    [res, body] = yield request.getAsync({uri: url, json: true})
    expect(res.statusCode).toBe(200)
    # Three prepaids belong to @teacher; @otherPrepaid belongs to @admin
    expect(res.body.length).toEqual(3)
    if _.any((prepaid._id is @otherPrepaid.id for prepaid in res.body))
      fail('Found the admin prepaid in response')
    # Even the unmigrated document must be served with dates filled in
    for prepaid in res.body
      unless prepaid.startDate and prepaid.endDate
        fail('All prepaids should have start and end dates')
    expect(res.body[0]._id).toBe(@prepaid.id)
    done()
  it 'returns 403 if the user tries to view another user\'s prepaids', utils.wrap (done) ->
    anotherUser = yield utils.initUser()
    url = getURL("/db/prepaid?creator=#{anotherUser.id}")
    [res, body] = yield request.getAsync({uri: url, json: true})
    expect(res.statusCode).toBe(403)
    done()
  describe 'when includeShared is set to true', ->
    beforeEach utils.wrap (done) ->
      yield utils.loginUser(@admin)
      @joiner = yield utils.initUser({role: 'teacher'})
      @joinersPrepaid = yield utils.makePrepaid({ creator: @joiner.id })
      # NOTE(review): $set writes an object where joiners presumably holds an
      # array -- looks like Mongoose casts it to a one-element array; verify.
      yield @prepaid.update({$set: { joiners: { userID: @joiner._id }}})
      yield utils.loginUser(@joiner)
      done()
    it 'returns licenses that have been shared with the user', utils.wrap (done) ->
      url = getURL("/db/prepaid?creator=#{@joiner.id}&includeShared=true")
      [res, body] = yield request.getAsync({uri: url, json: true})
      expect(res.statusCode).toBe(200)
      # The joiner's own prepaid plus the one shared with them
      expect(res.body.length).toEqual(2)
      if _.any((prepaid._id is @otherPrepaid.id for prepaid in res.body))
        fail('Found the admin prepaid in response')
      for prepaid in res.body
        unless prepaid.startDate and prepaid.endDate
          fail('All prepaids should have start and end dates')
      expect(res.body[0]._id).toBe(@prepaid.id)
      done()
describe '/db/prepaid', ->
  beforeEach utils.wrap (done) ->
    yield utils.populateProducts()
    done()
  # Shared state for the legacy, order-dependent specs below:
  # joeData/joeCode are written by earlier tests and read by later ones.
  prepaidURL = getURL('/db/prepaid')
  headers = {'X-Change-Plan': 'true'}
  joeData = null
  # Live stripe client configured from server config; requests are replayed
  # through nock fixtures in the purchase tests.
  stripe = require('stripe')(config.stripe.secretKey)
  joeCode = null
verifyCoursePrepaid = (user, prepaid, done) ->
expect(prepaid.creator).toEqual(user.id)
expect(prepaid.type).toEqual('course')
expect(prepaid.maxRedeemers).toBeGreaterThan(0)
expect(prepaid.code).toMatch(/^\w{8}$/)
return done() if user.isAdmin()
Payment.findOne {prepaidID: new ObjectId(prepaid._id)}, (err, payment) ->
expect(err).toBeNull()
expect(payment).not.toBeNull()
expect(payment?.get('purchaser')).toEqual(user._id)
done()
verifySubscriptionPrepaid = (user, prepaid, done) ->
expect(prepaid.creator).toEqual(user.id)
expect(prepaid.type).toEqual('subscription')
expect(prepaid.maxRedeemers).toBeGreaterThan(0)
expect(prepaid.code).toMatch(/^\w{8}$/)
expect(prepaid.properties?.couponID).toEqual('free')
return done() if user.isAdmin()
Payment.findOne {prepaidID: new ObjectId(prepaid._id)}, (err, payment) ->
expect(err).toBeNull()
expect(payment).not.toBeNull()
expect(payment?.get('purchaser')).toEqual(user._id)
done()
  # NOTE(review): the specs below are order-dependent (shared DB state cleared
  # once up front) rather than isolated — a known legacy pattern in this file.
  it 'Clear database', (done) ->
    clearModels [Course, CourseInstance, Payment, Prepaid, User], (err) ->
      throw err if err
      done()
  it 'Anonymous creates prepaid code', (done) ->
    createPrepaid 'subscription', 1, 0, (err, res, body) ->
      expect(err).toBeNull()
      expect(res.statusCode).toBe(401)
      done()
  it 'Non-admin creates prepaid code', (done) ->
    loginNewUser (user1) ->
      expect(user1.isAdmin()).toEqual(false)
      createPrepaid 'subscription', 4, 0, (err, res, body) ->
        expect(err).toBeNull()
        expect(res.statusCode).toBe(403)
        done()
  it 'Admin creates prepaid code with type subscription', (done) ->
    loginNewUser (user1) ->
      user1.set('permissions', ['admin'])
      user1.save (err, user1) ->
        expect(err).toBeNull()
        expect(user1.isAdmin()).toEqual(true)
        createPrepaid 'subscription', 1, 0, (err, res, body) ->
          expect(err).toBeNull()
          expect(res.statusCode).toBe(200)
          verifySubscriptionPrepaid user1, body, done
  it 'Admin creates prepaid code with type terminal_subscription', (done) ->
    loginNewUser (user1) ->
      user1.set('permissions', ['admin'])
      user1.save (err, user1) ->
        expect(err).toBeNull()
        expect(user1.isAdmin()).toEqual(true)
        # args: type, maxRedeemers, months
        createPrepaid 'terminal_subscription', 2, 3, (err, res, body) ->
          expect(err).toBeNull()
          expect(res.statusCode).toBe(200)
          expect(body.creator).toEqual(user1.id)
          expect(body.type).toEqual('terminal_subscription')
          expect(body.maxRedeemers).toEqual(2)
          expect(body.properties?.months).toEqual(3)
          expect(body.code).toMatch(/^\w{8}$/)
          done()
  it 'Admin creates prepaid code with invalid type', (done) ->
    loginNewUser (user1) ->
      user1.set('permissions', ['admin'])
      user1.save (err, user1) ->
        expect(err).toBeNull()
        expect(user1.isAdmin()).toEqual(true)
        createPrepaid 'bulldozer', 1, 0, (err, res, body) ->
          expect(err).toBeNull()
          expect(res.statusCode).toBe(403)
          done()
  it 'Admin creates prepaid code with no type specified', (done) ->
    loginNewUser (user1) ->
      user1.set('permissions', ['admin'])
      user1.save (err, user1) ->
        expect(err).toBeNull()
        expect(user1.isAdmin()).toEqual(true)
        createPrepaid null, 1, 0, (err, res, body) ->
          expect(err).toBeNull()
          expect(res.statusCode).toBe(403)
          done()
  it 'Admin creates prepaid code with invalid maxRedeemers', (done) ->
    loginNewUser (user1) ->
      user1.set('permissions', ['admin'])
      user1.save (err, user1) ->
        expect(err).toBeNull()
        expect(user1.isAdmin()).toEqual(true)
        createPrepaid 'subscription', 0, 0, (err, res, body) ->
          expect(err).toBeNull()
          expect(res.statusCode).toBe(403)
          done()
  it 'Non-admin requests /db/prepaid', (done) ->
    loginNewUser (user1) ->
      expect(user1.isAdmin()).toEqual(false)
      request.get {uri: prepaidURL}, (err, res, body) ->
        expect(err).toBeNull()
        expect(res.statusCode).toBe(403)
        done()
  it 'Admin requests /db/prepaid', (done) ->
    loginNewUser (user1) ->
      user1.set('permissions', ['admin'])
      user1.save (err, user1) ->
        expect(err).toBeNull()
        expect(user1.isAdmin()).toEqual(true)
        createPrepaid 'subscription', 1, 0, (err, res, prepaid) ->
          expect(err).toBeNull()
          expect(res.statusCode).toBe(200)
          request.get {uri: prepaidURL}, (err, res, body) ->
            expect(err).toBeNull()
            expect(res.statusCode).toBe(200)
            prepaids = JSON.parse(body)
            found = false
            for p in prepaids
              if p._id is prepaid._id
                found = true
                # verifySubscriptionPrepaid calls done() itself on the match.
                verifySubscriptionPrepaid user1, p, done
                break
            expect(found).toEqual(true)
            # Only finish here when no match was found (the expect above fails).
            done() unless found
  # Purchase flows replay recorded Stripe traffic via nock fixtures; each test
  # names its own fixture file and calls nockDone() before finishing.
  describe 'Purchase course', ->
    afterEach nockUtils.teardownNock
    it 'Standard user purchases a prepaid for 0 seats', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-course-test-1.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          loginNewUser (user1) ->
            # 0 seats is rejected as unprocessable.
            purchasePrepaid 'course', {}, 0, token.id, (err, res, prepaid) ->
              expect(err).toBeNull()
              expect(res.statusCode).toBe(422)
              nockDone()
              done()
    it 'Standard user purchases a prepaid for 1 seat', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-course-test-2.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          loginNewUser (user1) ->
            purchasePrepaid 'course', {}, 1, token.id, (err, res, prepaid) ->
              expect(err).toBeNull()
              expect(res.statusCode).toBe(200)
              verifyCoursePrepaid user1, prepaid, ->
                nockDone()
                done()
    it 'Standard user purchases a prepaid for 3 seats', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-course-test-3.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          loginNewUser (user1) ->
            purchasePrepaid 'course', {}, 3, token.id, (err, res, prepaid) ->
              expect(err).toBeNull()
              expect(res.statusCode).toBe(200)
              verifyCoursePrepaid user1, prepaid, ->
                nockDone()
                done()
  describe 'Purchase terminal_subscription', ->
    afterEach nockUtils.teardownNock
    it 'Anonymous submits a prepaid purchase', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-1.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          logoutUser () ->
            purchasePrepaid 'terminal_subscription', months: 3, 3, token.id, (err, res, prepaid) ->
              expect(err).toBeNull()
              expect(res.statusCode).toBe(401)
              nockDone()
              done()
    it 'Should error if type isnt terminal_subscription', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-2.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          loginNewUser (user1) ->
            purchasePrepaid 'subscription', months: 3, 3, token.id, (err, res, prepaid) ->
              expect(err).toBeNull()
              expect(res.statusCode).toBe(403)
              nockDone()
              done()
    it 'Should error if maxRedeemers is -1', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-3.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          loginNewUser (user1) ->
            purchasePrepaid 'terminal_subscription', months: 3, -1, token.id, (err, res, prepaid) ->
              expect(err).toBeNull()
              expect(res.statusCode).toBe(422)
              nockDone()
              done()
    it 'Should error if maxRedeemers is foo', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-4.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          loginNewUser (user1) ->
            purchasePrepaid 'terminal_subscription', months: 3, 'foo', token.id, (err, res, prepaid) ->
              expect(err).toBeNull()
              expect(res.statusCode).toBe(422)
              nockDone()
              done()
    it 'Should error if months is -1', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-5.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          loginNewUser (user1) ->
            purchasePrepaid 'terminal_subscription', months: -1, 3, token.id, (err, res, prepaid) ->
              expect(err).toBeNull()
              expect(res.statusCode).toBe(422)
              nockDone()
              done()
    it 'Should error if months is foo', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-6.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          loginNewUser (user1) ->
            purchasePrepaid 'terminal_subscription', months: 'foo', 3, token.id, (err, res, prepaid) ->
              expect(err).toBeNull()
              expect(res.statusCode).toBe(422)
              nockDone()
              done()
    it 'Should error if maxRedeemers and months are less than 3', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-7.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          loginNewUser (user1) ->
            purchasePrepaid 'terminal_subscription', months: 1, 1, token.id, (err, res, prepaid) ->
              expect(err).toBeNull()
              expect(res.statusCode).toBe(403)
              nockDone()
              done()
    it 'User submits valid prepaid code purchase', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-8.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          stripeTokenID = token.id
          loginJoe (joe) ->
            # First put joe on a basic plan so the purchase happens as an
            # existing Stripe customer.
            joeData = joe.toObject()
            joeData.stripe = {
              token: stripeTokenID
              planID: 'basic'
            }
            request.put {uri: getURL('/db/user'), json: joeData, headers: headers }, (err, res, body) ->
              joeData = body
              expect(res.statusCode).toBe(200)
              expect(joeData.stripe.customerID).toBeDefined()
              expect(firstSubscriptionID = joeData.stripe.subscriptionID).toBeDefined()
              expect(joeData.stripe.planID).toBe('basic')
              expect(joeData.stripe.token).toBeUndefined()
              # TODO: is this test still valid after new token?
              stripe.tokens.create {
                card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
              }, (err, token) ->
                purchasePrepaid 'terminal_subscription', months: 3, 3, token.id, (err, res, prepaid) ->
                  expect(err).toBeNull()
                  expect(res.statusCode).toBe(200)
                  expect(prepaid.type).toEqual('terminal_subscription')
                  expect(prepaid.code).toBeDefined()
                  # Saving this code for later tests
                  # TODO: don't make tests dependent on each other
                  joeCode = prepaid.code
                  expect(prepaid.creator).toBeDefined()
                  expect(prepaid.maxRedeemers).toEqual(3)
                  expect(prepaid.exhausted).toBe(false)
                  expect(prepaid.properties).toBeDefined()
                  expect(prepaid.properties.months).toEqual(3)
                  nockDone()
                  done()
    it 'Should have logged a Payment with the correct amount', (done) ->
      loginJoe (joe) ->
        query =
          purchaser: joe._id
        Payment.find query, (err, payments) ->
          expect(err).toBeNull()
          expect(payments).not.toBeNull()
          expect(payments.length).toEqual(1)
          # $9.00, in cents.
          expect(payments[0].get('amount')).toEqual(900)
          done()
    it 'Anonymous cant redeem a prepaid code', (done) ->
      logoutUser () ->
        subscribeWithPrepaid joeCode, (err, res) ->
          expect(err).toBeNull()
          expect(res?.statusCode).toEqual(401)
          done()
    it 'User cant redeem a nonexistant prepaid code', (done) ->
      loginJoe (joe) ->
        subscribeWithPrepaid 'abc123', (err, res) ->
          expect(err).toBeNull()
          expect(res.statusCode).toEqual(404)
          done()
    it 'User cant redeem empty code', (done) ->
      loginJoe (joe) ->
        subscribeWithPrepaid '', (err, res) ->
          expect(err).toBeNull()
          expect(res.statusCode).toEqual(422)
          done()
    it 'Anonymous cant fetch a prepaid code', (done) ->
      expect(joeCode).not.toBeNull()
      logoutUser () ->
        fetchPrepaid joeCode, (err, res) ->
          expect(err).toBeNull()
          expect(res.statusCode).toEqual(403)
          done()
    it 'User can fetch a prepaid code', (done) ->
      expect(joeCode).not.toBeNull()
      loginJoe (joe) ->
        fetchPrepaid joeCode, (err, res, body) ->
          expect(err).toBeNull()
          expect(res.statusCode).toEqual(200)
          expect(body).toBeDefined()
          return done() unless body
          prepaid = JSON.parse(body)
          expect(prepaid.code).toEqual(joeCode)
          expect(prepaid.maxRedeemers).toEqual(3)
          expect(prepaid.properties?.months).toEqual(3)
          done()
  # TODO: Move redeem subscription prepaid code tests to subscription tests file
  # These specs consume the joeCode/joeData produced by the purchase tests
  # above, so they must run after them.
  describe 'Subscription redeem tests', ->
    afterEach nockUtils.teardownNock
    it 'Creator can redeeem a prepaid code', (done) ->
      nockUtils.setupNock 'db-sub-redeem-test-1.json', (err, nockDone) ->
        loginJoe (joe) ->
          expect(joeCode).not.toBeNull()
          expect(joeData.stripe?.customerID).toBeDefined()
          expect(joeData.stripe?.subscriptionID).toBeDefined()
          return done() unless joeData.stripe?.customerID
          # joe has a stripe subscription, so test if the months are added to the end of it.
          stripe.customers.retrieve joeData.stripe.customerID, (err, customer) =>
            expect(err).toBeNull()
            findStripeSubscription customer.id, subscriptionID: joeData.stripe?.subscriptionID, (err, subscription) =>
              if subscription
                stripeSubscriptionPeriodEndDate = new moment(subscription.current_period_end * 1000)
              else
                # Deliberately fails the spec: the subscription should exist.
                expect(stripeSubscriptionPeriodEndDate).toBeDefined()
                return done()
              subscribeWithPrepaid joeCode, (err, res, result) =>
                expect(err).toBeNull()
                expect(res.statusCode).toEqual(200)
                # Free period should extend 3 months past the old period end.
                endDate = stripeSubscriptionPeriodEndDate.add(3, 'months').toISOString().substring(0, 10)
                expect(result?.stripe?.free.substring(0,10)).toEqual(endDate)
                expect(result?.purchased?.gems).toEqual(14000)
                # The paid Stripe subscription should have been cancelled.
                findStripeSubscription customer.id, subscriptionID: joeData.stripe?.subscriptionID, (err, subscription) =>
                  expect(subscription).toBeNull()
                  nockDone()
                  done()
    it 'User can redeem a prepaid code', (done) ->
      loginSam (sam) ->
        subscribeWithPrepaid joeCode, (err, res, result) ->
          expect(err).toBeNull()
          expect(res.statusCode).toEqual(200)
          endDate = new moment().add(3, 'months').toISOString().substring(0, 10)
          expect(result?.stripe?.free.substring(0,10)).toEqual(endDate)
          expect(result?.purchased?.gems).toEqual(10500)
          done()
    it 'Wont allow the same person to redeem twice', (done) ->
      loginSam (sam) ->
        subscribeWithPrepaid joeCode, (err, res, result) ->
          expect(err).toBeNull()
          expect(res.statusCode).toEqual(403)
          done()
    it 'Will return redeemed code as part of codes list', (done) ->
      loginSam (sam) ->
        request.get "#{getURL('/db/user')}/#{sam.id}/prepaid_codes", (err, res) ->
          expect(err).toBeNull()
          expect(res.statusCode).toEqual(200)
          codes = JSON.parse res.body
          expect(codes.length).toEqual(1)
          done()
    it 'Third user can redeem a prepaid code', (done) ->
      loginNewUser (user) ->
        subscribeWithPrepaid joeCode, (err, res, result) ->
          expect(err).toBeNull()
          expect(res.statusCode).toEqual(200)
          endDate = new moment().add(3, 'months').toISOString().substring(0, 10)
          expect(result?.stripe?.free.substring(0,10)).toEqual(endDate)
          expect(result?.purchased?.gems).toEqual(10500)
          done()
    it 'Fourth user cannot redeem code', (done) ->
      # joeCode's maxRedeemers is 3, so the fourth redemption is refused.
      loginNewUser (user) ->
        subscribeWithPrepaid joeCode, (err, res, result) ->
          expect(err).toBeNull()
          expect(res.statusCode).toEqual(403)
          done()
    it 'Can fetch a list of purchased and redeemed prepaid codes', (done) ->
      nockUtils.setupNock 'db-sub-redeem-test-2.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          loginNewUser (user) ->
            purchasePrepaid 'terminal_subscription', months: 1, 3, token.id, (err, res, prepaid) ->
              request.get "#{getURL('/db/user')}/#{user.id}/prepaid_codes", (err, res) ->
                expect(err).toBeNull()
                expect(res.statusCode).toEqual(200);
                codes = JSON.parse res.body
                expect(codes.length).toEqual(1)
                expect(codes[0].maxRedeemers).toEqual(3)
                expect(codes[0].properties).toBeDefined()
                expect(codes[0].properties.months).toEqual(1)
                nockDone()
                done()
    it 'thwarts query injections', utils.wrap (done) ->
      user = yield utils.initUser()
      yield utils.loginUser(user)
      # A Mongo operator object must not be accepted as a code.
      code = { $exists: true }
      subscribeWithPrepaidAsync = Promise.promisify(subscribeWithPrepaid)
      res = yield subscribeWithPrepaidAsync(code)
      expect(res.statusCode).toBe(422)
      expect(res.body.message).toBe('You must provide a valid prepaid code.')
      done()
    it 'enforces the maximum number of redeemers in a race condition', utils.wrap (done) ->
      nockDone = yield nockUtils.setupNockAsync 'db-sub-redeem-test-3.json'
      stripe.tokens.createAsync = Promise.promisify(stripe.tokens.create, {context: stripe.tokens})
      token = yield stripe.tokens.createAsync({
        card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
      })
      user = yield utils.initUser()
      yield utils.loginUser(user)
      # 51 users race for 50 redemptions; exactly one must be rejected.
      codeRedeemers = 50
      codeMonths = 3
      redeemers = 51
      purchasePrepaidAsync = Promise.promisify(purchasePrepaid, {multiArgs: true})
      [res, prepaid] = yield purchasePrepaidAsync('terminal_subscription', months: codeMonths, codeRedeemers, token.id)
      expect(prepaid).toBeDefined()
      expect(prepaid.code).toBeDefined()
      # Make 'threads', which are objects that encapsulate each user and their cookies
      threads = []
      for index in [0...redeemers]
        thread = {}
        thread.request = request.defaults({jar: request.jar()})
        thread.request.postAsync = Promise.promisify(thread.request.post, { context: thread.request })
        thread.user = yield utils.initUser()
        yield utils.loginUser(thread.user, {request: thread.request})
        threads.push(thread)
      # Spawn all requests at once!
      requests = []
      options = {
        url: getURL('/db/subscription/-/subscribe_prepaid')
        json: { ppc: prepaid.code }
      }
      for thread in threads
        requests.push(thread.request.postAsync(options))
      # Wait until all requests finish, make sure all but one succeeded
      responses = yield requests
      redeemed = _.size(_.where(responses, {statusCode: 200}))
      errors = _.size(_.where(responses, {statusCode: 403}))
      expect(redeemed).toEqual(codeRedeemers)
      expect(errors).toEqual(redeemers - codeRedeemers)
      nockDone()
      done()
require '../common'
config = require '../../../server_config'
moment = require 'moment'
{findStripeSubscription} = require '../../../server/lib/utils'
async = require 'async'
nockUtils = require '../nock-utils'
utils = require '../utils'
Promise = require 'bluebird'
Payment = require '../../../server/models/Payment'
Prepaid = require '../../../server/models/Prepaid'
User = require '../../../server/models/User'
Course = require '../../../server/models/Course'
CourseInstance = require '../../../server/models/CourseInstance'
request = require '../request'
describe 'POST /db/prepaid', ->
  # Admin-only endpoint: all specs run logged in as a fresh admin.
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([User, Prepaid])
    admin = yield utils.initAdmin()
    yield utils.loginUser(admin)
    done()
  it 'creates a new prepaid for type "course"', utils.wrap (done) ->
    user = yield utils.initUser()
    [res, body] = yield request.postAsync({url: getURL('/db/prepaid'), json: {
      type: 'course'
      creator: user.id
    }})
    expect(res.statusCode).toBe(201)
    prepaid = yield Prepaid.findById(res.body._id)
    expect(prepaid).toBeDefined()
    expect(prepaid.get('creator').equals(user._id)).toBe(true)
    # A redemption code is generated server-side on creation.
    expect(prepaid.get('code')).toBeDefined()
    done()
  it 'does not work for non-admins', utils.wrap (done) ->
    user = yield utils.initUser()
    yield utils.loginUser(user)
    [res, body] = yield request.postAsync({url: getURL('/db/prepaid'), json: {
      type: 'course'
      creator: user.id
    }})
    expect(res.statusCode).toBe(403)
    done()
it 'accepts start and end dates', utils.wrap (done) ->
user = yield utils.initUser()
[res, body] = yield request.postAsync({url: getURL('/db/prepaid'), json: {
type: 'course'
creator: user.id
startDate: new Date().toISOString(2001,1,1)
endDate: new Date().toISOString(2010,1,1)
}})
expect(res.statusCode).toBe(201)
prepaid = yield Prepaid.findById(res.body._id)
expect(prepaid).toBeDefined()
expect(prepaid.get('startDate')).toBeDefined()
expect(prepaid.get('endDate')).toBeDefined()
done()
describe 'GET /db/prepaid', ->
  # One prepaid of each type for the same creator, to exercise type filtering.
  beforeEach utils.wrap (done) ->
    @user = yield utils.initUser()
    yield utils.loginUser(@user)
    prepaid = new Prepaid({creator: @user.id, type: 'course'})
    yield prepaid.save()
    prepaid = new Prepaid({creator: @user.id, type: 'starter_license'})
    yield prepaid.save()
    prepaid = new Prepaid({creator: @user.id, type: 'terminal_subscription'})
    yield prepaid.save()
    prepaid = new Prepaid({creator: @user.id, type: 'subscription'})
    yield prepaid.save()
    done()
  describe 'when creator param', ->
    it 'returns only course and starter_license prepaids for creator', utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: getURL("/db/prepaid?creator=#{@user.id}"), json: true})
      expect(body.length).toEqual(2)
      done()
  describe 'when creator and allTypes=true', ->
    it 'returns all for creator', utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: getURL("/db/prepaid?creator=#{@user.id}&allTypes=true"), json: true})
      expect(body.length).toEqual(4)
      done()
# Access control for the creator-info endpoint: creator and joiners may read
# the creator's contact details; other users (teacher or not) may not.
describe 'GET /db/prepaid/:handle/creator', ->
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
    @creator = yield utils.initUser({role: 'teacher'})
    @joiner = yield utils.initUser({role: 'teacher'})
    @admin = yield utils.initAdmin()
    yield utils.loginUser(@admin)
    @prepaid = yield utils.makePrepaid({ creator: @creator.id })
    yield utils.loginUser(@creator)
    yield utils.addJoinerToPrepaid(@prepaid, @joiner)
    @url = getURL("/db/prepaid/#{@prepaid.id}/creator")
    done()
  describe 'when the prepaid ID is wrong', ->
    beforeEach utils.wrap (done) ->
      yield utils.loginUser(@creator)
      @url = getURL("/db/prepaid/123456789012345678901234/creator")
      done()
    it 'returns a NotFound error', utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: @url, json: true})
      expect(res.statusCode).toBe(404)
      done()
  describe 'when user is the creator', ->
    beforeEach utils.wrap (done) ->
      yield utils.loginUser(@creator)
      done()
    # NOTE(review): description fixed — it previously read "returns only course
    # and starter_license prepaids for creator", copy-pasted from another spec.
    it 'returns the creator\'s contact info', utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: @url, json: true})
      expect(res.statusCode).toBe(200)
      expect(body.email).toEqual(@creator.email)
      expect(body.name).toEqual(@creator.name)
      expect(body.firstName).toEqual(@creator.firstName)
      expect(body.lastName).toEqual(@creator.lastName)
      done()
  describe 'when user is a joiner', ->
    beforeEach utils.wrap (done) ->
      yield utils.loginUser(@joiner)
      done()
    it 'returns the creator\'s contact info', utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: @url, json: true})
      expect(res.statusCode).toBe(200)
      expect(body.email).toEqual(@creator.email)
      expect(body.name).toEqual(@creator.name)
      expect(body.firstName).toEqual(@creator.firstName)
      expect(body.lastName).toEqual(@creator.lastName)
      done()
  describe 'when user is not a teacher', ->
    beforeEach utils.wrap (done) ->
      @user = yield utils.initUser()
      yield utils.loginUser(@user)
      done()
    it 'returns a Forbidden Error', utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: @url, json: true})
      expect(res.statusCode).toBe(403)
      expect(body.email).toBeUndefined()
      done()
  describe 'when user is neither the creator nor joiner', ->
    beforeEach utils.wrap (done) ->
      @user = yield utils.initUser({role: 'teacher'})
      yield utils.loginUser(@user)
      done()
    it 'returns a Forbidden Error', utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: @url, json: true})
      expect(res.statusCode).toBe(403)
      expect(body.email).toBeUndefined()
      done()
# Access control for the joiners list: only the creator may read it; joiners
# themselves, non-teachers, and unrelated teachers are all forbidden.
describe 'GET /db/prepaid/:handle/joiners', ->
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
    @creator = yield utils.initUser({role: 'teacher'})
    @joiner = yield utils.initUser({role: 'teacher', firstName: 'PI:NAME:<NAME>END_PI', lastName: 'PI:NAME:<NAME>END_PI'})
    @joiner2 = yield utils.initUser({role: 'teacher', firstName: 'PI:NAME:<NAME>END_PI', lastName: 'PI:NAME:<NAME>END_PI'})
    @admin = yield utils.initAdmin()
    yield utils.loginUser(@admin)
    @prepaid = yield utils.makePrepaid({ creator: @creator.id })
    yield utils.loginUser(@creator)
    yield utils.addJoinerToPrepaid(@prepaid, @joiner)
    yield utils.addJoinerToPrepaid(@prepaid, @joiner2)
    @url = getURL("/db/prepaid/#{@prepaid.id}/joiners")
    done()
  describe 'when user is the creator', ->
    beforeEach utils.wrap (done) ->
      yield utils.loginUser(@creator)
      done()
    it 'returns an array of users', utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: @url, json: true})
      expect(res.statusCode).toBe(200)
      expect(body.length).toBe(2)
      expect(body[0]._id).toEqual(@joiner._id+'')
      # Only contact fields should be exposed, nothing else from the user doc.
      expect(_.omit(body[0], '_id')).toEqual(_.pick(@joiner.toObject(), 'name', 'email', 'firstName', 'lastName'))
      expect(_.omit(body[1], '_id')).toEqual(_.pick(@joiner2.toObject(), 'name', 'email', 'firstName', 'lastName'))
      done()
  describe 'when user is not a teacher', ->
    beforeEach utils.wrap (done) ->
      @user = yield utils.initUser()
      yield utils.loginUser(@user)
      done()
    it 'returns a Forbidden Error', utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: @url, json: true})
      expect(res.statusCode).toBe(403)
      expect(body.email).toBeUndefined()
      done()
  describe 'when user is not the creator', ->
    beforeEach utils.wrap (done) ->
      yield utils.loginUser(@joiner)
      done()
    it 'returns a Forbidden Error', utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: @url, json: true})
      expect(res.statusCode).toBe(403)
      expect(body.email).toBeUndefined()
      done()
  describe 'when user is neither the creator nor joiner', ->
    beforeEach utils.wrap (done) ->
      @user = yield utils.initUser({role: 'teacher'})
      yield utils.loginUser(@user)
      done()
    it 'returns a Forbidden Error', utils.wrap (done) ->
      [res, body] = yield request.getAsync({url: @url, json: true})
      expect(res.statusCode).toBe(403)
      expect(body.email).toBeUndefined()
      done()
# Fetching a single prepaid should backfill missing license dates.
describe 'GET /db/prepaid/:handle', ->
  it 'populates startDate and endDate with default values', utils.wrap (done) ->
    # Persist a prepaid with no explicit dates so the server must supply defaults.
    fixture = new Prepaid({type: 'course' })
    yield fixture.save()
    url = getURL("/db/prepaid/#{fixture.id}")
    [res, body] = yield request.getAsync({url: url, json: true})
    expect(body.startDate).toBe(Prepaid.DEFAULT_START_DATE)
    expect(body.endDate).toBe(Prepaid.DEFAULT_END_DATE)
    done()
describe 'POST /db/prepaid/:handle/redeemers', ->
  # A teacher-owned prepaid and a plain student; specs post the student's id
  # to @url to redeem a seat on the license.
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
    @teacher = yield utils.initUser({role: 'teacher'})
    @admin = yield utils.initAdmin()
    yield utils.loginUser(@admin)
    @prepaid = yield utils.makePrepaid({ creator: @teacher.id })
    yield utils.loginUser(@teacher)
    @student = yield utils.initUser()
    @url = getURL("/db/prepaid/#{@prepaid.id}/redeemers")
    done()
  it 'adds a given user to the redeemers property', utils.wrap (done) ->
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
    expect(body.redeemers.length).toBe(1)
    expect(res.statusCode).toBe(201)
    prepaid = yield Prepaid.findById(body._id)
    expect(prepaid.get('redeemers').length).toBe(1)
    @student = yield User.findById(@student.id)
    expect(@student.get('coursePrepaid')._id.equals(@prepaid._id)).toBe(true)
    # Redemption also promotes the user to the student role.
    expect(@student.get('role')).toBe('student')
    done()
describe 'when user is a joiner', ->
beforeEach ->
@joiner = yield utils.initUser({role: 'teacher', firstName: 'PI:NAME:<NAME>END_PI', lastName: 'PI:NAME:<NAME>END_PI'})
yield utils.loginUser(@admin)
yield utils.loginUser(@teacher)
yield utils.addJoinerToPrepaid(@prepaid, @joiner)
yield utils.loginUser(@joiner)
it 'adds a given user to the redeemers property', utils.wrap (done) ->
[res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
expect(body.redeemers.length).toBe(1)
expect(res.statusCode).toBe(201)
prepaid = yield Prepaid.findById(body._id)
expect(prepaid.get('redeemers').length).toBe(1)
@student = yield User.findById(@student.id)
expect(@student.get('coursePrepaid')._id.equals(@prepaid._id)).toBe(true)
expect(@student.get('role')).toBe('student')
done()
it 'returns 403 if maxRedeemers is reached', utils.wrap (done) ->
admin = yield utils.initAdmin()
yield utils.loginUser(admin)
prepaid = yield utils.makePrepaid({ creator: @teacher.id, maxRedeemers: 0 })
url = getURL("/db/prepaid/#{prepaid.id}/redeemers")
yield utils.loginUser(@teacher)
[res, body] = yield request.postAsync({uri: url, json: { userID: @student.id } })
expect(res.statusCode).toBe(403)
expect(res.body.message).toBe('Too many redeemers')
done()
it 'returns 403 unless the user is the "creator" or a joiner', utils.wrap (done) ->
@otherTeacher = yield utils.initUser({role: 'teacher'})
yield utils.loginUser(@otherTeacher)
[res, body] = yield request.postAsync({uri: @url, json: { userID: @student.id } })
expect(res.statusCode).toBe(403)
expect(res.body.message).toBe('You may not redeem licenses from this prepaid')
done()
it 'returns 403 if the prepaid is expired', utils.wrap (done) ->
admin = yield utils.initAdmin()
yield utils.loginUser(admin)
prepaid = yield utils.makePrepaid({ creator: @teacher.id, endDate: moment().subtract(1, 'month').toISOString() })
url = getURL("/db/prepaid/#{prepaid.id}/redeemers")
yield utils.loginUser(@teacher)
[res, body] = yield request.postAsync({uri: url, json: { userID: @student.id } })
expect(res.statusCode).toBe(403)
expect(res.body.message).toBe('This prepaid is expired')
done()
it 'is idempotent across prepaids collection', utils.wrap (done) ->
student = yield utils.initUser({ coursePrepaid: { _id: new Prepaid()._id } })
[res, body] = yield request.postAsync({uri: @url, json: { userID: student.id } })
expect(res.statusCode).toBe(200)
expect(body.redeemers.length).toBe(0)
done()
it 'is idempotent to itself', utils.wrap (done) ->
[res, body] = yield request.postAsync({uri: @url, json: { userID: @student.id } })
expect(body.redeemers?.length).toBe(1)
expect(res.statusCode).toBe(201)
[res, body] = yield request.postAsync({uri: @url, json: { userID: @student.id } })
expect(body.redeemers?.length).toBe(1)
expect(res.statusCode).toBe(200)
prepaid = yield Prepaid.findById(body._id)
expect(prepaid.get('redeemers').length).toBe(1)
student = yield User.findById(@student.id)
expect(student.get('coursePrepaid')._id.equals(@prepaid._id)).toBe(true)
done()
it 'updates the user if their license is expired', utils.wrap (done) ->
yield utils.loginUser(@admin)
prepaid = yield utils.makePrepaid({
creator: @teacher.id
startDate: moment().subtract(2, 'month').toISOString()
endDate: moment().subtract(1, 'month').toISOString()
})
@student.set('coursePrepaid', _.pick(prepaid.toObject(), '_id', 'startDate', 'endDate'))
yield @student.save()
yield utils.loginUser(@teacher)
[res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
expect(body.redeemers.length).toBe(1)
expect(res.statusCode).toBe(201)
student = yield User.findById(@student.id)
expect(student.get('coursePrepaid')._id.equals(@prepaid._id)).toBe(true)
done()
it 'replaces a starter license with a full license', utils.wrap (done) ->
yield utils.loginUser(@admin)
oldPrepaid = yield utils.makePrepaid({
creator: @teacher.id
startDate: moment().subtract(2, 'month').toISOString()
endDate: moment().add(4, 'month').toISOString()
type: 'starter_license'
})
@student.set('coursePrepaid', _.pick(oldPrepaid.toObject(), '_id', 'startDate', 'endDate', 'type'))
yield @student.save()
yield utils.loginUser(@teacher)
[res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
expect(body.redeemers.length).toBe(1)
expect(res.statusCode).toBe(201)
prepaid = yield Prepaid.findById(@prepaid._id)
expect(prepaid.get('redeemers').length).toBe(1)
student = yield User.findById(@student.id)
expect(student.get('coursePrepaid')._id.equals(@prepaid._id)).toBe(true)
done()
  it 'does NOT replace a full license with a starter license', utils.wrap (done) ->
    yield utils.loginUser(@admin)
    # Downgrade the license under test (@prepaid) to a starter license.
    @prepaid.set({
      creator: @teacher.id
      startDate: moment().subtract(2, 'month').toISOString()
      endDate: moment().add(4, 'month').toISOString()
      type: 'starter_license'
    })
    yield @prepaid.save()
    # Give the student an active full ('course') license instead.
    oldPrepaid = yield utils.makePrepaid({
      creator: @teacher.id
      startDate: moment().subtract(2, 'month').toISOString()
      endDate: moment().add(10, 'month').toISOString()
      type: 'course'
    })
    yield oldPrepaid.redeem(@student)
    yield utils.loginUser(@teacher)
    student = yield User.findById(@student.id)
    expect(student.get('coursePrepaid')._id.equals(oldPrepaid._id)).toBe(true)
    expect(student.get('coursePrepaid')._id.toString()).toBe(oldPrepaid._id.toString())
    # Redeeming the starter license is a no-op: 200 (not 201), no new redeemer,
    # and the student keeps the full license.
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
    expect(body.redeemers.length).toBe(0)
    expect(res.statusCode).toBe(200)
    student = yield User.findById(@student.id)
    expect(student.get('coursePrepaid')._id.equals(oldPrepaid._id)).toBe(true)
    expect(student.get('coursePrepaid')._id.toString()).toBe(oldPrepaid._id.toString())
    expect((yield Prepaid.findById(oldPrepaid._id)).get('redeemers').length).toBe(1)
    done()
  it 'adds includedCourseIDs to the user when redeeming', utils.wrap (done) ->
    yield utils.loginUser(@admin)
    # Starter licenses restrict access to a whitelist of courses.
    @prepaid.set({
      type: 'starter_license'
      includedCourseIDs: ['course_1', 'course_2']
    })
    yield @prepaid.save()
    yield utils.loginUser(@teacher)
    [res, body] = yield request.postAsync { uri: @url, json: { userID: @student.id } }
    expect(body.redeemers.length).toBe(1)
    expect(res.statusCode).toBe(201)
    # The license's course whitelist and type are copied onto the student.
    student = yield User.findById(@student.id)
    expect(student.get('coursePrepaid')?.includedCourseIDs).toEqual(['course_1', 'course_2'])
    expect(student.get('coursePrepaid')?.type).toEqual('starter_license')
    done()
  describe 'when user is a joiner on a shared license', ->
    # @creator owns the license; @joiner is granted shared access via
    # addJoinerToPrepaid and is the logged-in teacher for the test below.
    beforeEach utils.wrap (done) ->
      yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
      @creator = yield utils.initUser({role: 'teacher'})
      @joiner = yield utils.initUser({role: 'teacher'})
      @admin = yield utils.initAdmin()
      yield utils.loginUser(@admin)
      @prepaid = yield utils.makePrepaid({ creator: @creator.id })
      yield utils.loginUser(@creator)
      yield utils.addJoinerToPrepaid(@prepaid, @joiner)
      yield utils.loginUser(@joiner)
      @student = yield utils.initUser()
      @url = getURL("/db/prepaid/#{@prepaid.id}/redeemers")
      done()
    it 'allows teachers with shared licenses to redeem', utils.wrap (done) ->
      prepaid = yield Prepaid.findById(@prepaid.id)
      expect(prepaid.get('redeemers').length).toBe(0)
      [res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
      expect(body.redeemers.length).toBe(1)
      expect(res.statusCode).toBe(201)
      prepaid = yield Prepaid.findById(body._id)
      expect(prepaid.get('redeemers').length).toBe(1)
      # Redeeming links the license to the user and assigns the student role.
      @student = yield User.findById(@student.id)
      expect(@student.get('coursePrepaid')._id.equals(@prepaid._id)).toBe(true)
      expect(@student.get('role')).toBe('student')
      done()
describe 'DELETE /db/prepaid/:handle/redeemers', ->
  # Each test starts with @student already redeemed onto @teacher's @prepaid,
  # so DELETE exercises revoking an existing redeemer.
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
    @teacher = yield utils.initUser({role: 'teacher'})
    @admin = yield utils.initAdmin()
    yield utils.loginUser(@admin)
    @prepaid = yield utils.makePrepaid({ creator: @teacher.id })
    yield utils.loginUser(@teacher)
    @student = yield utils.initUser()
    @url = getURL("/db/prepaid/#{@prepaid.id}/redeemers")
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
    expect(res.statusCode).toBe(201)
    done()
  it 'removes a given user from the redeemers property', utils.wrap (done) ->
    prepaid = yield Prepaid.findById(@prepaid.id)
    expect(prepaid.get('redeemers').length).toBe(1)
    [res, body] = yield request.delAsync {uri: @url, json: { userID: @student.id } }
    expect(body.redeemers.length).toBe(0)
    expect(res.statusCode).toBe(200)
    prepaid = yield Prepaid.findById(body._id)
    expect(prepaid.get('redeemers').length).toBe(0)
    # Revoking also clears the license from the student document.
    student = yield User.findById(@student.id)
    expect(student.get('coursePrepaid')).toBeUndefined()
    done()
  it 'returns 403 unless the user is the "creator"', utils.wrap (done) ->
    otherTeacher = yield utils.initUser({role: 'teacher'})
    yield utils.loginUser(otherTeacher)
    [res, body] = yield request.delAsync {uri: @url, json: { userID: @student.id } }
    expect(res.statusCode).toBe(403)
    done()
  it 'returns 422 unless the target user is in "redeemers"', utils.wrap (done) ->
    otherStudent = yield utils.initUser({role: 'student'})
    [res, body] = yield request.delAsync {uri: @url, json: { userID: otherStudent.id } }
    expect(res.statusCode).toBe(422)
    done()
  it 'returns 403 if the prepaid is a starter license', utils.wrap ->
    # NOTE(review): unlike the sibling specs this one takes no (done) callback —
    # assumes utils.wrap resolves the spec when the generator completes; confirm.
    yield @prepaid.update({$set: {type: 'starter_license'}})
    [res, body] = yield request.delAsync {uri: @url, json: { userID: @student.id } }
    expect(res.statusCode).toBe(403)
  describe 'when user is a joiner on a shared license', ->
    # Same setup as the outer suite, except the redeem (and the revoke under
    # test) are performed by @joiner, who has shared access to @creator's license.
    beforeEach utils.wrap (done) ->
      yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
      @creator = yield utils.initUser({role: 'teacher'})
      @joiner = yield utils.initUser({role: 'teacher'})
      @admin = yield utils.initAdmin()
      yield utils.loginUser(@admin)
      @prepaid = yield utils.makePrepaid({ creator: @creator.id })
      yield utils.loginUser(@creator)
      yield utils.addJoinerToPrepaid(@prepaid, @joiner)
      yield utils.loginUser(@joiner)
      @student = yield utils.initUser()
      @url = getURL("/db/prepaid/#{@prepaid.id}/redeemers")
      [res, body] = yield request.postAsync {uri: @url, json: { userID: @student.id } }
      expect(res.statusCode).toBe(201)
      done()
    it 'allows teachers with shared licenses to revoke', utils.wrap (done) ->
      prepaid = yield Prepaid.findById(@prepaid.id)
      expect(prepaid.get('redeemers').length).toBe(1)
      [res, body] = yield request.delAsync {uri: @url, json: { userID: @student.id } }
      expect(body.redeemers.length).toBe(0)
      expect(res.statusCode).toBe(200)
      prepaid = yield Prepaid.findById(body._id)
      expect(prepaid.get('redeemers').length).toBe(0)
      student = yield User.findById(@student.id)
      expect(student.get('coursePrepaid')).toBeUndefined()
      done()
describe 'POST /db/prepaid/:handle/joiners', ->
  # License sharing: @teacher (the creator) adds @joiner (another teacher)
  # to the prepaid's joiners list.
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
    @teacher = yield utils.initUser({role: 'teacher'})
    @admin = yield utils.initAdmin()
    yield utils.loginUser(@admin)
    @prepaid = yield utils.makePrepaid({ creator: @teacher.id })
    yield utils.loginUser(@teacher)
    @joiner = yield utils.initUser({role: 'teacher'})
    @url = getURL("/db/prepaid/#{@prepaid.id}/joiners")
    done()
  it 'adds a given user to the joiners property', utils.wrap (done) ->
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @joiner.id } }
    expect(res.statusCode).toBe(201)
    prepaid = yield Prepaid.findById(body._id)
    expect(prepaid.get('joiners').length).toBe(1)
    # userID is an ObjectId; '+ '' ' stringifies it for comparison with .id.
    expect(prepaid.get('joiners')[0].userID + '').toBe(@joiner.id)
    done()
  describe 'when a user has already been added to joiners', ->
    it "doesn't add a user twice", utils.wrap (done) ->
      [res, body] = yield request.postAsync {uri: @url, json: { userID: @joiner.id } }
      expect(res.statusCode).toBe(201)
      # Second add is rejected with a translatable error and no duplicate entry.
      [res, body] = yield request.postAsync {uri: @url, json: { userID: @joiner.id } }
      expect(res.statusCode).toBe(422)
      expect(body.i18n).toBe('share_licenses.already_shared')
      prepaid = yield Prepaid.findById(@prepaid.id)
      expect(prepaid.get('joiners').length).toBe(1)
      expect(prepaid.get('joiners')[0].userID + '').toBe(@joiner.id)
      done()
  it 'returns 403 if user is not the creator', utils.wrap (done) ->
    yield utils.loginUser(@joiner)
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @joiner.id } }
    expect(res.statusCode).toBe(403)
    done()
  it 'returns 403 if user is not a teacher', utils.wrap (done) ->
    @user = yield utils.initUser()
    yield utils.loginUser(@user)
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @joiner.id } }
    expect(res.statusCode).toBe(403)
    done()
  it 'returns 422 if joiner is not a teacher', utils.wrap (done) ->
    @nonteacher = yield utils.initUser()
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @nonteacher.id } }
    expect(res.statusCode).toBe(422)
    done()
  it 'returns 404 if prepaid is not found', utils.wrap (done) ->
    # Well-formed but nonexistent 24-char ObjectId.
    @url = getURL("/db/prepaid/123456789012345678901234/joiners")
    [res, body] = yield request.postAsync {uri: @url, json: { userID: @joiner.id } }
    expect(res.statusCode).toBe(404)
    done()
describe 'GET /db/prepaid?creator=:id', ->
  # Fixtures: a current prepaid and an expired one owned by @teacher, one
  # owned by @admin (must not leak into @teacher's listing), and one legacy
  # prepaid with startDate/endDate unset to simulate pre-migration documents.
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([Course, CourseInstance, Payment, Prepaid, User])
    @teacher = yield utils.initUser({role: 'teacher'})
    @admin = yield utils.initAdmin()
    yield utils.loginUser(@admin)
    @prepaid = yield utils.makePrepaid({ creator: @teacher.id })
    @otherPrepaid = yield utils.makePrepaid({ creator: @admin.id })
    @expiredPrepaid = yield utils.makePrepaid({ creator: @teacher.id, endDate: moment().subtract(1, 'month').toISOString() })
    @unmigratedPrepaid = yield utils.makePrepaid({ creator: @teacher.id })
    yield @unmigratedPrepaid.update({$unset: { endDate: '', startDate: '' }})
    yield utils.loginUser(@teacher)
    done()
  it 'return all prepaids for the creator', utils.wrap (done) ->
    url = getURL("/db/prepaid?creator=#{@teacher.id}")
    [res, body] = yield request.getAsync({uri: url, json: true})
    expect(res.statusCode).toBe(200)
    expect(res.body.length).toEqual(3)
    if _.any((prepaid._id is @otherPrepaid.id for prepaid in res.body))
      fail('Found the admin prepaid in response')
    # Even the unmigrated prepaid should come back with dates filled in.
    for prepaid in res.body
      unless prepaid.startDate and prepaid.endDate
        fail('All prepaids should have start and end dates')
    expect(res.body[0]._id).toBe(@prepaid.id)
    done()
  it 'returns 403 if the user tries to view another user\'s prepaids', utils.wrap (done) ->
    anotherUser = yield utils.initUser()
    url = getURL("/db/prepaid?creator=#{anotherUser.id}")
    [res, body] = yield request.getAsync({uri: url, json: true})
    expect(res.statusCode).toBe(403)
    done()
  describe 'when includeShared is set to true', ->
    # Share @teacher's @prepaid with @joiner, who also owns a prepaid.
    beforeEach utils.wrap (done) ->
      yield utils.loginUser(@admin)
      @joiner = yield utils.initUser({role: 'teacher'})
      @joinersPrepaid = yield utils.makePrepaid({ creator: @joiner.id })
      yield @prepaid.update({$set: { joiners: { userID: @joiner._id }}})
      yield utils.loginUser(@joiner)
      done()
    it 'returns licenses that have been shared with the user', utils.wrap (done) ->
      url = getURL("/db/prepaid?creator=#{@joiner.id}&includeShared=true")
      [res, body] = yield request.getAsync({uri: url, json: true})
      expect(res.statusCode).toBe(200)
      # The joiner's own prepaid plus the one shared by @teacher.
      expect(res.body.length).toEqual(2)
      if _.any((prepaid._id is @otherPrepaid.id for prepaid in res.body))
        fail('Found the admin prepaid in response')
      for prepaid in res.body
        unless prepaid.startDate and prepaid.endDate
          fail('All prepaids should have start and end dates')
      expect(res.body[0]._id).toBe(@prepaid.id)
      done()
describe '/db/prepaid', ->
  beforeEach utils.wrap (done) ->
    yield utils.populateProducts()
    done()
  prepaidURL = getURL('/db/prepaid')
  headers = {'X-Change-Plan': 'true'}
  joeData = null
  stripe = require('stripe')(config.stripe.secretKey)
  # Shared state: written by the purchase test below and read by the redeem
  # tests further down. See the TODOs about this test coupling.
  joeCode = null
  # Assert the shape of a purchased 'course' prepaid and, for non-admin
  # purchasers, the Payment document the purchase should have logged.
  verifyCoursePrepaid = (user, prepaid, done) ->
    expect(prepaid.creator).toEqual(user.id)
    expect(prepaid.type).toEqual('course')
    expect(prepaid.maxRedeemers).toBeGreaterThan(0)
    expect(prepaid.code).toMatch(/^\w{8}$/)
    return done() if user.isAdmin()
    Payment.findOne {prepaidID: new ObjectId(prepaid._id)}, (err, payment) ->
      expect(err).toBeNull()
      expect(payment).not.toBeNull()
      expect(payment?.get('purchaser')).toEqual(user._id)
      done()
  # Like verifyCoursePrepaid, but for 'subscription' prepaids, which also
  # carry a couponID property.
  verifySubscriptionPrepaid = (user, prepaid, done) ->
    expect(prepaid.creator).toEqual(user.id)
    expect(prepaid.type).toEqual('subscription')
    expect(prepaid.maxRedeemers).toBeGreaterThan(0)
    expect(prepaid.code).toMatch(/^\w{8}$/)
    expect(prepaid.properties?.couponID).toEqual('free')
    return done() if user.isAdmin()
    Payment.findOne {prepaidID: new ObjectId(prepaid._id)}, (err, payment) ->
      expect(err).toBeNull()
      expect(payment).not.toBeNull()
      expect(payment?.get('purchaser')).toEqual(user._id)
      done()
  # Legacy setup-as-spec: wipes the collections once before the sequence below.
  it 'Clear database', (done) ->
    clearModels [Course, CourseInstance, Payment, Prepaid, User], (err) ->
      throw err if err
      done()
  it 'Anonymous creates prepaid code', (done) ->
    createPrepaid 'subscription', 1, 0, (err, res, body) ->
      expect(err).toBeNull()
      expect(res.statusCode).toBe(401)
      done()
  it 'Non-admin creates prepaid code', (done) ->
    loginNewUser (user1) ->
      expect(user1.isAdmin()).toEqual(false)
      createPrepaid 'subscription', 4, 0, (err, res, body) ->
        expect(err).toBeNull()
        expect(res.statusCode).toBe(403)
        done()
  it 'Admin creates prepaid code with type subscription', (done) ->
    loginNewUser (user1) ->
      user1.set('permissions', ['admin'])
      user1.save (err, user1) ->
        expect(err).toBeNull()
        expect(user1.isAdmin()).toEqual(true)
        createPrepaid 'subscription', 1, 0, (err, res, body) ->
          expect(err).toBeNull()
          expect(res.statusCode).toBe(200)
          verifySubscriptionPrepaid user1, body, done
  it 'Admin creates prepaid code with type terminal_subscription', (done) ->
    loginNewUser (user1) ->
      user1.set('permissions', ['admin'])
      user1.save (err, user1) ->
        expect(err).toBeNull()
        expect(user1.isAdmin()).toEqual(true)
        createPrepaid 'terminal_subscription', 2, 3, (err, res, body) ->
          expect(err).toBeNull()
          expect(res.statusCode).toBe(200)
          expect(body.creator).toEqual(user1.id)
          expect(body.type).toEqual('terminal_subscription')
          expect(body.maxRedeemers).toEqual(2)
          expect(body.properties?.months).toEqual(3)
          expect(body.code).toMatch(/^\w{8}$/)
          done()
  it 'Admin creates prepaid code with invalid type', (done) ->
    loginNewUser (user1) ->
      user1.set('permissions', ['admin'])
      user1.save (err, user1) ->
        expect(err).toBeNull()
        expect(user1.isAdmin()).toEqual(true)
        createPrepaid 'bulldozer', 1, 0, (err, res, body) ->
          expect(err).toBeNull()
          expect(res.statusCode).toBe(403)
          done()
  it 'Admin creates prepaid code with no type specified', (done) ->
    loginNewUser (user1) ->
      user1.set('permissions', ['admin'])
      user1.save (err, user1) ->
        expect(err).toBeNull()
        expect(user1.isAdmin()).toEqual(true)
        createPrepaid null, 1, 0, (err, res, body) ->
          expect(err).toBeNull()
          expect(res.statusCode).toBe(403)
          done()
  it 'Admin creates prepaid code with invalid maxRedeemers', (done) ->
    loginNewUser (user1) ->
      user1.set('permissions', ['admin'])
      user1.save (err, user1) ->
        expect(err).toBeNull()
        expect(user1.isAdmin()).toEqual(true)
        createPrepaid 'subscription', 0, 0, (err, res, body) ->
          expect(err).toBeNull()
          expect(res.statusCode).toBe(403)
          done()
  it 'Non-admin requests /db/prepaid', (done) ->
    loginNewUser (user1) ->
      expect(user1.isAdmin()).toEqual(false)
      request.get {uri: prepaidURL}, (err, res, body) ->
        expect(err).toBeNull()
        expect(res.statusCode).toBe(403)
        done()
  it 'Admin requests /db/prepaid', (done) ->
    loginNewUser (user1) ->
      user1.set('permissions', ['admin'])
      user1.save (err, user1) ->
        expect(err).toBeNull()
        expect(user1.isAdmin()).toEqual(true)
        createPrepaid 'subscription', 1, 0, (err, res, prepaid) ->
          expect(err).toBeNull()
          expect(res.statusCode).toBe(200)
          request.get {uri: prepaidURL}, (err, res, body) ->
            expect(err).toBeNull()
            expect(res.statusCode).toBe(200)
            # The listing should contain the prepaid we just created; the
            # verify helper calls done() once it is found.
            prepaids = JSON.parse(body)
            found = false
            for p in prepaids
              if p._id is prepaid._id
                found = true
                verifySubscriptionPrepaid user1, p, done
                break
            expect(found).toEqual(true)
            done() unless found
  describe 'Purchase course', ->
    afterEach nockUtils.teardownNock
    it 'Standard user purchases a prepaid for 0 seats', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-course-test-1.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          loginNewUser (user1) ->
            purchasePrepaid 'course', {}, 0, token.id, (err, res, prepaid) ->
              expect(err).toBeNull()
              expect(res.statusCode).toBe(422)
              nockDone()
              done()
    it 'Standard user purchases a prepaid for 1 seat', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-course-test-2.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          loginNewUser (user1) ->
            purchasePrepaid 'course', {}, 1, token.id, (err, res, prepaid) ->
              expect(err).toBeNull()
              expect(res.statusCode).toBe(200)
              verifyCoursePrepaid user1, prepaid, ->
                nockDone()
                done()
    it 'Standard user purchases a prepaid for 3 seats', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-course-test-3.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          loginNewUser (user1) ->
            purchasePrepaid 'course', {}, 3, token.id, (err, res, prepaid) ->
              expect(err).toBeNull()
              expect(res.statusCode).toBe(200)
              verifyCoursePrepaid user1, prepaid, ->
                nockDone()
                done()
  describe 'Purchase terminal_subscription', ->
    afterEach nockUtils.teardownNock
    it 'Anonymous submits a prepaid purchase', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-1.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          logoutUser () ->
            purchasePrepaid 'terminal_subscription', months: 3, 3, token.id, (err, res, prepaid) ->
              expect(err).toBeNull()
              expect(res.statusCode).toBe(401)
              nockDone()
              done()
    it 'Should error if type isnt terminal_subscription', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-2.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          loginNewUser (user1) ->
            purchasePrepaid 'subscription', months: 3, 3, token.id, (err, res, prepaid) ->
              expect(err).toBeNull()
              expect(res.statusCode).toBe(403)
              nockDone()
              done()
    it 'Should error if maxRedeemers is -1', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-3.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          loginNewUser (user1) ->
            purchasePrepaid 'terminal_subscription', months: 3, -1, token.id, (err, res, prepaid) ->
              expect(err).toBeNull()
              expect(res.statusCode).toBe(422)
              nockDone()
              done()
    it 'Should error if maxRedeemers is foo', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-4.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          loginNewUser (user1) ->
            purchasePrepaid 'terminal_subscription', months: 3, 'foo', token.id, (err, res, prepaid) ->
              expect(err).toBeNull()
              expect(res.statusCode).toBe(422)
              nockDone()
              done()
    it 'Should error if months is -1', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-5.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          loginNewUser (user1) ->
            purchasePrepaid 'terminal_subscription', months: -1, 3, token.id, (err, res, prepaid) ->
              expect(err).toBeNull()
              expect(res.statusCode).toBe(422)
              nockDone()
              done()
    it 'Should error if months is foo', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-6.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          loginNewUser (user1) ->
            purchasePrepaid 'terminal_subscription', months: 'foo', 3, token.id, (err, res, prepaid) ->
              expect(err).toBeNull()
              expect(res.statusCode).toBe(422)
              nockDone()
              done()
    it 'Should error if maxRedeemers and months are less than 3', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-7.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          loginNewUser (user1) ->
            purchasePrepaid 'terminal_subscription', months: 1, 1, token.id, (err, res, prepaid) ->
              expect(err).toBeNull()
              expect(res.statusCode).toBe(403)
              nockDone()
              done()
    it 'User submits valid prepaid code purchase', (done) ->
      nockUtils.setupNock 'db-prepaid-purchase-term-sub-test-8.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          stripeTokenID = token.id
          loginJoe (joe) ->
            # Subscribe joe first so the later redeem tests exercise the
            # "already subscribed" path.
            joeData = joe.toObject()
            joeData.stripe = {
              token: stripeTokenID
              planID: 'basic'
            }
            request.put {uri: getURL('/db/user'), json: joeData, headers: headers }, (err, res, body) ->
              joeData = body
              expect(res.statusCode).toBe(200)
              expect(joeData.stripe.customerID).toBeDefined()
              expect(firstSubscriptionID = joeData.stripe.subscriptionID).toBeDefined()
              expect(joeData.stripe.planID).toBe('basic')
              expect(joeData.stripe.token).toBeUndefined()
              # TODO: is this test still valid after new token?
              stripe.tokens.create {
                card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
              }, (err, token) ->
                purchasePrepaid 'terminal_subscription', months: 3, 3, token.id, (err, res, prepaid) ->
                  expect(err).toBeNull()
                  expect(res.statusCode).toBe(200)
                  expect(prepaid.type).toEqual('terminal_subscription')
                  expect(prepaid.code).toBeDefined()
                  # Saving this code for later tests
                  # TODO: don't make tests dependent on each other
                  joeCode = prepaid.code
                  expect(prepaid.creator).toBeDefined()
                  expect(prepaid.maxRedeemers).toEqual(3)
                  expect(prepaid.exhausted).toBe(false)
                  expect(prepaid.properties).toBeDefined()
                  expect(prepaid.properties.months).toEqual(3)
                  nockDone()
                  done()
    it 'Should have logged a Payment with the correct amount', (done) ->
      loginJoe (joe) ->
        query =
          purchaser: joe._id
        Payment.find query, (err, payments) ->
          expect(err).toBeNull()
          expect(payments).not.toBeNull()
          expect(payments.length).toEqual(1)
          # 900 cents — assumes 3 seats x 3 months at the subscription price
          # set by populateProducts; confirm against the product fixtures.
          expect(payments[0].get('amount')).toEqual(900)
          done()
    it 'Anonymous cant redeem a prepaid code', (done) ->
      logoutUser () ->
        subscribeWithPrepaid joeCode, (err, res) ->
          expect(err).toBeNull()
          expect(res?.statusCode).toEqual(401)
          done()
    it 'User cant redeem a nonexistant prepaid code', (done) ->
      loginJoe (joe) ->
        subscribeWithPrepaid 'abc123', (err, res) ->
          expect(err).toBeNull()
          expect(res.statusCode).toEqual(404)
          done()
    it 'User cant redeem empty code', (done) ->
      loginJoe (joe) ->
        subscribeWithPrepaid '', (err, res) ->
          expect(err).toBeNull()
          expect(res.statusCode).toEqual(422)
          done()
    it 'Anonymous cant fetch a prepaid code', (done) ->
      expect(joeCode).not.toBeNull()
      logoutUser () ->
        fetchPrepaid joeCode, (err, res) ->
          expect(err).toBeNull()
          expect(res.statusCode).toEqual(403)
          done()
    it 'User can fetch a prepaid code', (done) ->
      expect(joeCode).not.toBeNull()
      loginJoe (joe) ->
        fetchPrepaid joeCode, (err, res, body) ->
          expect(err).toBeNull()
          expect(res.statusCode).toEqual(200)
          expect(body).toBeDefined()
          return done() unless body
          prepaid = JSON.parse(body)
          expect(prepaid.code).toEqual(joeCode)
          expect(prepaid.maxRedeemers).toEqual(3)
          expect(prepaid.properties?.months).toEqual(3)
          done()
  # TODO: Move redeem subscription prepaid code tests to subscription tests file
  describe 'Subscription redeem tests', ->
    afterEach nockUtils.teardownNock
    it 'Creator can redeeem a prepaid code', (done) ->
      nockUtils.setupNock 'db-sub-redeem-test-1.json', (err, nockDone) ->
        loginJoe (joe) ->
          expect(joeCode).not.toBeNull()
          expect(joeData.stripe?.customerID).toBeDefined()
          expect(joeData.stripe?.subscriptionID).toBeDefined()
          return done() unless joeData.stripe?.customerID
          # joe has a stripe subscription, so test if the months are added to the end of it.
          stripe.customers.retrieve joeData.stripe.customerID, (err, customer) =>
            expect(err).toBeNull()
            findStripeSubscription customer.id, subscriptionID: joeData.stripe?.subscriptionID, (err, subscription) =>
              if subscription
                stripeSubscriptionPeriodEndDate = new moment(subscription.current_period_end * 1000)
              else
                # Deliberately failing assertion: the subscription should exist.
                expect(stripeSubscriptionPeriodEndDate).toBeDefined()
                return done()
              subscribeWithPrepaid joeCode, (err, res, result) =>
                expect(err).toBeNull()
                expect(res.statusCode).toEqual(200)
                # Free period should extend 3 months past the paid period's end,
                # and the paid Stripe subscription should be cancelled.
                endDate = stripeSubscriptionPeriodEndDate.add(3, 'months').toISOString().substring(0, 10)
                expect(result?.stripe?.free.substring(0,10)).toEqual(endDate)
                expect(result?.purchased?.gems).toEqual(14000)
                findStripeSubscription customer.id, subscriptionID: joeData.stripe?.subscriptionID, (err, subscription) =>
                  expect(subscription).toBeNull()
                  nockDone()
                  done()
    it 'User can redeem a prepaid code', (done) ->
      loginSam (sam) ->
        subscribeWithPrepaid joeCode, (err, res, result) ->
          expect(err).toBeNull()
          expect(res.statusCode).toEqual(200)
          endDate = new moment().add(3, 'months').toISOString().substring(0, 10)
          expect(result?.stripe?.free.substring(0,10)).toEqual(endDate)
          expect(result?.purchased?.gems).toEqual(10500)
          done()
    it 'Wont allow the same person to redeem twice', (done) ->
      loginSam (sam) ->
        subscribeWithPrepaid joeCode, (err, res, result) ->
          expect(err).toBeNull()
          expect(res.statusCode).toEqual(403)
          done()
    it 'Will return redeemed code as part of codes list', (done) ->
      loginSam (sam) ->
        request.get "#{getURL('/db/user')}/#{sam.id}/prepaid_codes", (err, res) ->
          expect(err).toBeNull()
          expect(res.statusCode).toEqual(200)
          codes = JSON.parse res.body
          expect(codes.length).toEqual(1)
          done()
    it 'Third user can redeem a prepaid code', (done) ->
      loginNewUser (user) ->
        subscribeWithPrepaid joeCode, (err, res, result) ->
          expect(err).toBeNull()
          expect(res.statusCode).toEqual(200)
          endDate = new moment().add(3, 'months').toISOString().substring(0, 10)
          expect(result?.stripe?.free.substring(0,10)).toEqual(endDate)
          expect(result?.purchased?.gems).toEqual(10500)
          done()
    it 'Fourth user cannot redeem code', (done) ->
      # joeCode has maxRedeemers=3 and is exhausted by now.
      loginNewUser (user) ->
        subscribeWithPrepaid joeCode, (err, res, result) ->
          expect(err).toBeNull()
          expect(res.statusCode).toEqual(403)
          done()
    it 'Can fetch a list of purchased and redeemed prepaid codes', (done) ->
      nockUtils.setupNock 'db-sub-redeem-test-2.json', (err, nockDone) ->
        stripe.tokens.create {
          card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
        }, (err, token) ->
          loginNewUser (user) ->
            purchasePrepaid 'terminal_subscription', months: 1, 3, token.id, (err, res, prepaid) ->
              request.get "#{getURL('/db/user')}/#{user.id}/prepaid_codes", (err, res) ->
                expect(err).toBeNull()
                expect(res.statusCode).toEqual(200)
                codes = JSON.parse res.body
                expect(codes.length).toEqual(1)
                expect(codes[0].maxRedeemers).toEqual(3)
                expect(codes[0].properties).toBeDefined()
                expect(codes[0].properties.months).toEqual(1)
                nockDone()
                done()
    it 'thwarts query injections', utils.wrap (done) ->
      user = yield utils.initUser()
      yield utils.loginUser(user)
      # A Mongo operator object instead of a string must be rejected, not
      # passed through to the query.
      code = { $exists: true }
      subscribeWithPrepaidAsync = Promise.promisify(subscribeWithPrepaid)
      res = yield subscribeWithPrepaidAsync(code)
      expect(res.statusCode).toBe(422)
      expect(res.body.message).toBe('You must provide a valid prepaid code.')
      done()
    it 'enforces the maximum number of redeemers in a race condition', utils.wrap (done) ->
      nockDone = yield nockUtils.setupNockAsync 'db-sub-redeem-test-3.json'
      stripe.tokens.createAsync = Promise.promisify(stripe.tokens.create, {context: stripe.tokens})
      token = yield stripe.tokens.createAsync({
        card: { number: '4242424242424242', exp_month: 12, exp_year: 2020, cvc: '123' }
      })
      user = yield utils.initUser()
      yield utils.loginUser(user)
      codeRedeemers = 50
      codeMonths = 3
      redeemers = 51
      purchasePrepaidAsync = Promise.promisify(purchasePrepaid, {multiArgs: true})
      [res, prepaid] = yield purchasePrepaidAsync('terminal_subscription', months: codeMonths, codeRedeemers, token.id)
      expect(prepaid).toBeDefined()
      expect(prepaid.code).toBeDefined()
      # Make 'threads', which are objects that encapsulate each user and their cookies
      threads = []
      for index in [0...redeemers]
        thread = {}
        thread.request = request.defaults({jar: request.jar()})
        thread.request.postAsync = Promise.promisify(thread.request.post, { context: thread.request })
        thread.user = yield utils.initUser()
        yield utils.loginUser(thread.user, {request: thread.request})
        threads.push(thread)
      # Spawn all requests at once!
      requests = []
      options = {
        url: getURL('/db/subscription/-/subscribe_prepaid')
        json: { ppc: prepaid.code }
      }
      for thread in threads
        requests.push(thread.request.postAsync(options))
      # Wait until all requests finish, make sure all but one succeeded
      responses = yield requests
      redeemed = _.size(_.where(responses, {statusCode: 200}))
      errors = _.size(_.where(responses, {statusCode: 403}))
      expect(redeemed).toEqual(codeRedeemers)
      expect(errors).toEqual(redeemers - codeRedeemers)
      nockDone()
      done()
|
[
{
"context": "\nGulp configure script.\n@create 2014-10-07\n@author KoutarouYabe <idolm@ster.pw>\n###\n\nloadTasks = (path)->\n inclu",
"end": 66,
"score": 0.9998943209648132,
"start": 54,
"tag": "NAME",
"value": "KoutarouYabe"
},
{
"context": " script.\n@create 2014-10-07\n@author ... | gulpfile.coffee | ky0615/atc_tram | 0 | ###
Gulp configure script.
@create 2014-10-07
@author KoutarouYabe <idolm@ster.pw>
###
loadTasks = (path)->
includeAll(
dirname: require("path").resolve __dirname, path
filter: /(.+)\.(js|coffee)$/
) or {}
invokeConfigFn = (tasks) ->
for taskName of tasks
plugins.error = (error)->
plugins.util.log error.toString()
tasks[taskName] gulp, plugins, path if tasks.hasOwnProperty(taskName)
gulp = require "gulp"
plugins = require("gulp-load-plugins")(
pattern: [
"gulp-*"
"merge-*"
"run-*"
"main-*"
]
replaceString: /\bgulp[\-.]|run[\-.]|merge[\-.]|main[\-.]/
camelizePluginName: true
lazy: true
)
plugins.config =
destPath: "./www/"
env: process.env.ENV_VARIABLE || "development"
plugins.colors = require "colors"
path = require "path"
includeAll = require "include-all"
taskConfigurations = loadTasks "./tasks/config"
registerDefinitions = loadTasks "./tasks/register"
if not registerDefinitions.default
registerDefinitions.default = (gulp)->
gulp.task 'default', []
invokeConfigFn taskConfigurations
invokeConfigFn registerDefinitions
module.exports = gulp
| 109607 | ###
Gulp configure script.
@create 2014-10-07
@author <NAME> <<EMAIL>>
###
loadTasks = (path)->
includeAll(
dirname: require("path").resolve __dirname, path
filter: /(.+)\.(js|coffee)$/
) or {}
invokeConfigFn = (tasks) ->
for taskName of tasks
plugins.error = (error)->
plugins.util.log error.toString()
tasks[taskName] gulp, plugins, path if tasks.hasOwnProperty(taskName)
gulp = require "gulp"
plugins = require("gulp-load-plugins")(
pattern: [
"gulp-*"
"merge-*"
"run-*"
"main-*"
]
replaceString: /\bgulp[\-.]|run[\-.]|merge[\-.]|main[\-.]/
camelizePluginName: true
lazy: true
)
plugins.config =
destPath: "./www/"
env: process.env.ENV_VARIABLE || "development"
plugins.colors = require "colors"
path = require "path"
includeAll = require "include-all"
taskConfigurations = loadTasks "./tasks/config"
registerDefinitions = loadTasks "./tasks/register"
if not registerDefinitions.default
registerDefinitions.default = (gulp)->
gulp.task 'default', []
invokeConfigFn taskConfigurations
invokeConfigFn registerDefinitions
module.exports = gulp
| true | ###
Gulp configure script.
@create 2014-10-07
@author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
###
loadTasks = (path)->
includeAll(
dirname: require("path").resolve __dirname, path
filter: /(.+)\.(js|coffee)$/
) or {}
invokeConfigFn = (tasks) ->
for taskName of tasks
plugins.error = (error)->
plugins.util.log error.toString()
tasks[taskName] gulp, plugins, path if tasks.hasOwnProperty(taskName)
gulp = require "gulp"
plugins = require("gulp-load-plugins")(
pattern: [
"gulp-*"
"merge-*"
"run-*"
"main-*"
]
replaceString: /\bgulp[\-.]|run[\-.]|merge[\-.]|main[\-.]/
camelizePluginName: true
lazy: true
)
plugins.config =
destPath: "./www/"
env: process.env.ENV_VARIABLE || "development"
plugins.colors = require "colors"
path = require "path"
includeAll = require "include-all"
taskConfigurations = loadTasks "./tasks/config"
registerDefinitions = loadTasks "./tasks/register"
if not registerDefinitions.default
registerDefinitions.default = (gulp)->
gulp.task 'default', []
invokeConfigFn taskConfigurations
invokeConfigFn registerDefinitions
module.exports = gulp
|
[
{
"context": "ibutes', ->\n template = -> br data: { name: 'Name', value: 'Value' }\n expect(render template).",
"end": 435,
"score": 0.6844505071640015,
"start": 431,
"tag": "NAME",
"value": "Name"
}
] | test/attributes.coffee | hurrymaplelad/teact | 29 | expect = require 'expect.js'
{a, br, div} = require '../src/teact'
{render} = require './helpers'
describe 'Attributes', ->
describe 'with a hash', ->
it 'renders the corresponding HTML attributes', ->
template = -> a href: '/', title: 'Home'
expect(render template).to.equal '<a href="/" title="Home"></a>'
describe 'data attribute', ->
it 'expands attributes', ->
template = -> br data: { name: 'Name', value: 'Value' }
expect(render template).to.equal '<br data-name="Name" data-value="Value"/>'
describe 'nested hyphenated attribute', ->
it 'renders', ->
template = ->
div 'data-on-x': 'beep', ->
div 'data-on-y': 'boop'
expect(render template).to.equal '<div data-on-x="beep"><div data-on-y="boop"></div></div>'
| 165479 | expect = require 'expect.js'
{a, br, div} = require '../src/teact'
{render} = require './helpers'
describe 'Attributes', ->
describe 'with a hash', ->
it 'renders the corresponding HTML attributes', ->
template = -> a href: '/', title: 'Home'
expect(render template).to.equal '<a href="/" title="Home"></a>'
describe 'data attribute', ->
it 'expands attributes', ->
template = -> br data: { name: '<NAME>', value: 'Value' }
expect(render template).to.equal '<br data-name="Name" data-value="Value"/>'
describe 'nested hyphenated attribute', ->
it 'renders', ->
template = ->
div 'data-on-x': 'beep', ->
div 'data-on-y': 'boop'
expect(render template).to.equal '<div data-on-x="beep"><div data-on-y="boop"></div></div>'
| true | expect = require 'expect.js'
{a, br, div} = require '../src/teact'
{render} = require './helpers'
describe 'Attributes', ->
describe 'with a hash', ->
it 'renders the corresponding HTML attributes', ->
template = -> a href: '/', title: 'Home'
expect(render template).to.equal '<a href="/" title="Home"></a>'
describe 'data attribute', ->
it 'expands attributes', ->
template = -> br data: { name: 'PI:NAME:<NAME>END_PI', value: 'Value' }
expect(render template).to.equal '<br data-name="Name" data-value="Value"/>'
describe 'nested hyphenated attribute', ->
it 'renders', ->
template = ->
div 'data-on-x': 'beep', ->
div 'data-on-y': 'boop'
expect(render template).to.equal '<div data-on-x="beep"><div data-on-y="boop"></div></div>'
|
[
{
"context": "',\n calls: [\n {\n url: '/user/peter/about.json',\n name: 'Get info about a us",
"end": 3973,
"score": 0.9372533559799194,
"start": 3968,
"tag": "USERNAME",
"value": "peter"
},
{
"context": " url: '/api/username_available.json?use... | app/assets/javascripts/controllers/demo.coffee | rrampage/monitor | 114 | angular.module('slug.services.demo', ['slug.service', 'slug.services'])
.factory 'DemoService', (Service, uuid) ->
create: (demo, callback) ->
endpoints = [ {url: demo.endpoint, code: demo.key} ]
service =
name: "#{demo.name} API",
description: demo.description,
endpoints: endpoints,
demo: demo.key
Service.save(service, callback)
update: (service, demo) ->
endpoints = [ {
url: demo.endpoint,
code: demo.key + uuid().substr(0, 8)
} ]
angular.extend service,
name: "#{demo.name} API",
description: demo.description,
endpoints: endpoints, demo: demo.key
.factory 'DemoApis', ->
apis = [
{
key: 'echo',
name: 'Echo',
img: 'echo.png',
endpoint: 'https://echo-api.herokuapp.com' ,
description: 'Echo is simple service which responds for every request
with JSON containing the request information. Like looking in the
mirror. Useful for debugging middlewares.',
calls: [
{
url: '/',
name: 'GET /',
method: 'GET'
}
{
url: '/',
name: 'POST /',
method: 'POST'
}
{
url: '/url',
name: 'GET /url',
method: 'GET'
}
{
url: '/whatever',
name: 'PUT /whatever',
method: 'PUT'
}
]
}
{
key: 'github',
name: 'GitHub',
img: 'github.png',
endpoint: 'https://api.github.com',
description: 'GitHub has Hypermedia JSON API. Some parts are public &
rate limited to 60 requests per hour. You can use OAuth to authenticate
and use up to 5,000 requests per hour.',
calls: [
{
url: '/events?per_page=1',
name: 'Get public events',
method: 'GET'
}
{
url: '/gists/public?per_page=1',
name: 'Get public gists',
method: 'GET'
}
{
url: '/orgs/github/events',
name: "Get GitHub events",
method: 'GET'
}
{
url: '/gists/public',
name: 'Get public gists',
method: 'GET'
}
{
url: '/zen',
name: "Get a zen koan",
method: 'GET'
}
{
url: '/emojis',
name: 'Get all emojis',
description: 'Do you see any Teletubbies in here? Do you see a
slender plastic tag clipped to my shirt with my name printed on it?
Do you see a little Asian child with a blank expression on his face
sitting outside on a mechanical helicopter that shakes when you put
quarters in it? No?'
}
{
url: '/users/vmg/starred',
name: "Get repos starred by vmg",
method: 'GET'
}
{
url: '/repos/nginx/nginx/stargazers',
name: "Get nginx/nginx stargazers",
method: 'GET'
}
{
url: '/repos/github/hubot/issues',
name: "Get issues in github/hubot",
method: 'GET'
}
]
}
{
key: 'facebook',
name: 'Facebook',
img: 'facebook.png',
endpoint: 'https://graph.facebook.com',
description: 'Facebook has JSON API which is mostly protected by OAuth,
but some parts of Graph API are open.',
calls: [
{
url: '/mike.shaver',
name: 'Get info about a user',
method: 'GET'
}
{
url: '/19292868552',
name: 'Get info about a page',
method: 'GET'
}
]
}
{
key: 'reddit',
name: 'Reddit',
img: 'reddit.png',
endpoint: 'http://www.reddit.com/'
description: 'Reddit has both JSON and XML api (which is open source)
with some open parts, but most of it is behind OAuth.',
calls: [
{
url: '/user/peter/about.json',
name: 'Get info about a user',
method: 'GET'
}
{
url: '/subreddits/new.json',
name: 'Get new subreddits',
method: 'GET'
}
{
url: '/subreddits/search.json?q=kitten',
name: 'Search subreddits for "kitten"',
method: 'GET'
}
{
url: '/random.json',
name: 'Get a random subreddit',
method: 'GET'
}
{
url: '/api/username_available.json?user=peter',
name: "Check username availability",
method: 'GET'
}
]
}
{
key: 'stackexchange',
name: 'Stack Exchange',
img: 'stackoverflow.png',
endpoint: 'https://api.stackexchange.com/2.1',
description: 'Stack Exchange has JSON API which is mostly open to public
with some parts with OAuth authentication.',
calls: [
{
url: '/badges?site=stackoverflow',
name: 'Get all badges',
method: 'GET'
}
{
url: '/answers?site=stackoverflow',
name: 'Get lastest answers',
method: 'GET'
}
{
url: '/questions?tagged=lua&site=stackoverflow',
name: 'Get latest Lua questions',
method: 'GET'
}
{
url: '/similar?title=get%20css%20by%20ajax&site=stackoverflow',
name: 'Get similar questions',
method: 'GET'
}
{
url: '/tags/lua/top-askers/all_time?site=stackoverflow',
name: 'Get top Lua askers',
method: 'GET'
}
{
url: '/tags/java/synonyms?site=stackoverflow',
name: 'Get Java tag synonyms',
method: 'GET'
}
]
}
{
key: 'wikipedia',
name: 'Wikipedia',
img: 'wikipedia.png',
endpoint: 'http://en.wikipedia.org/w/api.php',
description: 'Wikipedia has API with many output formats like: json, php,
yaml, txt, xml, ....',
calls: [
{
url: '?format=php&action=query&titles=David%20Hasselhoff',
name: 'Get David Hasselhoff page, as PHP',
method: 'GET'
}
{
url: '?format=json&action=query&titles=Austin_powers&prop=revisions',
name: 'Get page revisions',
method: 'GET'
}
{
url: '?format=json&action=query&titles=Earth|Wind|Fire',
name: 'Search Earth, Wind or Fire',
method: 'GET'
}
{
url: '?format=json&action=sitematrix',
name: 'Get sitematrix',
method: 'GET'
}
{
url: '?format=json&action=compare&fromtitle=red&totitle=green',
name: 'Compare Red and Green',
method: 'GET'
}
]
}
{
key: 'bitbucket',
name: 'Bitbucket',
img: 'bitbucket.png',
endpoint: 'https://bitbucket.org/api',
description: 'Bitbucket has JSON REST API with public access to open
source repositories.',
calls: [
{
url: '/2.0/repositories/rude/love/commits',
name: 'Get repo commits',
method: 'GET'
}
{
url: '/1.0/repositories/rude/love/followers',
name: 'Get repo followers',
method: 'GET'
}
{
url: '/1.0/repositories/rude/love/events',
name: 'Get repo events',
method: 'GET'
}
{
url: '/1.0/repositories/rude/love/branches',
name: 'Get repo branches',
method: 'GET'
}
]
}
]
_(apis).indexBy('key')
.factory 'DemoCall', ($http) ->
perform: (service, call) ->
params =
url: call.url
args: call.args
body: call.body
method: call.method
$http(
method: 'GET',
url: "/api/services/#{service._id}/call",
cache: false,
params: params,
transformResponse: []
)
.controller 'DemoCallCtrl', ($scope, DemoCall, $analytics, uuid) ->
updateResponse = (response) ->
$scope.loading = false
$scope.response = response.data
$scope.status = response.status
$scope.contentType = response.headers('Content-Type')?.split(';')[0]
$scope.perform = ->
$scope.loading = true
call = DemoCall.perform($scope.service, $scope.call)
call.then(updateResponse, updateResponse)
$analytics.eventTrack('demo_call.used',
service_id: $scope.service._id, demo: $scope.service.demo, call: $scope.call)
.directive 'demoCall', ->
scope:
call: '=demoCall'
service: '=demoService'
controller: 'DemoCallCtrl'
template: """
<div class="call">
<span ng-class="{visible: response && !loading}"
class="status-code label label-{{ status | status }}">
{{ status }}
</span>
<button class="btn-call" type="button" ng-disabled="loading"
ng-click="perform()">
<i ng-class="{'icon-refresh': loading,
'icon-cloud-download': !loading}" ></i> {{ call.name }}
</button>
<span class="loading" ng-show="loading">loading…</span>
<span ng-if="response">
<a class="demo-response" ng-href="services/{{service._id}}/traces">
See response <i class="icon-chevron-right"></i>
</a>
</span>
</div>
"""
| 98820 | angular.module('slug.services.demo', ['slug.service', 'slug.services'])
.factory 'DemoService', (Service, uuid) ->
create: (demo, callback) ->
endpoints = [ {url: demo.endpoint, code: demo.key} ]
service =
name: "#{demo.name} API",
description: demo.description,
endpoints: endpoints,
demo: demo.key
Service.save(service, callback)
update: (service, demo) ->
endpoints = [ {
url: demo.endpoint,
code: demo.key + uuid().substr(0, 8)
} ]
angular.extend service,
name: "#{demo.name} API",
description: demo.description,
endpoints: endpoints, demo: demo.key
.factory 'DemoApis', ->
apis = [
{
key: 'echo',
name: 'Echo',
img: 'echo.png',
endpoint: 'https://echo-api.herokuapp.com' ,
description: 'Echo is simple service which responds for every request
with JSON containing the request information. Like looking in the
mirror. Useful for debugging middlewares.',
calls: [
{
url: '/',
name: 'GET /',
method: 'GET'
}
{
url: '/',
name: 'POST /',
method: 'POST'
}
{
url: '/url',
name: 'GET /url',
method: 'GET'
}
{
url: '/whatever',
name: 'PUT /whatever',
method: 'PUT'
}
]
}
{
key: 'github',
name: 'GitHub',
img: 'github.png',
endpoint: 'https://api.github.com',
description: 'GitHub has Hypermedia JSON API. Some parts are public &
rate limited to 60 requests per hour. You can use OAuth to authenticate
and use up to 5,000 requests per hour.',
calls: [
{
url: '/events?per_page=1',
name: 'Get public events',
method: 'GET'
}
{
url: '/gists/public?per_page=1',
name: 'Get public gists',
method: 'GET'
}
{
url: '/orgs/github/events',
name: "Get GitHub events",
method: 'GET'
}
{
url: '/gists/public',
name: 'Get public gists',
method: 'GET'
}
{
url: '/zen',
name: "Get a zen koan",
method: 'GET'
}
{
url: '/emojis',
name: 'Get all emojis',
description: 'Do you see any Teletubbies in here? Do you see a
slender plastic tag clipped to my shirt with my name printed on it?
Do you see a little Asian child with a blank expression on his face
sitting outside on a mechanical helicopter that shakes when you put
quarters in it? No?'
}
{
url: '/users/vmg/starred',
name: "Get repos starred by vmg",
method: 'GET'
}
{
url: '/repos/nginx/nginx/stargazers',
name: "Get nginx/nginx stargazers",
method: 'GET'
}
{
url: '/repos/github/hubot/issues',
name: "Get issues in github/hubot",
method: 'GET'
}
]
}
{
key: 'facebook',
name: 'Facebook',
img: 'facebook.png',
endpoint: 'https://graph.facebook.com',
description: 'Facebook has JSON API which is mostly protected by OAuth,
but some parts of Graph API are open.',
calls: [
{
url: '/mike.shaver',
name: 'Get info about a user',
method: 'GET'
}
{
url: '/19292868552',
name: 'Get info about a page',
method: 'GET'
}
]
}
{
key: 'reddit',
name: 'Reddit',
img: 'reddit.png',
endpoint: 'http://www.reddit.com/'
description: 'Reddit has both JSON and XML api (which is open source)
with some open parts, but most of it is behind OAuth.',
calls: [
{
url: '/user/peter/about.json',
name: 'Get info about a user',
method: 'GET'
}
{
url: '/subreddits/new.json',
name: 'Get new subreddits',
method: 'GET'
}
{
url: '/subreddits/search.json?q=kitten',
name: 'Search subreddits for "kitten"',
method: 'GET'
}
{
url: '/random.json',
name: 'Get a random subreddit',
method: 'GET'
}
{
url: '/api/username_available.json?user=peter',
name: "Check username availability",
method: 'GET'
}
]
}
{
key: 'stackexchange',
name: 'Stack Exchange',
img: 'stackoverflow.png',
endpoint: 'https://api.stackexchange.com/2.1',
description: 'Stack Exchange has JSON API which is mostly open to public
with some parts with OAuth authentication.',
calls: [
{
url: '/badges?site=stackoverflow',
name: 'Get all badges',
method: 'GET'
}
{
url: '/answers?site=stackoverflow',
name: 'Get lastest answers',
method: 'GET'
}
{
url: '/questions?tagged=lua&site=stackoverflow',
name: 'Get latest Lua questions',
method: 'GET'
}
{
url: '/similar?title=get%20css%20by%20ajax&site=stackoverflow',
name: 'Get similar questions',
method: 'GET'
}
{
url: '/tags/lua/top-askers/all_time?site=stackoverflow',
name: 'Get top Lua askers',
method: 'GET'
}
{
url: '/tags/java/synonyms?site=stackoverflow',
name: 'Get Java tag synonyms',
method: 'GET'
}
]
}
{
key: 'wikipedia',
name: 'Wikipedia',
img: 'wikipedia.png',
endpoint: 'http://en.wikipedia.org/w/api.php',
description: 'Wikipedia has API with many output formats like: json, php,
yaml, txt, xml, ....',
calls: [
{
url: '?format=php&action=query&titles=<NAME>%20<NAME>',
name: 'Get <NAME> page, as PHP',
method: 'GET'
}
{
url: '?format=json&action=query&titles=Austin_powers&prop=revisions',
name: 'Get page revisions',
method: 'GET'
}
{
url: '?format=json&action=query&titles=Earth|Wind|Fire',
name: 'Search Earth, Wind or Fire',
method: 'GET'
}
{
url: '?format=json&action=sitematrix',
name: 'Get sitematrix',
method: 'GET'
}
{
url: '?format=json&action=compare&fromtitle=red&totitle=green',
name: 'Compare Red and Green',
method: 'GET'
}
]
}
{
key: 'bitbucket',
name: '<NAME>',
img: 'bitbucket.png',
endpoint: 'https://bitbucket.org/api',
description: 'Bitbucket has JSON REST API with public access to open
source repositories.',
calls: [
{
url: '/2.0/repositories/rude/love/commits',
name: 'Get repo commits',
method: 'GET'
}
{
url: '/1.0/repositories/rude/love/followers',
name: 'Get repo followers',
method: 'GET'
}
{
url: '/1.0/repositories/rude/love/events',
name: 'Get repo events',
method: 'GET'
}
{
url: '/1.0/repositories/rude/love/branches',
name: 'Get repo branches',
method: 'GET'
}
]
}
]
_(apis).indexBy('key')
.factory 'DemoCall', ($http) ->
perform: (service, call) ->
params =
url: call.url
args: call.args
body: call.body
method: call.method
$http(
method: 'GET',
url: "/api/services/#{service._id}/call",
cache: false,
params: params,
transformResponse: []
)
.controller 'DemoCallCtrl', ($scope, DemoCall, $analytics, uuid) ->
updateResponse = (response) ->
$scope.loading = false
$scope.response = response.data
$scope.status = response.status
$scope.contentType = response.headers('Content-Type')?.split(';')[0]
$scope.perform = ->
$scope.loading = true
call = DemoCall.perform($scope.service, $scope.call)
call.then(updateResponse, updateResponse)
$analytics.eventTrack('demo_call.used',
service_id: $scope.service._id, demo: $scope.service.demo, call: $scope.call)
.directive 'demoCall', ->
scope:
call: '=demoCall'
service: '=demoService'
controller: 'DemoCallCtrl'
template: """
<div class="call">
<span ng-class="{visible: response && !loading}"
class="status-code label label-{{ status | status }}">
{{ status }}
</span>
<button class="btn-call" type="button" ng-disabled="loading"
ng-click="perform()">
<i ng-class="{'icon-refresh': loading,
'icon-cloud-download': !loading}" ></i> {{ call.name }}
</button>
<span class="loading" ng-show="loading">loading…</span>
<span ng-if="response">
<a class="demo-response" ng-href="services/{{service._id}}/traces">
See response <i class="icon-chevron-right"></i>
</a>
</span>
</div>
"""
| true | angular.module('slug.services.demo', ['slug.service', 'slug.services'])
.factory 'DemoService', (Service, uuid) ->
create: (demo, callback) ->
endpoints = [ {url: demo.endpoint, code: demo.key} ]
service =
name: "#{demo.name} API",
description: demo.description,
endpoints: endpoints,
demo: demo.key
Service.save(service, callback)
update: (service, demo) ->
endpoints = [ {
url: demo.endpoint,
code: demo.key + uuid().substr(0, 8)
} ]
angular.extend service,
name: "#{demo.name} API",
description: demo.description,
endpoints: endpoints, demo: demo.key
.factory 'DemoApis', ->
apis = [
{
key: 'echo',
name: 'Echo',
img: 'echo.png',
endpoint: 'https://echo-api.herokuapp.com' ,
description: 'Echo is simple service which responds for every request
with JSON containing the request information. Like looking in the
mirror. Useful for debugging middlewares.',
calls: [
{
url: '/',
name: 'GET /',
method: 'GET'
}
{
url: '/',
name: 'POST /',
method: 'POST'
}
{
url: '/url',
name: 'GET /url',
method: 'GET'
}
{
url: '/whatever',
name: 'PUT /whatever',
method: 'PUT'
}
]
}
{
key: 'github',
name: 'GitHub',
img: 'github.png',
endpoint: 'https://api.github.com',
description: 'GitHub has Hypermedia JSON API. Some parts are public &
rate limited to 60 requests per hour. You can use OAuth to authenticate
and use up to 5,000 requests per hour.',
calls: [
{
url: '/events?per_page=1',
name: 'Get public events',
method: 'GET'
}
{
url: '/gists/public?per_page=1',
name: 'Get public gists',
method: 'GET'
}
{
url: '/orgs/github/events',
name: "Get GitHub events",
method: 'GET'
}
{
url: '/gists/public',
name: 'Get public gists',
method: 'GET'
}
{
url: '/zen',
name: "Get a zen koan",
method: 'GET'
}
{
url: '/emojis',
name: 'Get all emojis',
description: 'Do you see any Teletubbies in here? Do you see a
slender plastic tag clipped to my shirt with my name printed on it?
Do you see a little Asian child with a blank expression on his face
sitting outside on a mechanical helicopter that shakes when you put
quarters in it? No?'
}
{
url: '/users/vmg/starred',
name: "Get repos starred by vmg",
method: 'GET'
}
{
url: '/repos/nginx/nginx/stargazers',
name: "Get nginx/nginx stargazers",
method: 'GET'
}
{
url: '/repos/github/hubot/issues',
name: "Get issues in github/hubot",
method: 'GET'
}
]
}
{
key: 'facebook',
name: 'Facebook',
img: 'facebook.png',
endpoint: 'https://graph.facebook.com',
description: 'Facebook has JSON API which is mostly protected by OAuth,
but some parts of Graph API are open.',
calls: [
{
url: '/mike.shaver',
name: 'Get info about a user',
method: 'GET'
}
{
url: '/19292868552',
name: 'Get info about a page',
method: 'GET'
}
]
}
{
key: 'reddit',
name: 'Reddit',
img: 'reddit.png',
endpoint: 'http://www.reddit.com/'
description: 'Reddit has both JSON and XML api (which is open source)
with some open parts, but most of it is behind OAuth.',
calls: [
{
url: '/user/peter/about.json',
name: 'Get info about a user',
method: 'GET'
}
{
url: '/subreddits/new.json',
name: 'Get new subreddits',
method: 'GET'
}
{
url: '/subreddits/search.json?q=kitten',
name: 'Search subreddits for "kitten"',
method: 'GET'
}
{
url: '/random.json',
name: 'Get a random subreddit',
method: 'GET'
}
{
url: '/api/username_available.json?user=peter',
name: "Check username availability",
method: 'GET'
}
]
}
{
key: 'stackexchange',
name: 'Stack Exchange',
img: 'stackoverflow.png',
endpoint: 'https://api.stackexchange.com/2.1',
description: 'Stack Exchange has JSON API which is mostly open to public
with some parts with OAuth authentication.',
calls: [
{
url: '/badges?site=stackoverflow',
name: 'Get all badges',
method: 'GET'
}
{
url: '/answers?site=stackoverflow',
name: 'Get lastest answers',
method: 'GET'
}
{
url: '/questions?tagged=lua&site=stackoverflow',
name: 'Get latest Lua questions',
method: 'GET'
}
{
url: '/similar?title=get%20css%20by%20ajax&site=stackoverflow',
name: 'Get similar questions',
method: 'GET'
}
{
url: '/tags/lua/top-askers/all_time?site=stackoverflow',
name: 'Get top Lua askers',
method: 'GET'
}
{
url: '/tags/java/synonyms?site=stackoverflow',
name: 'Get Java tag synonyms',
method: 'GET'
}
]
}
{
key: 'wikipedia',
name: 'Wikipedia',
img: 'wikipedia.png',
endpoint: 'http://en.wikipedia.org/w/api.php',
description: 'Wikipedia has API with many output formats like: json, php,
yaml, txt, xml, ....',
calls: [
{
url: '?format=php&action=query&titles=PI:NAME:<NAME>END_PI%20PI:NAME:<NAME>END_PI',
name: 'Get PI:NAME:<NAME>END_PI page, as PHP',
method: 'GET'
}
{
url: '?format=json&action=query&titles=Austin_powers&prop=revisions',
name: 'Get page revisions',
method: 'GET'
}
{
url: '?format=json&action=query&titles=Earth|Wind|Fire',
name: 'Search Earth, Wind or Fire',
method: 'GET'
}
{
url: '?format=json&action=sitematrix',
name: 'Get sitematrix',
method: 'GET'
}
{
url: '?format=json&action=compare&fromtitle=red&totitle=green',
name: 'Compare Red and Green',
method: 'GET'
}
]
}
{
key: 'bitbucket',
name: 'PI:NAME:<NAME>END_PI',
img: 'bitbucket.png',
endpoint: 'https://bitbucket.org/api',
description: 'Bitbucket has JSON REST API with public access to open
source repositories.',
calls: [
{
url: '/2.0/repositories/rude/love/commits',
name: 'Get repo commits',
method: 'GET'
}
{
url: '/1.0/repositories/rude/love/followers',
name: 'Get repo followers',
method: 'GET'
}
{
url: '/1.0/repositories/rude/love/events',
name: 'Get repo events',
method: 'GET'
}
{
url: '/1.0/repositories/rude/love/branches',
name: 'Get repo branches',
method: 'GET'
}
]
}
]
_(apis).indexBy('key')
.factory 'DemoCall', ($http) ->
perform: (service, call) ->
params =
url: call.url
args: call.args
body: call.body
method: call.method
$http(
method: 'GET',
url: "/api/services/#{service._id}/call",
cache: false,
params: params,
transformResponse: []
)
.controller 'DemoCallCtrl', ($scope, DemoCall, $analytics, uuid) ->
updateResponse = (response) ->
$scope.loading = false
$scope.response = response.data
$scope.status = response.status
$scope.contentType = response.headers('Content-Type')?.split(';')[0]
$scope.perform = ->
$scope.loading = true
call = DemoCall.perform($scope.service, $scope.call)
call.then(updateResponse, updateResponse)
$analytics.eventTrack('demo_call.used',
service_id: $scope.service._id, demo: $scope.service.demo, call: $scope.call)
.directive 'demoCall', ->
scope:
call: '=demoCall'
service: '=demoService'
controller: 'DemoCallCtrl'
template: """
<div class="call">
<span ng-class="{visible: response && !loading}"
class="status-code label label-{{ status | status }}">
{{ status }}
</span>
<button class="btn-call" type="button" ng-disabled="loading"
ng-click="perform()">
<i ng-class="{'icon-refresh': loading,
'icon-cloud-download': !loading}" ></i> {{ call.name }}
</button>
<span class="loading" ng-show="loading">loading…</span>
<span ng-if="response">
<a class="demo-response" ng-href="services/{{service._id}}/traces">
See response <i class="icon-chevron-right"></i>
</a>
</span>
</div>
"""
|
[
{
"context": "it: (scope, element) ->\n scope.name = 'Ubuntu'\n\n try\n alight.bootstrap el\n catch e",
"end": 2293,
"score": 0.5837878584861755,
"start": 2287,
"tag": "NAME",
"value": "Ubuntu"
}
] | test/core/ns.coffee | dragonboy612/Angular_Knockout | 317 |
Test('ns-0').run ($test, alight) ->
$test.start 3
f$ = alight.f$
# ut-test3
do ->
el = ttDOM '<p ut-test3="linux"></p>'
alight.directives.ut =
test3: (scope, el, name) ->
el.textContent = name
alight el
$test.equal ttGetText(el), 'linux'
# local ut-test3
do ->
scope =
utTest3: (el, name) ->
el.textContent = name + '_loc'
el = ttDOM '<p ut-test3!="$element, \'linux\'"></p>'
alight el, scope
$test.equal ttGetText(el), 'linux_loc'
# filter
do ->
scope =
double: ->
'linux'
el = ttDOM '<p>{{x | double}}</p>'
alight el, scope
$test.equal ttGetText(el), 'linux'
$test.close()
Test('$global-0').run ($test, alight) ->
$test.start 2
el = ttDOM """
<div>
<top>
top={{value0}}
<middle>
<inner>inner={{value1}}</inner>
</middle>
</top>
</div>
"""
result = ''
alight.d.$global.top =
restrict: 'E'
init: (scope, element) ->
scope.value0 = 'TOP'
result += 'top'
alight.d.$global.inner =
restrict: 'E'
init: (scope, element) ->
scope.value1 = 'INNER'
result += 'inner'
alight.bootstrap el
$test.equal result, 'topinner'
$test.equal ttGetText(el), 'top=TOP inner=INNER'
$test.close()
Test('$global-1').run ($test, alight) ->
$test.start 1
el = ttDOM """
<aa-div>{{name}}</aa-div>
"""
alight.bootstrap el,
name: 'linux'
$test.equal ttGetText(el), 'linux'
$test.close()
Test('$global-2').run ($test, alight) ->
$test.start 1
el = ttDOM """
<aa-div>{{name}}</aa-div>
"""
alight.d.aa = {}
try
alight.bootstrap el,
name: 'linux'
catch e
$test.equal e, 'Directive not found: aa-div'
$test.close()
Test('$global-3').run ($test, alight) ->
$test.start 1
el = ttDOM """
<aa-div>{{name}}</aa-div>
"""
alight.d.aa =
restrict: 'E'
init: (scope, element) ->
scope.name = 'Ubuntu'
try
alight.bootstrap el
catch e
$test.equal e, 'Directive not found: aa-div'
$test.close()
| 152327 |
Test('ns-0').run ($test, alight) ->
$test.start 3
f$ = alight.f$
# ut-test3
do ->
el = ttDOM '<p ut-test3="linux"></p>'
alight.directives.ut =
test3: (scope, el, name) ->
el.textContent = name
alight el
$test.equal ttGetText(el), 'linux'
# local ut-test3
do ->
scope =
utTest3: (el, name) ->
el.textContent = name + '_loc'
el = ttDOM '<p ut-test3!="$element, \'linux\'"></p>'
alight el, scope
$test.equal ttGetText(el), 'linux_loc'
# filter
do ->
scope =
double: ->
'linux'
el = ttDOM '<p>{{x | double}}</p>'
alight el, scope
$test.equal ttGetText(el), 'linux'
$test.close()
Test('$global-0').run ($test, alight) ->
$test.start 2
el = ttDOM """
<div>
<top>
top={{value0}}
<middle>
<inner>inner={{value1}}</inner>
</middle>
</top>
</div>
"""
result = ''
alight.d.$global.top =
restrict: 'E'
init: (scope, element) ->
scope.value0 = 'TOP'
result += 'top'
alight.d.$global.inner =
restrict: 'E'
init: (scope, element) ->
scope.value1 = 'INNER'
result += 'inner'
alight.bootstrap el
$test.equal result, 'topinner'
$test.equal ttGetText(el), 'top=TOP inner=INNER'
$test.close()
Test('$global-1').run ($test, alight) ->
$test.start 1
el = ttDOM """
<aa-div>{{name}}</aa-div>
"""
alight.bootstrap el,
name: 'linux'
$test.equal ttGetText(el), 'linux'
$test.close()
Test('$global-2').run ($test, alight) ->
$test.start 1
el = ttDOM """
<aa-div>{{name}}</aa-div>
"""
alight.d.aa = {}
try
alight.bootstrap el,
name: 'linux'
catch e
$test.equal e, 'Directive not found: aa-div'
$test.close()
Test('$global-3').run ($test, alight) ->
$test.start 1
el = ttDOM """
<aa-div>{{name}}</aa-div>
"""
alight.d.aa =
restrict: 'E'
init: (scope, element) ->
scope.name = '<NAME>'
try
alight.bootstrap el
catch e
$test.equal e, 'Directive not found: aa-div'
$test.close()
| true |
Test('ns-0').run ($test, alight) ->
$test.start 3
f$ = alight.f$
# ut-test3
do ->
el = ttDOM '<p ut-test3="linux"></p>'
alight.directives.ut =
test3: (scope, el, name) ->
el.textContent = name
alight el
$test.equal ttGetText(el), 'linux'
# local ut-test3
do ->
scope =
utTest3: (el, name) ->
el.textContent = name + '_loc'
el = ttDOM '<p ut-test3!="$element, \'linux\'"></p>'
alight el, scope
$test.equal ttGetText(el), 'linux_loc'
# filter
do ->
scope =
double: ->
'linux'
el = ttDOM '<p>{{x | double}}</p>'
alight el, scope
$test.equal ttGetText(el), 'linux'
$test.close()
Test('$global-0').run ($test, alight) ->
$test.start 2
el = ttDOM """
<div>
<top>
top={{value0}}
<middle>
<inner>inner={{value1}}</inner>
</middle>
</top>
</div>
"""
result = ''
alight.d.$global.top =
restrict: 'E'
init: (scope, element) ->
scope.value0 = 'TOP'
result += 'top'
alight.d.$global.inner =
restrict: 'E'
init: (scope, element) ->
scope.value1 = 'INNER'
result += 'inner'
alight.bootstrap el
$test.equal result, 'topinner'
$test.equal ttGetText(el), 'top=TOP inner=INNER'
$test.close()
Test('$global-1').run ($test, alight) ->
$test.start 1
el = ttDOM """
<aa-div>{{name}}</aa-div>
"""
alight.bootstrap el,
name: 'linux'
$test.equal ttGetText(el), 'linux'
$test.close()
Test('$global-2').run ($test, alight) ->
$test.start 1
el = ttDOM """
<aa-div>{{name}}</aa-div>
"""
alight.d.aa = {}
try
alight.bootstrap el,
name: 'linux'
catch e
$test.equal e, 'Directive not found: aa-div'
$test.close()
Test('$global-3').run ($test, alight) ->
$test.start 1
el = ttDOM """
<aa-div>{{name}}</aa-div>
"""
alight.d.aa =
restrict: 'E'
init: (scope, element) ->
scope.name = 'PI:NAME:<NAME>END_PI'
try
alight.bootstrap el
catch e
$test.equal e, 'Directive not found: aa-div'
$test.close()
|
[
{
"context": "##\n# CRUD methods for our customers.\n#\n# @author: Daniele Gazzelloni <daniele@danielegazzelloni.com>\n#################",
"end": 68,
"score": 0.9998906850814819,
"start": 50,
"tag": "NAME",
"value": "Daniele Gazzelloni"
},
{
"context": "r our customers.\n#\n# @author... | backend/src/customers.coffee | danielegazzelloni/barbershop-challenge | 0 | ##
# CRUD methods for our customers.
#
# @author: Daniele Gazzelloni <daniele@danielegazzelloni.com>
######################################################################
db = require('./db')
logger = require('./logger')
# Insert a new customer into its Mongodb collection
insertCustomer = (customer, callback) ->
thisCustomer = new db.models.Customers(customer)
thisCustomer.save customer, (error, model) ->
error = (error ? error : 200)
model = (model ? model : {})
callback(error, model)
# Retrieve a customer by id
getCustomer = (id, callback) ->
db.models.Customers.findOne {_id: id}, (error, model) ->
error = (error ? error : 200)
model = (model ? model : {})
callback(error, model)
# Edit a customer
editCustomer = (customer, callback) ->
db.models.Customers.update {_id: customer._id}, customer, {}, (error, model) ->
error = (error ? error : 200)
model = (model ? model : {})
callback(error, model)
# Identify which CRUD operation we should do.
#
# Note: we should implement processVerb() in every macro-module, like
# one for customers, one for users, one for moduleXXX, etc..
# This way we could use the same method syntax in the entire app.
processVerb = (req, verb, callback) ->
# First discrimination
if verb is "GET"
# GET a customer
if req.query.id
getCustomer req.query.id, (error, result) ->
logger.log "*", "getting customer with id=#{req.query.id}";
callback(error, result)
else
logger.log "*", "ERROR: getting customer with id undefined.";
callback(400, {})
else if verb is "POST"
# INSERT a customer
if (req.body.id is null or req.body.id is undefined) and req.body.name and req.body.email
customer = {
name : req.body.name,
email : req.body.email,
barber: req.body.barber
}
insertCustomer customer, (error, result) ->
logger.log "*", "customer #{customer.name} inserted.";
callback(error, result)
# EDIT a customer
else if req.body.id and req.body.name and req.body.email
customer = {
_id : req.body.id,
name : req.body.name,
email : req.body.email,
barber: req.body.barber
}
editCustomer customer, (error, result) ->
logger.log "*", "customer #{customer.name} edited.";
callback(error, result)
else
logger.log "*", "ERROR: wrong parameters for editCustomer/insertCustomer...";
callback(400, {})
# Unrecognized method
else
logger.log "*", "WARNING: unrecognized method...";
callback(400, {})
# Module exports
exports.insertCustomer = insertCustomer
exports.getCustomer = getCustomer
exports.editCustomer = editCustomer
exports.processVerb = processVerb | 181049 | ##
# CRUD methods for our customers.
#
# @author: <NAME> <<EMAIL>>
######################################################################
db = require('./db')
logger = require('./logger')
# Insert a new customer into its Mongodb collection
insertCustomer = (customer, callback) ->
thisCustomer = new db.models.Customers(customer)
thisCustomer.save customer, (error, model) ->
error = (error ? error : 200)
model = (model ? model : {})
callback(error, model)
# Retrieve a customer by id
getCustomer = (id, callback) ->
db.models.Customers.findOne {_id: id}, (error, model) ->
error = (error ? error : 200)
model = (model ? model : {})
callback(error, model)
# Edit a customer
editCustomer = (customer, callback) ->
db.models.Customers.update {_id: customer._id}, customer, {}, (error, model) ->
error = (error ? error : 200)
model = (model ? model : {})
callback(error, model)
# Identify which CRUD operation we should do.
#
# Note: we should implement processVerb() in every macro-module, like
# one for customers, one for users, one for moduleXXX, etc..
# This way we could use the same method syntax in the entire app.
processVerb = (req, verb, callback) ->
# First discrimination
if verb is "GET"
# GET a customer
if req.query.id
getCustomer req.query.id, (error, result) ->
logger.log "*", "getting customer with id=#{req.query.id}";
callback(error, result)
else
logger.log "*", "ERROR: getting customer with id undefined.";
callback(400, {})
else if verb is "POST"
# INSERT a customer
if (req.body.id is null or req.body.id is undefined) and req.body.name and req.body.email
customer = {
name : req.body.name,
email : req.body.email,
barber: req.body.barber
}
insertCustomer customer, (error, result) ->
logger.log "*", "customer #{customer.name} inserted.";
callback(error, result)
# EDIT a customer
else if req.body.id and req.body.name and req.body.email
customer = {
_id : req.body.id,
name : req.body.name,
email : req.body.email,
barber: req.body.barber
}
editCustomer customer, (error, result) ->
logger.log "*", "customer #{customer.name} edited.";
callback(error, result)
else
logger.log "*", "ERROR: wrong parameters for editCustomer/insertCustomer...";
callback(400, {})
# Unrecognized method
else
logger.log "*", "WARNING: unrecognized method...";
callback(400, {})
# Module exports
exports.insertCustomer = insertCustomer
exports.getCustomer = getCustomer
exports.editCustomer = editCustomer
exports.processVerb = processVerb | true | ##
# CRUD methods for our customers.
#
# @author: PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
######################################################################
db = require('./db')
logger = require('./logger')
# Insert a new customer into its Mongodb collection
insertCustomer = (customer, callback) ->
thisCustomer = new db.models.Customers(customer)
thisCustomer.save customer, (error, model) ->
error = (error ? error : 200)
model = (model ? model : {})
callback(error, model)
# Retrieve a customer by id
getCustomer = (id, callback) ->
db.models.Customers.findOne {_id: id}, (error, model) ->
error = (error ? error : 200)
model = (model ? model : {})
callback(error, model)
# Edit a customer
editCustomer = (customer, callback) ->
db.models.Customers.update {_id: customer._id}, customer, {}, (error, model) ->
error = (error ? error : 200)
model = (model ? model : {})
callback(error, model)
# Identify which CRUD operation we should do.
#
# Note: we should implement processVerb() in every macro-module, like
# one for customers, one for users, one for moduleXXX, etc..
# This way we could use the same method syntax in the entire app.
processVerb = (req, verb, callback) ->
# First discrimination
if verb is "GET"
# GET a customer
if req.query.id
getCustomer req.query.id, (error, result) ->
logger.log "*", "getting customer with id=#{req.query.id}";
callback(error, result)
else
logger.log "*", "ERROR: getting customer with id undefined.";
callback(400, {})
else if verb is "POST"
# INSERT a customer
if (req.body.id is null or req.body.id is undefined) and req.body.name and req.body.email
customer = {
name : req.body.name,
email : req.body.email,
barber: req.body.barber
}
insertCustomer customer, (error, result) ->
logger.log "*", "customer #{customer.name} inserted.";
callback(error, result)
# EDIT a customer
else if req.body.id and req.body.name and req.body.email
customer = {
_id : req.body.id,
name : req.body.name,
email : req.body.email,
barber: req.body.barber
}
editCustomer customer, (error, result) ->
logger.log "*", "customer #{customer.name} edited.";
callback(error, result)
else
logger.log "*", "ERROR: wrong parameters for editCustomer/insertCustomer...";
callback(400, {})
# Unrecognized method
else
logger.log "*", "WARNING: unrecognized method...";
callback(400, {})
# Module exports
exports.insertCustomer = insertCustomer
exports.getCustomer = getCustomer
exports.editCustomer = editCustomer
exports.processVerb = processVerb |
[
{
"context": "resulting class is considered Medic.\n *\n * @name Medic\n * @prerequisite Heal 50000 damage\n * @effect +",
"end": 226,
"score": 0.7304458618164062,
"start": 221,
"tag": "NAME",
"value": "Medic"
}
] | src/character/personalities/Medic.coffee | jawsome/IdleLands | 3 |
Personality = require "../base/Personality"
Constants = require "../../system/utilities/Constants"
`/**
* This personality makes you never change classes, unless the resulting class is considered Medic.
*
* @name Medic
* @prerequisite Heal 50000 damage
* @effect +5% WIS
* @effect -3% AGI
* @effect -3% DEX
* @category Personalities
* @package Player
*/`
class Medic extends Personality
constructor: ->
wisPercent: -> 5
agiPercent: -> -3
dexPercent: -> -3
classChangePercent: (potential) ->
-100 if not Constants.isMedic potential
@canUse = (player) ->
player.statistics["calculated total heals given"] >= 50000
@desc = "Heal 50000 damage"
module.exports = exports = Medic | 196301 |
Personality = require "../base/Personality"
Constants = require "../../system/utilities/Constants"
`/**
* This personality makes you never change classes, unless the resulting class is considered Medic.
*
* @name <NAME>
* @prerequisite Heal 50000 damage
* @effect +5% WIS
* @effect -3% AGI
* @effect -3% DEX
* @category Personalities
* @package Player
*/`
class Medic extends Personality
constructor: ->
wisPercent: -> 5
agiPercent: -> -3
dexPercent: -> -3
classChangePercent: (potential) ->
-100 if not Constants.isMedic potential
@canUse = (player) ->
player.statistics["calculated total heals given"] >= 50000
@desc = "Heal 50000 damage"
module.exports = exports = Medic | true |
Personality = require "../base/Personality"
Constants = require "../../system/utilities/Constants"
`/**
* This personality makes you never change classes, unless the resulting class is considered Medic.
*
* @name PI:NAME:<NAME>END_PI
* @prerequisite Heal 50000 damage
* @effect +5% WIS
* @effect -3% AGI
* @effect -3% DEX
* @category Personalities
* @package Player
*/`
class Medic extends Personality
constructor: ->
wisPercent: -> 5
agiPercent: -> -3
dexPercent: -> -3
classChangePercent: (potential) ->
-100 if not Constants.isMedic potential
@canUse = (player) ->
player.statistics["calculated total heals given"] >= 50000
@desc = "Heal 50000 damage"
module.exports = exports = Medic |
[
{
"context": "gs =\n db: 'purple_circle_chat'\n cookie_secret: 'purple-circle-1337'\n metatags:\n title: 'Chat'\n description: '",
"end": 74,
"score": 0.995844304561615,
"start": 56,
"tag": "KEY",
"value": "purple-circle-1337"
}
] | coffeescript/settings.coffee | purple-circle/chat | 0 | settings =
db: 'purple_circle_chat'
cookie_secret: 'purple-circle-1337'
metatags:
title: 'Chat'
description: 'Chat with text and images and swag yolos'
keywords: 'chat, community, cat selfies and butts'
url: 'http://www.happijoy.com'
image: 'http://i.imgur.com/EGmPACE.jpg'
module.exports = settings
| 12135 | settings =
db: 'purple_circle_chat'
cookie_secret: '<KEY>'
metatags:
title: 'Chat'
description: 'Chat with text and images and swag yolos'
keywords: 'chat, community, cat selfies and butts'
url: 'http://www.happijoy.com'
image: 'http://i.imgur.com/EGmPACE.jpg'
module.exports = settings
| true | settings =
db: 'purple_circle_chat'
cookie_secret: 'PI:KEY:<KEY>END_PI'
metatags:
title: 'Chat'
description: 'Chat with text and images and swag yolos'
keywords: 'chat, community, cat selfies and butts'
url: 'http://www.happijoy.com'
image: 'http://i.imgur.com/EGmPACE.jpg'
module.exports = settings
|
[
{
"context": " * `next`: -> Mark\n\n# mark\n# ====\n#\n# An API for Jaque Bertin's notion of visualization \"marks\".\ndefine ['cv/Re",
"end": 124,
"score": 0.9982927441596985,
"start": 112,
"tag": "NAME",
"value": "Jaque Bertin"
}
] | src/mark.coffee | curran/canvas-vis | 2 | # MarkIterator
# ============
#
# * `hasNext`: -> Boolean
# * `next`: -> Mark
# mark
# ====
#
# An API for Jaque Bertin's notion of visualization "marks".
define ['cv/Rectangle', 'cv/Point']
, (Rectangle, Point) ->
mark = ->
_.extend p, defaults
return singleton
# p means properties
p = {}
defaults =
bounds: new Rectangle
fillStyle: 'black'
shape: 'square'
rotation: 0
singleton =
# Chainable property setter functions
shape: ( shape ) -> p.shape = shape; @
fillStyle: (cssColorStr) -> p.fillStyle = cssColorStr; @
size: ( w, h = w ) ->
p.bounds.w = w; p.bounds.h = h; @
x: ( x ) -> p.bounds.x = x; @
y: ( y ) -> p.bounds.y = y; @
w: ( w ) -> p.bounds.w = w; @
h: ( h ) -> p.bounds.h = h; @
rotation: ( rotation ) -> p.rotation = rotation; @
# Functions that evaluate the mark
getBounds: ->
shape().bounds()
render: (ctx, viewport) ->
shape().render(ctx, viewport)
shape = ->
if !shapes[p.shape]
throw Error "Unknown shape type '#{p.shape}'"
shapes[p.shape]
shapes =
square:
bounds: -> p.bounds.clone()
render: (ctx, viewport) ->
ctx.fillStyle = p.fillStyle
viewport.srcToDestRect p.bounds, destRect
ctx.fillRect(
destRect.x,
destRect.y,
destRect.w,
destRect.h
)
circle:
bounds: -> p.bounds.clone()
render: (ctx, viewport) ->
ctx.fillStyle = p.fillStyle
viewport.srcToDestRect p.bounds, destRect
x = destRect.x + destRect.w/2
y = destRect.y + destRect.h/2
r = (destRect.w + destRect.h)/4
ctx.beginPath()
ctx.arc x, y, r, 0, 2*Math.PI
ctx.fill()
ctx.closePath()
destRect = new Point
return mark
| 37123 | # MarkIterator
# ============
#
# * `hasNext`: -> Boolean
# * `next`: -> Mark
# mark
# ====
#
# An API for <NAME>'s notion of visualization "marks".
define ['cv/Rectangle', 'cv/Point']
, (Rectangle, Point) ->
mark = ->
_.extend p, defaults
return singleton
# p means properties
p = {}
defaults =
bounds: new Rectangle
fillStyle: 'black'
shape: 'square'
rotation: 0
singleton =
# Chainable property setter functions
shape: ( shape ) -> p.shape = shape; @
fillStyle: (cssColorStr) -> p.fillStyle = cssColorStr; @
size: ( w, h = w ) ->
p.bounds.w = w; p.bounds.h = h; @
x: ( x ) -> p.bounds.x = x; @
y: ( y ) -> p.bounds.y = y; @
w: ( w ) -> p.bounds.w = w; @
h: ( h ) -> p.bounds.h = h; @
rotation: ( rotation ) -> p.rotation = rotation; @
# Functions that evaluate the mark
getBounds: ->
shape().bounds()
render: (ctx, viewport) ->
shape().render(ctx, viewport)
shape = ->
if !shapes[p.shape]
throw Error "Unknown shape type '#{p.shape}'"
shapes[p.shape]
shapes =
square:
bounds: -> p.bounds.clone()
render: (ctx, viewport) ->
ctx.fillStyle = p.fillStyle
viewport.srcToDestRect p.bounds, destRect
ctx.fillRect(
destRect.x,
destRect.y,
destRect.w,
destRect.h
)
circle:
bounds: -> p.bounds.clone()
render: (ctx, viewport) ->
ctx.fillStyle = p.fillStyle
viewport.srcToDestRect p.bounds, destRect
x = destRect.x + destRect.w/2
y = destRect.y + destRect.h/2
r = (destRect.w + destRect.h)/4
ctx.beginPath()
ctx.arc x, y, r, 0, 2*Math.PI
ctx.fill()
ctx.closePath()
destRect = new Point
return mark
| true | # MarkIterator
# ============
#
# * `hasNext`: -> Boolean
# * `next`: -> Mark
# mark
# ====
#
# An API for PI:NAME:<NAME>END_PI's notion of visualization "marks".
define ['cv/Rectangle', 'cv/Point']
, (Rectangle, Point) ->
mark = ->
_.extend p, defaults
return singleton
# p means properties
p = {}
defaults =
bounds: new Rectangle
fillStyle: 'black'
shape: 'square'
rotation: 0
singleton =
# Chainable property setter functions
shape: ( shape ) -> p.shape = shape; @
fillStyle: (cssColorStr) -> p.fillStyle = cssColorStr; @
size: ( w, h = w ) ->
p.bounds.w = w; p.bounds.h = h; @
x: ( x ) -> p.bounds.x = x; @
y: ( y ) -> p.bounds.y = y; @
w: ( w ) -> p.bounds.w = w; @
h: ( h ) -> p.bounds.h = h; @
rotation: ( rotation ) -> p.rotation = rotation; @
# Functions that evaluate the mark
getBounds: ->
shape().bounds()
render: (ctx, viewport) ->
shape().render(ctx, viewport)
shape = ->
if !shapes[p.shape]
throw Error "Unknown shape type '#{p.shape}'"
shapes[p.shape]
shapes =
square:
bounds: -> p.bounds.clone()
render: (ctx, viewport) ->
ctx.fillStyle = p.fillStyle
viewport.srcToDestRect p.bounds, destRect
ctx.fillRect(
destRect.x,
destRect.y,
destRect.w,
destRect.h
)
circle:
bounds: -> p.bounds.clone()
render: (ctx, viewport) ->
ctx.fillStyle = p.fillStyle
viewport.srcToDestRect p.bounds, destRect
x = destRect.x + destRect.w/2
y = destRect.y + destRect.h/2
r = (destRect.w + destRect.h)/4
ctx.beginPath()
ctx.arc x, y, r, 0, 2*Math.PI
ctx.fill()
ctx.closePath()
destRect = new Point
return mark
|
[
{
"context": "tions, data\n\n\n viewAppended: ->\n\n { profile: { firstName, lastName, nickname } } = @getData()\n\n { size,",
"end": 445,
"score": 0.9961620569229126,
"start": 436,
"tag": "NAME",
"value": "firstName"
},
{
"context": "\n\n\n viewAppended: ->\n\n { profile:... | client/app/lib/useritem.coffee | ezgikaysi/koding | 1 | kd = require 'kd'
AvatarView = require 'app/commonviews/avatarviews/avatarview'
KDLoaderView = kd.LoaderView
KDListItemView = kd.ListItemView
KDCustomHTMLView = kd.CustomHTMLView
module.exports = class UserItem extends KDListItemView
constructor: (options = {}, data) ->
options.type = 'user'
options.justFirstName ?= yes
super options, data
viewAppended: ->
{ profile: { firstName, lastName, nickname } } = @getData()
{ size, justFirstName } = @getOptions()
name = if justFirstName then firstName else "#{firstName} #{lastName}"
@avatar = new AvatarView
origin : nickname
size : size or { width: 22, height: 22 }
@name = new KDCustomHTMLView
cssClass : 'name'
partial : name
@addSubView @avatar
@addSubView @name
@addSubView new KDCustomHTMLView
tagName : 'span'
cssClass : 'remove'
click : =>
@getDelegate().emit 'KickUserRequested', this
@addSubView @loader = new KDLoaderView
size : { width : 16 }
cssClass : 'hidden'
setLoadingMode: (disable) ->
if disable
@setClass 'in-progress'
@loader.show()
else
@unsetClass 'in-progress'
@loader.hide()
| 65091 | kd = require 'kd'
AvatarView = require 'app/commonviews/avatarviews/avatarview'
KDLoaderView = kd.LoaderView
KDListItemView = kd.ListItemView
KDCustomHTMLView = kd.CustomHTMLView
module.exports = class UserItem extends KDListItemView
constructor: (options = {}, data) ->
options.type = 'user'
options.justFirstName ?= yes
super options, data
viewAppended: ->
{ profile: { <NAME>, <NAME>, nickname } } = @getData()
{ size, justFirstName } = @getOptions()
name = if justFirstName then firstName else "#{firstName} #{lastName}"
@avatar = new AvatarView
origin : nickname
size : size or { width: 22, height: 22 }
@name = new KDCustomHTMLView
cssClass : 'name'
partial : name
@addSubView @avatar
@addSubView @name
@addSubView new KDCustomHTMLView
tagName : 'span'
cssClass : 'remove'
click : =>
@getDelegate().emit 'KickUserRequested', this
@addSubView @loader = new KDLoaderView
size : { width : 16 }
cssClass : 'hidden'
setLoadingMode: (disable) ->
if disable
@setClass 'in-progress'
@loader.show()
else
@unsetClass 'in-progress'
@loader.hide()
| true | kd = require 'kd'
AvatarView = require 'app/commonviews/avatarviews/avatarview'
KDLoaderView = kd.LoaderView
KDListItemView = kd.ListItemView
KDCustomHTMLView = kd.CustomHTMLView
module.exports = class UserItem extends KDListItemView
constructor: (options = {}, data) ->
options.type = 'user'
options.justFirstName ?= yes
super options, data
viewAppended: ->
{ profile: { PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI, nickname } } = @getData()
{ size, justFirstName } = @getOptions()
name = if justFirstName then firstName else "#{firstName} #{lastName}"
@avatar = new AvatarView
origin : nickname
size : size or { width: 22, height: 22 }
@name = new KDCustomHTMLView
cssClass : 'name'
partial : name
@addSubView @avatar
@addSubView @name
@addSubView new KDCustomHTMLView
tagName : 'span'
cssClass : 'remove'
click : =>
@getDelegate().emit 'KickUserRequested', this
@addSubView @loader = new KDLoaderView
size : { width : 16 }
cssClass : 'hidden'
setLoadingMode: (disable) ->
if disable
@setClass 'in-progress'
@loader.show()
else
@unsetClass 'in-progress'
@loader.hide()
|
[
{
"context": "# Copyright (c) 2013 Taher Haveliwala\n# All Rights Reserved\n#\n# entity.coffee\n#\n# See L",
"end": 37,
"score": 0.9998764991760254,
"start": 21,
"tag": "NAME",
"value": "Taher Haveliwala"
},
{
"context": "= (360/1000) * 0.5\n MAX_SPEED = 500\n\n key: 'ship'\n ... | src/coffeescript/entity.coffee | taherh/Astro | 0 | # Copyright (c) 2013 Taher Haveliwala
# All Rights Reserved
#
# entity.coffee
#
# See LICENSE for licensing
#
util = Asteroids.util
class Entity
# orientation in degrees
orientation: 0
wraparound: true
img: null
destroyed: false
constructor: (x, y, vel_x=0, vel_y=0, orientation=0) ->
@pos = {}
@vel = {}
@pos.x = x
@pos.y = y
@vel.x = vel_x
@vel.y = vel_y
gGame.imageManager.loadImage(@key, @setImage)
setImage: (img) =>
unless @destroyed
@img = img
@render()
gGame.canvas.add(@img)
render: =>
if @destroyed and @img?
gGame.canvas.remove(@img)
@img = null
return
[cX, cY, cAngle] = gGame.mathToCanvas(@pos.x, @pos.y, @orientation)
if @img
@img.set(
angle: cAngle
left: cX
top: cY
).setCoords() # setCoords() will update bounding box
update: (deltaTime) ->
return if @destroyed
deltaX = deltaTime/1000 * @vel.x
deltaY = deltaTime/1000 * @vel.y
@pos.x += deltaX
@pos.y += deltaY
width = gGame.GAME_WIDTH
height = gGame.GAME_HEIGHT
if @wraparound
while @pos.x >= width
@pos.x -= width
while @pos.y >= height
@pos.y -= height
while @pos.x < 0
@pos.x += width
while @pos.y < 0
@pos.y += height
else
if (@pos.x < 0 or @pos.x >= width or
@pos.y < 0 or @pos.y >= height)
@outOfBounds()
outOfBounds: () ->
gGame.destroy(this)
collidesWith: (other) ->
if this.img? and other.img?
return this.img.intersectsWithObject(other.img)
else
return false
destroy: ->
@destroyed = true
class Ship extends Entity
ACCEL_INCR = 1/10
ROTATE_INCR = (360/1000) * 0.5
MAX_SPEED = 500
key: 'ship'
thrusterSound: null
constructor: (x, y, x_vel, y_vel) ->
super
@thrusterSound = new Sound('thruster', true)
update: (deltaTime) ->
return if @destroyed
actionState = gGame.inputEngine.actionState
if actionState['turn-left']
@rotateLeft(deltaTime)
if actionState['turn-right']
@rotateRight(deltaTime)
if actionState['accelerate']
@accelerate(deltaTime)
if actionState['decelerate']
@decelerate(deltaTime)
if not actionState['decelerate'] and
not actionState['accelerate']
@decelerate(deltaTime, "coast")
super(deltaTime)
rotateRight: (deltaTime) ->
@orientation -= ROTATE_INCR * deltaTime
if (@orientation < 0)
@orientation += Math.floor(Math.abs(@orientation) / 360)
rotateLeft: (deltaTime) ->
@orientation += ROTATE_INCR * deltaTime
@orientation %= 360
accelerate: (deltaTime) ->
# accelerate along orientation
@vel.x += ACCEL_INCR * deltaTime * Math.cos(util.toRad(@orientation))
@vel.y += ACCEL_INCR * deltaTime * Math.sin(util.toRad(@orientation))
# but make sure we're not going too fast
speed = Math.sqrt(Math.pow(@vel.x, 2) + Math.pow(@vel.y, 2))
if speed > MAX_SPEED
cur_vel_angle = util.getAngle(@vel.y, @vel.x)
@vel.x = Math.cos(cur_vel_angle) * MAX_SPEED
@vel.y = Math.sin(cur_vel_angle) * MAX_SPEED
@thrusterSound.play()
decelerate: (deltaTime, type) ->
if type == "coast"
incr = ACCEL_INCR/4
else
incr = ACCEL_INCR
@thrusterSound.stop()
speed = Math.sqrt(Math.pow(@vel.x, 2) + Math.pow(@vel.y, 2))
speed -= incr * deltaTime
speed = 0 if speed < 0
cur_vel_angle = util.getAngle(@vel.y, @vel.x)
@vel.x = Math.cos(cur_vel_angle) * speed
@vel.y = Math.sin(cur_vel_angle) * speed
destroy: ->
@thrusterSound.stop()
super
class Asteroid extends Entity
key: 'asteroid'
constructor: (x, y, x_vel, y_vel) ->
super
class Bullet extends Entity
VEL_MAG = 200
key: 'bullet'
constructor: (ship) ->
# we derive the bullet's position and velocity vector
# from the ship's
ship_cos = Math.cos(util.toRad(ship.orientation))
ship_sin = Math.sin(util.toRad(ship.orientation))
x = ship.pos.x + ship.img.width/2 * ship_cos
y = ship.pos.y + ship.img.width/2 * ship_sin
x_vel = VEL_MAG * ship_cos
y_vel = VEL_MAG * ship_sin
x_vel += ship.vel.x unless util.sign(ship.vel.x) != util.sign(x_vel)
y_vel += ship.vel.y unless util.sign(ship.vel.y) != util.sign(y_vel)
@wraparound = false
super(x, y , x_vel, y_vel)
exports = this
exports.Entity = Entity
exports.Ship = Ship
exports.Asteroid = Asteroid
exports.Bullet = Bullet
| 159172 | # Copyright (c) 2013 <NAME>
# All Rights Reserved
#
# entity.coffee
#
# See LICENSE for licensing
#
util = Asteroids.util
class Entity
# orientation in degrees
orientation: 0
wraparound: true
img: null
destroyed: false
constructor: (x, y, vel_x=0, vel_y=0, orientation=0) ->
@pos = {}
@vel = {}
@pos.x = x
@pos.y = y
@vel.x = vel_x
@vel.y = vel_y
gGame.imageManager.loadImage(@key, @setImage)
setImage: (img) =>
unless @destroyed
@img = img
@render()
gGame.canvas.add(@img)
render: =>
if @destroyed and @img?
gGame.canvas.remove(@img)
@img = null
return
[cX, cY, cAngle] = gGame.mathToCanvas(@pos.x, @pos.y, @orientation)
if @img
@img.set(
angle: cAngle
left: cX
top: cY
).setCoords() # setCoords() will update bounding box
update: (deltaTime) ->
return if @destroyed
deltaX = deltaTime/1000 * @vel.x
deltaY = deltaTime/1000 * @vel.y
@pos.x += deltaX
@pos.y += deltaY
width = gGame.GAME_WIDTH
height = gGame.GAME_HEIGHT
if @wraparound
while @pos.x >= width
@pos.x -= width
while @pos.y >= height
@pos.y -= height
while @pos.x < 0
@pos.x += width
while @pos.y < 0
@pos.y += height
else
if (@pos.x < 0 or @pos.x >= width or
@pos.y < 0 or @pos.y >= height)
@outOfBounds()
outOfBounds: () ->
gGame.destroy(this)
collidesWith: (other) ->
if this.img? and other.img?
return this.img.intersectsWithObject(other.img)
else
return false
destroy: ->
@destroyed = true
class Ship extends Entity
ACCEL_INCR = 1/10
ROTATE_INCR = (360/1000) * 0.5
MAX_SPEED = 500
key: '<KEY>'
thrusterSound: null
constructor: (x, y, x_vel, y_vel) ->
super
@thrusterSound = new Sound('thruster', true)
update: (deltaTime) ->
return if @destroyed
actionState = gGame.inputEngine.actionState
if actionState['turn-left']
@rotateLeft(deltaTime)
if actionState['turn-right']
@rotateRight(deltaTime)
if actionState['accelerate']
@accelerate(deltaTime)
if actionState['decelerate']
@decelerate(deltaTime)
if not actionState['decelerate'] and
not actionState['accelerate']
@decelerate(deltaTime, "coast")
super(deltaTime)
rotateRight: (deltaTime) ->
@orientation -= ROTATE_INCR * deltaTime
if (@orientation < 0)
@orientation += Math.floor(Math.abs(@orientation) / 360)
rotateLeft: (deltaTime) ->
@orientation += ROTATE_INCR * deltaTime
@orientation %= 360
accelerate: (deltaTime) ->
# accelerate along orientation
@vel.x += ACCEL_INCR * deltaTime * Math.cos(util.toRad(@orientation))
@vel.y += ACCEL_INCR * deltaTime * Math.sin(util.toRad(@orientation))
# but make sure we're not going too fast
speed = Math.sqrt(Math.pow(@vel.x, 2) + Math.pow(@vel.y, 2))
if speed > MAX_SPEED
cur_vel_angle = util.getAngle(@vel.y, @vel.x)
@vel.x = Math.cos(cur_vel_angle) * MAX_SPEED
@vel.y = Math.sin(cur_vel_angle) * MAX_SPEED
@thrusterSound.play()
decelerate: (deltaTime, type) ->
if type == "coast"
incr = ACCEL_INCR/4
else
incr = ACCEL_INCR
@thrusterSound.stop()
speed = Math.sqrt(Math.pow(@vel.x, 2) + Math.pow(@vel.y, 2))
speed -= incr * deltaTime
speed = 0 if speed < 0
cur_vel_angle = util.getAngle(@vel.y, @vel.x)
@vel.x = Math.cos(cur_vel_angle) * speed
@vel.y = Math.sin(cur_vel_angle) * speed
destroy: ->
@thrusterSound.stop()
super
class Asteroid extends Entity
key: 'asteroid'
constructor: (x, y, x_vel, y_vel) ->
super
class Bullet extends Entity
VEL_MAG = 200
key: 'bullet'
constructor: (ship) ->
# we derive the bullet's position and velocity vector
# from the ship's
ship_cos = Math.cos(util.toRad(ship.orientation))
ship_sin = Math.sin(util.toRad(ship.orientation))
x = ship.pos.x + ship.img.width/2 * ship_cos
y = ship.pos.y + ship.img.width/2 * ship_sin
x_vel = VEL_MAG * ship_cos
y_vel = VEL_MAG * ship_sin
x_vel += ship.vel.x unless util.sign(ship.vel.x) != util.sign(x_vel)
y_vel += ship.vel.y unless util.sign(ship.vel.y) != util.sign(y_vel)
@wraparound = false
super(x, y , x_vel, y_vel)
exports = this
exports.Entity = Entity
exports.Ship = Ship
exports.Asteroid = Asteroid
exports.Bullet = Bullet
| true | # Copyright (c) 2013 PI:NAME:<NAME>END_PI
# All Rights Reserved
#
# entity.coffee
#
# See LICENSE for licensing
#
util = Asteroids.util
class Entity
# orientation in degrees
orientation: 0
wraparound: true
img: null
destroyed: false
constructor: (x, y, vel_x=0, vel_y=0, orientation=0) ->
@pos = {}
@vel = {}
@pos.x = x
@pos.y = y
@vel.x = vel_x
@vel.y = vel_y
gGame.imageManager.loadImage(@key, @setImage)
setImage: (img) =>
unless @destroyed
@img = img
@render()
gGame.canvas.add(@img)
render: =>
if @destroyed and @img?
gGame.canvas.remove(@img)
@img = null
return
[cX, cY, cAngle] = gGame.mathToCanvas(@pos.x, @pos.y, @orientation)
if @img
@img.set(
angle: cAngle
left: cX
top: cY
).setCoords() # setCoords() will update bounding box
update: (deltaTime) ->
return if @destroyed
deltaX = deltaTime/1000 * @vel.x
deltaY = deltaTime/1000 * @vel.y
@pos.x += deltaX
@pos.y += deltaY
width = gGame.GAME_WIDTH
height = gGame.GAME_HEIGHT
if @wraparound
while @pos.x >= width
@pos.x -= width
while @pos.y >= height
@pos.y -= height
while @pos.x < 0
@pos.x += width
while @pos.y < 0
@pos.y += height
else
if (@pos.x < 0 or @pos.x >= width or
@pos.y < 0 or @pos.y >= height)
@outOfBounds()
outOfBounds: () ->
gGame.destroy(this)
collidesWith: (other) ->
if this.img? and other.img?
return this.img.intersectsWithObject(other.img)
else
return false
destroy: ->
@destroyed = true
class Ship extends Entity
ACCEL_INCR = 1/10
ROTATE_INCR = (360/1000) * 0.5
MAX_SPEED = 500
key: 'PI:KEY:<KEY>END_PI'
thrusterSound: null
constructor: (x, y, x_vel, y_vel) ->
super
@thrusterSound = new Sound('thruster', true)
update: (deltaTime) ->
return if @destroyed
actionState = gGame.inputEngine.actionState
if actionState['turn-left']
@rotateLeft(deltaTime)
if actionState['turn-right']
@rotateRight(deltaTime)
if actionState['accelerate']
@accelerate(deltaTime)
if actionState['decelerate']
@decelerate(deltaTime)
if not actionState['decelerate'] and
not actionState['accelerate']
@decelerate(deltaTime, "coast")
super(deltaTime)
rotateRight: (deltaTime) ->
@orientation -= ROTATE_INCR * deltaTime
if (@orientation < 0)
@orientation += Math.floor(Math.abs(@orientation) / 360)
rotateLeft: (deltaTime) ->
@orientation += ROTATE_INCR * deltaTime
@orientation %= 360
accelerate: (deltaTime) ->
# accelerate along orientation
@vel.x += ACCEL_INCR * deltaTime * Math.cos(util.toRad(@orientation))
@vel.y += ACCEL_INCR * deltaTime * Math.sin(util.toRad(@orientation))
# but make sure we're not going too fast
speed = Math.sqrt(Math.pow(@vel.x, 2) + Math.pow(@vel.y, 2))
if speed > MAX_SPEED
cur_vel_angle = util.getAngle(@vel.y, @vel.x)
@vel.x = Math.cos(cur_vel_angle) * MAX_SPEED
@vel.y = Math.sin(cur_vel_angle) * MAX_SPEED
@thrusterSound.play()
decelerate: (deltaTime, type) ->
if type == "coast"
incr = ACCEL_INCR/4
else
incr = ACCEL_INCR
@thrusterSound.stop()
speed = Math.sqrt(Math.pow(@vel.x, 2) + Math.pow(@vel.y, 2))
speed -= incr * deltaTime
speed = 0 if speed < 0
cur_vel_angle = util.getAngle(@vel.y, @vel.x)
@vel.x = Math.cos(cur_vel_angle) * speed
@vel.y = Math.sin(cur_vel_angle) * speed
destroy: ->
@thrusterSound.stop()
super
class Asteroid extends Entity
key: 'asteroid'
constructor: (x, y, x_vel, y_vel) ->
super
# A projectile fired from a ship's nose, travelling along the ship's heading.
# Bullets do not wrap around the screen edges.
class Bullet extends Entity
  VEL_MAG = 200
  key: 'bullet'
  constructor: (ship) ->
    # Position and velocity are derived from the firing ship.
    headingCos = Math.cos(util.toRad(ship.orientation))
    headingSin = Math.sin(util.toRad(ship.orientation))
    # Spawn at the tip of the ship: half the sprite width from its center.
    x = ship.pos.x + ship.img.width/2 * headingCos
    y = ship.pos.y + ship.img.width/2 * headingSin
    x_vel = VEL_MAG * headingCos
    y_vel = VEL_MAG * headingSin
    # Inherit the ship's momentum only when it points the same way as the
    # shot — never let it slow the bullet down.
    x_vel += ship.vel.x if util.sign(ship.vel.x) == util.sign(x_vel)
    y_vel += ship.vel.y if util.sign(ship.vel.y) == util.sign(y_vel)
    @wraparound = false
    super(x, y, x_vel, y_vel)
# Publish the public classes on the enclosing scope (browser-style export).
exports = this
exports.Entity = Entity
exports.Ship = Ship
exports.Asteroid = Asteroid
exports.Bullet = Bullet
|
[
{
"context": "r compose file with the new image.\n#\n# Author:\n# Kevin Li\n\nfs = require(\"fs\")\npath = require 'path'\n\ndefaul",
"end": 1352,
"score": 0.9984375834465027,
"start": 1344,
"tag": "NAME",
"value": "Kevin Li"
}
] | src/ecs.coffee | mlf4aiur/hubot-ecs | 0 | # Description:
# A hubot script for manage AWS ECS cluster
#
# Dependencies:
# "shelljs": ">= 0.5.3"
#
# Configuration
# HUBOT_ECS_AUTHORIZED_ROLES - Restrict users with a list of authorized roles (need to install hubot-auth)
# HUBOT_ECS_CLUSTER_PATH - The directory to put your ECS cluster Docker compose file
#
# Commands:
# hubot ecs-cli list-cluster - Lists all of the ECS clusters.
# hubot ecs-cli <cluster name> list-project - Lists all of the ECS projects in your cluster.
# hubot ecs-cli <cluster name> ps - Lists all of the running containers in default ECS cluster.
# hubot ecs-cli <cluster name> <project name> compose service ps - Lists all the containers in your cluster that belong to the service created with the compose project.
# hubot ecs-cli <cluster name> <project name> compose service up - Creates an ECS service from your compose file (if it does not already exist) and runs one instance of that task on your cluster (a combination of create and start). This command updates the desired count of the service to 1.
# hubot ecs-cli <cluster name> <project name> list-image - Lists all the images in your cluster that belong to the service created with the compose project.
# hubot ecs-cli <cluster name> <project name> update-image <new image> - Updates your compose file with the new image.
#
# Author:
# Kevin Li
fs = require("fs")
path = require 'path'
# Fallback compose-file directory bundled with the package.
defaultECSClusterPath = path.join __dirname, '..', 'ecs_cluster'
# Comma-separated hubot-auth roles allowed to run ECS commands.
if process.env.HUBOT_ECS_AUTHORIZED_ROLES
  authorizedRoles = process.env.HUBOT_ECS_AUTHORIZED_ROLES
else
  authorizedRoles = 'admin,ecs_admin'
# Directory holding one subdirectory per cluster, each with "<project>.yml"
# Docker compose files.
if process.env.HUBOT_ECS_CLUSTER_PATH
  hubotECSClusterPath = process.env.HUBOT_ECS_CLUSTER_PATH
else
  hubotECSClusterPath = defaultECSClusterPath
if not fs.existsSync(hubotECSClusterPath)
  # Fix: the old message interpolated hubotECSClusterPath twice, so the
  # "default dir" it printed was the missing path, not the actual fallback.
  console.log "#{hubotECSClusterPath} not found in hubot working dir, falling back to default dir: #{defaultECSClusterPath}"
  hubotECSClusterPath = defaultECSClusterPath
# Matches "domain[:port]/repo[/img][:tag]" style image references.
imagePattern = /// ^ # begin of line
  ([-.\w]+) # domain
  :?([\d]+)? # port
  /([-\w]+) # repo
  /?([-\w]+)? # img
  :?([-.\w]+)? # tag
  $ /// # end of line
# Matches bare "img[:tag]" (official-library style) references.
rootRepoPattern = /// ^ # begin of line
  ([-\w]+) # img
  :?([-.\w]+)? # tag
  $ /// # end of line
# True when `user` holds any globally-authorized role or the per-cluster
# "<cluster>_admin" role.
# Fix: authorizedRoles.split(',') was recomputed on every loop iteration;
# hoist it (and the cluster-admin string) out of the loop.
hasAnAuthorizedRole = (robot, user, cluster) ->
  allowed = authorizedRoles.split(',')
  clusterAdmin = "#{cluster}_admin"
  for r in robot.auth.userRoles user
    return true if r in allowed or r is clusterAdmin
  return false
# Gate for cluster-scoped commands. Access is open unless hubot-auth is
# loaded AND a role list is configured; then the user needs a qualifying
# role. (`res` is unused but kept so call sites stay unchanged.)
isAuthorized = (robot, user, cluster, res) ->
  authPluginPresent = robot.auth? and robot.auth.hasRole?
  rolesEnforced = authPluginPresent and authorizedRoles?
  return true unless rolesEnforced
  hasAnAuthorizedRole robot, user, cluster
# Report a command result to the user: a plain reply for non-Slack adapters,
# otherwise a colored slack-attachment titled "<cluster>-<project>: <command>".
displayResult = (robot, res, cluster, project, command, text) ->
  if robot.adapterName != "slack"
    res.reply "Cluster: #{cluster}, Project: #{project}\nCommand: #{command}\n#{text}"
  else
    # Emitted for the hubot-slack adapter's attachment listener.
    robot.emit 'slack-attachment',
      channel: "#{res.message.user.room}"
      content:
        color: "#55acee"
        fallback: "#{text}"
        title: "#{cluster}-#{project}: #{command}"
        text: "#{text}"
# Each subdirectory of `srcPath` is one ECS cluster; return their names.
getClusters = (srcPath) ->
  entries = fs.readdirSync(srcPath)
  entries.filter (entry) -> fs.statSync(path.join(srcPath, entry)).isDirectory()
# Each "<name>.yml" compose file in `srcPath` is one project; return the
# project names with the ".yml" suffix stripped.
getProjects = (srcPath) ->
  ymlFiles = fs.readdirSync(srcPath).filter (entry) ->
    fs.statSync(path.join(srcPath, entry)).isFile() and entry.endsWith('.yml')
  ymlFiles.map (entry) -> entry.slice(0, -4)
# Shell out to `ecs-cli compose` for the project's compose file and report
# the command's stdout back through displayResult.
runECSCompose = (robot, cluster, project, command, res) ->
  shell = require('shelljs')
  projectPath = path.join hubotECSClusterPath, cluster, project
  composeFile = "#{projectPath}.yml"
  # NOTE(review): `command`/`project` are interpolated into a shell string
  # unescaped; callers are role-gated, but this is still injection-prone.
  ecsCli = "ecs-cli compose --file #{composeFile} --project-name #{project} #{command}"
  console.log ecsCli
  shell.exec ecsCli, {async:true}, (code, stdout, stderr) ->
    displayResult robot, res, cluster, project, "compose #{command}", stdout
# List the service headers and their "image:" lines from the project's
# compose file and report them via displayResult.
# Fix: removed a dead `shell = require('shelljs')` local that was never used.
listImage = (robot, cluster, project, res) ->
  projectPath = path.join hubotECSClusterPath, cluster, project
  composeFile = "#{projectPath}.yml"
  result = []
  for line in fs.readFileSync(composeFile).toString().split '\n'
    # Top-level "service:" headers ...
    if line.match /^[^\s]+:\s*$/
      result.push line
    # ... and their indented "image:" entries.
    if line.match /^[\s]+image:/
      result.push line
  displayResult robot, res, cluster, project, "list-image", result.join '\n'
# Normalize a Docker image reference into [fullName, nameWithoutTag].
# Accepts "domain[:port]/repo[/img][:tag]" (imagePattern) or a bare
# "img[:tag]" (rootRepoPattern); returns [undefined, undefined] when
# neither pattern matches.
parseImage = (image) ->
  registryMatch = image.match(imagePattern)
  if registryMatch
    [full, domain, port, repo, img, tag] = registryMatch
    useImage = ''
    useImage = domain if domain
    useImage = "#{useImage}:#{port}" if port
    useImage = "#{useImage}/#{repo}" if repo
    useImage = "#{useImage}/#{img}" if img
    imageWithoutTag = useImage
    useImage = "#{useImage}:#{tag}" if tag
    return [useImage, imageWithoutTag]
  bareMatch = image.match(rootRepoPattern)
  if bareMatch
    [full, img, tag] = bareMatch
    if img
      useImage = "#{img}"
      imageWithoutTag = useImage
    useImage = "#{useImage}:#{tag}" if tag
    return [useImage, imageWithoutTag]
  [undefined, undefined]
# Rewrite the project's compose file, replacing any "image:" line whose
# value starts with `imageWithoutTag` by `useImage`, and report old -> new.
updateImage = (robot, cluster, project, useImage, imageWithoutTag, res) ->
  projectPath = path.join hubotECSClusterPath, cluster, project
  composeFile = "#{projectPath}.yml"
  imageWithoutTagPattern = new RegExp("(^[\\s]+image:[\\s]+)(#{imageWithoutTag}.*)$")
  # Fix: `text` previously stayed undefined when no line matched, producing
  # "Result: undefined" in the log and the chat reply.
  text = "no image matching #{imageWithoutTag} found in #{composeFile}"
  newContent = []
  oldContent = fs.readFileSync(composeFile).toString()
  for line in oldContent.split '\n'
    imageWithoutTagMatch = line.match imageWithoutTagPattern
    if imageWithoutTagMatch
      newLine = "#{imageWithoutTagMatch[1]}#{useImage}"
      newContent.push newLine
      text = "old image: #{imageWithoutTagMatch[2]}\nnew image: #{useImage}"
    else
      newContent.push line
  fs.writeFileSync composeFile, newContent.join '\n'
  console.log "ecs-cli update-image: User: #{res.message.user.name}, Cluster: #{cluster}, Project: #{project}, Result: #{text}"
  displayResult robot, res, cluster, project, "update-image", text
# Wire up the hubot command handlers. Every cluster-scoped command is gated
# through isAuthorized before any filesystem or shell work happens.
module.exports = (robot) ->
  # hubot ecs-cli list-cluster
  robot.respond /ecs-cli\s+list-cluster/i, (res) ->
    clusters = getClusters hubotECSClusterPath
    console.log "ecs-cli list-cluster: User: #{res.message.user.name}"
    res.reply "Clusters:\n#{clusters.join('\n')}"
  # hubot ecs-cli <cluster name> list-project
  robot.respond /ecs-cli\s+([^\s]+)\s+list-project/i, (res) ->
    cluster = res.match[1].trim() || 'default'
    clusterPath = path.join hubotECSClusterPath, cluster
    authorized = isAuthorized robot, res.envelope.user, cluster, res
    console.log "ecs-cli list-project: User: #{res.message.user.name}, Cluster: #{cluster}, Cluster path: #{clusterPath}"
    if authorized
      if fs.existsSync clusterPath
        projects = getProjects clusterPath
        res.reply "Projects in #{cluster}:\n#{projects.join('\n')}"
      else
        res.reply "Cluster: #{cluster} not exists"
    else
      res.reply "I can't do that, you need at least one of these roles: #{authorizedRoles},#{cluster}_admin"
  # hubot ecs-cli <cluster name> <project name> compose <sub command>
  robot.respond /ecs-cli\s+([^\s]+)\s+([^\s]+)\s+compose\s+(.+)/i, (res) ->
    cluster = res.match[1].trim() || 'default'
    project = res.match[2].trim() || 'default'
    command = res.match[3].trim() || '--help'
    authorized = isAuthorized robot, res.envelope.user, cluster, res
    console.log "ecs-cli compose: User: #{res.message.user.name}, Cluster: #{cluster}, Project: #{project}, Command: #{command}"
    if authorized
      runECSCompose robot, cluster, project, command, res
    else
      res.reply "I can't do that, you need at least one of these roles: #{authorizedRoles},#{cluster}_admin"
  # hubot ecs-cli <cluster name> <project name> list-image
  robot.respond /ecs-cli\s+([^\s]+)\s+([^\s]+)\s+list-image/i, (res) ->
    cluster = res.match[1].trim() || 'default'
    project = res.match[2].trim() || 'default'
    authorized = isAuthorized robot, res.envelope.user, cluster, res
    console.log "ecs-cli list-image User: #{res.message.user.name}, Cluster: #{cluster}, Project: #{project}, Command: list-image"
    if authorized
      listImage robot, cluster, project, res
    else
      res.reply "I can't do that, you need at least one of these roles: #{authorizedRoles},#{cluster}_admin"
  # hubot ecs-cli <cluster name> <project name> update-image <new image>
  robot.respond /ecs-cli\s+([^\s]+)\s+([^\s]+)\s+update-image\s+([^\s]+)/i, (res) ->
    cluster = res.match[1].trim() || 'default'
    project = res.match[2].trim() || 'default'
    image = res.match[3].trim() || '--help'
    # Strip an accidental URL scheme pasted in front of the registry host.
    if image.startsWith 'http://'
      image = image.slice 7
    if image.startsWith 'https://'
      image = image.slice 8
    authorized = isAuthorized robot, res.envelope.user, cluster, res
    console.log "ecs-cli update-image User: #{res.message.user.name}, Cluster: #{cluster}, Project: #{project}, Command: update-image, image: #{image}"
    if authorized
      [useImage, imageWithoutTag] = parseImage image
      if useImage
        updateImage robot, cluster, project, useImage, imageWithoutTag, res
      else
        text = "Unable to parse image name: #{image}, check the format and try again."
        displayResult robot, res, cluster, project, "update-image", text
    else
      res.reply "I can't do that, you need at least one of these roles: #{authorizedRoles},#{cluster}_admin"
| 210020 | # Description:
# A hubot script for manage AWS ECS cluster
#
# Dependencies:
# "shelljs": ">= 0.5.3"
#
# Configuration
# HUBOT_ECS_AUTHORIZED_ROLES - Restrict users with a list of authorized roles (need to install hubot-auth)
# HUBOT_ECS_CLUSTER_PATH - The directory to put your ECS cluster Docker compose file
#
# Commands:
# hubot ecs-cli list-cluster - Lists all of the ECS clusters.
# hubot ecs-cli <cluster name> list-project - Lists all of the ECS projects in your cluster.
# hubot ecs-cli <cluster name> ps - Lists all of the running containers in default ECS cluster.
# hubot ecs-cli <cluster name> <project name> compose service ps - Lists all the containers in your cluster that belong to the service created with the compose project.
# hubot ecs-cli <cluster name> <project name> compose service up - Creates an ECS service from your compose file (if it does not already exist) and runs one instance of that task on your cluster (a combination of create and start). This command updates the desired count of the service to 1.
# hubot ecs-cli <cluster name> <project name> list-image - Lists all the images in your cluster that belong to the service created with the compose project.
# hubot ecs-cli <cluster name> <project name> update-image <new image> - Updates your compose file with the new image.
#
# Author:
# <NAME>
fs = require("fs")
path = require 'path'
defaultECSClusterPath = path.join __dirname, '..', 'ecs_cluster'
if process.env.HUBOT_ECS_AUTHORIZED_ROLES
authorizedRoles = process.env.HUBOT_ECS_AUTHORIZED_ROLES
else
authorizedRoles = 'admin,ecs_admin'
if process.env.HUBOT_ECS_CLUSTER_PATH
hubotECSClusterPath = process.env.HUBOT_ECS_CLUSTER_PATH
else
hubotECSClusterPath = defaultECSClusterPath
if not fs.existsSync(hubotECSClusterPath)
console.log "#{hubotECSClusterPath} not found in hubot working dir, using to default dir: #{hubotECSClusterPath}"
hubotECSClusterPath = defaultECSClusterPath
imagePattern = /// ^ # begin of line
([-.\w]+) # domain
:?([\d]+)? # port
/([-\w]+) # repo
/?([-\w]+)? # img
:?([-.\w]+)? # tag
$ /// # end of line
rootRepoPattern = /// ^ # begin of line
([-\w]+) # img
:?([-.\w]+)? # tag
$ /// # end of line
hasAnAuthorizedRole = (robot, user, cluster) ->
for r in robot.auth.userRoles user
return true if r in authorizedRoles.split(',') or r is "#{cluster}_admin"
return false
isAuthorized = (robot, user, cluster, res) ->
hasHubotAuth = robot.auth? and robot.auth.hasRole?
mustRestrictWithRoles = hasHubotAuth and authorizedRoles?
(not mustRestrictWithRoles) or hasAnAuthorizedRole robot, user, cluster
displayResult = (robot, res, cluster, project, command, text) ->
if robot.adapterName != "slack"
res.reply "Cluster: #{cluster}, Project: #{project}\nCommand: #{command}\n#{text}"
else
robot.emit 'slack-attachment',
channel: "#{res.message.user.room}"
content:
color: "#55acee"
fallback: "#{text}"
title: "#{cluster}-#{project}: #{command}"
text: "#{text}"
getClusters = (srcPath) ->
clusters = (file for file in fs.readdirSync(srcPath) when fs.statSync(path.join(srcPath, file)).isDirectory())
getProjects = (srcPath) ->
projects = (file.substring(0, file.length - 4) for file in fs.readdirSync(srcPath) when fs.statSync(path.join(srcPath, file)).isFile() and file.endsWith('.yml'))
runECSCompose = (robot, cluster, project, command, res) ->
shell = require('shelljs')
projectPath = path.join hubotECSClusterPath, cluster, project
composeFile = "#{projectPath}.yml"
ecsCli = "ecs-cli compose --file #{composeFile} --project-name #{project} #{command}"
console.log ecsCli
shell.exec ecsCli, {async:true}, (code, stdout, stderr) ->
displayResult robot, res, cluster, project, "compose #{command}", stdout
listImage = (robot, cluster, project, res) ->
shell = require('shelljs')
projectPath = path.join hubotECSClusterPath, cluster, project
composeFile = "#{projectPath}.yml"
result = []
for line in fs.readFileSync(composeFile).toString().split '\n'
if line.match /^[^\s]+:\s*$/
result.push line
if line.match /^[\s]+image:/
result.push line
displayResult robot, res, cluster, project, "list-image", result.join '\n'
parseImage = (image) ->
if image.match(imagePattern)
imageMatch = image.match(imagePattern)
domain = imageMatch[1]
port = imageMatch[2]
repo = imageMatch[3]
img = imageMatch[4]
tag = imageMatch[5]
useImage = ''
if domain
useImage = domain
if port
useImage = "#{useImage}:#{port}"
if repo
useImage = "#{useImage}/#{repo}"
if img
useImage = "#{useImage}/#{img}"
imageWithoutTag = useImage
if tag
useImage = "#{useImage}:#{tag}"
return [useImage, imageWithoutTag]
else if image.match(rootRepoPattern)
imageMatch = image.match(rootRepoPattern)
img = imageMatch[1]
tag = imageMatch[2]
if img
useImage = "#{img}"
imageWithoutTag = useImage
if tag
useImage = "#{useImage}:#{tag}"
return [useImage, imageWithoutTag]
else
return [undefined, undefined]
updateImage = (robot, cluster, project, useImage, imageWithoutTag, res) ->
projectPath = path.join hubotECSClusterPath, cluster, project
composeFile = "#{projectPath}.yml"
imageWithoutTagPattern = new RegExp("(^[\\s]+image:[\\s]+)(#{imageWithoutTag}.*)$")
newContent = []
oldContent = fs.readFileSync(composeFile).toString()
for line in oldContent.split '\n'
imageWithoutTagMatch = line.match imageWithoutTagPattern
if imageWithoutTagMatch
newLine = "#{imageWithoutTagMatch[1]}#{useImage}"
newContent.push newLine
text = "old image: #{imageWithoutTagMatch[2]}\nnew image: #{useImage}"
else
newContent.push line
fs.writeFileSync composeFile, newContent.join '\n'
console.log "ecs-cli update-image: User: #{res.message.user.name}, Cluster: #{cluster}, Project: #{project}, Result: #{text}"
displayResult robot, res, cluster, project, "update-image", text
module.exports = (robot) ->
# hubot ecs-cli list-cluster
robot.respond /ecs-cli\s+list-cluster/i, (res) ->
clusters = getClusters hubotECSClusterPath
console.log "ecs-cli list-cluster: User: #{res.message.user.name}"
res.reply "Clusters:\n#{clusters.join('\n')}"
# hubot ecs-cli <cluster name> list-project
robot.respond /ecs-cli\s+([^\s]+)\s+list-project/i, (res) ->
cluster = res.match[1].trim() || 'default'
clusterPath = path.join hubotECSClusterPath, cluster
authorized = isAuthorized robot, res.envelope.user, cluster, res
console.log "ecs-cli list-project: User: #{res.message.user.name}, Cluster: #{cluster}, Cluster path: #{clusterPath}"
if authorized
if fs.existsSync clusterPath
projects = getProjects clusterPath
res.reply "Projects in #{cluster}:\n#{projects.join('\n')}"
else
res.reply "Cluster: #{cluster} not exists"
else
res.reply "I can't do that, you need at least one of these roles: #{authorizedRoles},#{cluster}_admin"
# hubot ecs-cli <cluster name> <project name> compose <sub command>
robot.respond /ecs-cli\s+([^\s]+)\s+([^\s]+)\s+compose\s+(.+)/i, (res) ->
cluster = res.match[1].trim() || 'default'
project = res.match[2].trim() || 'default'
command = res.match[3].trim() || '--help'
authorized = isAuthorized robot, res.envelope.user, cluster, res
console.log "ecs-cli compose: User: #{res.message.user.name}, Cluster: #{cluster}, Project: #{project}, Command: #{command}"
if authorized
runECSCompose robot, cluster, project, command, res
else
res.reply "I can't do that, you need at least one of these roles: #{authorizedRoles},#{cluster}_admin"
# hubot ecs-cli <cluster name> <project name> list-image
robot.respond /ecs-cli\s+([^\s]+)\s+([^\s]+)\s+list-image/i, (res) ->
cluster = res.match[1].trim() || 'default'
project = res.match[2].trim() || 'default'
authorized = isAuthorized robot, res.envelope.user, cluster, res
console.log "ecs-cli list-image User: #{res.message.user.name}, Cluster: #{cluster}, Project: #{project}, Command: list-image"
if authorized
listImage robot, cluster, project, res
else
res.reply "I can't do that, you need at least one of these roles: #{authorizedRoles},#{cluster}_admin"
# hubot ecs-cli <cluster name> <project name> update-image <new image>
robot.respond /ecs-cli\s+([^\s]+)\s+([^\s]+)\s+update-image\s+([^\s]+)/i, (res) ->
cluster = res.match[1].trim() || 'default'
project = res.match[2].trim() || 'default'
image = res.match[3].trim() || '--help'
if image.startsWith 'http://'
image = image.slice 7
if image.startsWith 'https://'
image = image.slice 8
authorized = isAuthorized robot, res.envelope.user, cluster, res
console.log "ecs-cli update-image User: #{res.message.user.name}, Cluster: #{cluster}, Project: #{project}, Command: update-image, image: #{image}"
if authorized
[useImage, imageWithoutTag] = parseImage image
if useImage
updateImage robot, cluster, project, useImage, imageWithoutTag, res
else
text = "Unable to parse image name: #{image}, check the format and try again."
displayResult robot, res, cluster, project, "update-image", text
else
res.reply "I can't do that, you need at least one of these roles: #{authorizedRoles},#{cluster}_admin"
| true | # Description:
# A hubot script for manage AWS ECS cluster
#
# Dependencies:
# "shelljs": ">= 0.5.3"
#
# Configuration
# HUBOT_ECS_AUTHORIZED_ROLES - Restrict users with a list of authorized roles (need to install hubot-auth)
# HUBOT_ECS_CLUSTER_PATH - The directory to put your ECS cluster Docker compose file
#
# Commands:
# hubot ecs-cli list-cluster - Lists all of the ECS clusters.
# hubot ecs-cli <cluster name> list-project - Lists all of the ECS projects in your cluster.
# hubot ecs-cli <cluster name> ps - Lists all of the running containers in default ECS cluster.
# hubot ecs-cli <cluster name> <project name> compose service ps - Lists all the containers in your cluster that belong to the service created with the compose project.
# hubot ecs-cli <cluster name> <project name> compose service up - Creates an ECS service from your compose file (if it does not already exist) and runs one instance of that task on your cluster (a combination of create and start). This command updates the desired count of the service to 1.
# hubot ecs-cli <cluster name> <project name> list-image - Lists all the images in your cluster that belong to the service created with the compose project.
# hubot ecs-cli <cluster name> <project name> update-image <new image> - Updates your compose file with the new image.
#
# Author:
# PI:NAME:<NAME>END_PI
fs = require("fs")
path = require 'path'
defaultECSClusterPath = path.join __dirname, '..', 'ecs_cluster'
if process.env.HUBOT_ECS_AUTHORIZED_ROLES
authorizedRoles = process.env.HUBOT_ECS_AUTHORIZED_ROLES
else
authorizedRoles = 'admin,ecs_admin'
if process.env.HUBOT_ECS_CLUSTER_PATH
hubotECSClusterPath = process.env.HUBOT_ECS_CLUSTER_PATH
else
hubotECSClusterPath = defaultECSClusterPath
if not fs.existsSync(hubotECSClusterPath)
console.log "#{hubotECSClusterPath} not found in hubot working dir, using to default dir: #{hubotECSClusterPath}"
hubotECSClusterPath = defaultECSClusterPath
imagePattern = /// ^ # begin of line
([-.\w]+) # domain
:?([\d]+)? # port
/([-\w]+) # repo
/?([-\w]+)? # img
:?([-.\w]+)? # tag
$ /// # end of line
rootRepoPattern = /// ^ # begin of line
([-\w]+) # img
:?([-.\w]+)? # tag
$ /// # end of line
hasAnAuthorizedRole = (robot, user, cluster) ->
for r in robot.auth.userRoles user
return true if r in authorizedRoles.split(',') or r is "#{cluster}_admin"
return false
isAuthorized = (robot, user, cluster, res) ->
hasHubotAuth = robot.auth? and robot.auth.hasRole?
mustRestrictWithRoles = hasHubotAuth and authorizedRoles?
(not mustRestrictWithRoles) or hasAnAuthorizedRole robot, user, cluster
displayResult = (robot, res, cluster, project, command, text) ->
if robot.adapterName != "slack"
res.reply "Cluster: #{cluster}, Project: #{project}\nCommand: #{command}\n#{text}"
else
robot.emit 'slack-attachment',
channel: "#{res.message.user.room}"
content:
color: "#55acee"
fallback: "#{text}"
title: "#{cluster}-#{project}: #{command}"
text: "#{text}"
getClusters = (srcPath) ->
clusters = (file for file in fs.readdirSync(srcPath) when fs.statSync(path.join(srcPath, file)).isDirectory())
getProjects = (srcPath) ->
projects = (file.substring(0, file.length - 4) for file in fs.readdirSync(srcPath) when fs.statSync(path.join(srcPath, file)).isFile() and file.endsWith('.yml'))
runECSCompose = (robot, cluster, project, command, res) ->
shell = require('shelljs')
projectPath = path.join hubotECSClusterPath, cluster, project
composeFile = "#{projectPath}.yml"
ecsCli = "ecs-cli compose --file #{composeFile} --project-name #{project} #{command}"
console.log ecsCli
shell.exec ecsCli, {async:true}, (code, stdout, stderr) ->
displayResult robot, res, cluster, project, "compose #{command}", stdout
listImage = (robot, cluster, project, res) ->
shell = require('shelljs')
projectPath = path.join hubotECSClusterPath, cluster, project
composeFile = "#{projectPath}.yml"
result = []
for line in fs.readFileSync(composeFile).toString().split '\n'
if line.match /^[^\s]+:\s*$/
result.push line
if line.match /^[\s]+image:/
result.push line
displayResult robot, res, cluster, project, "list-image", result.join '\n'
parseImage = (image) ->
if image.match(imagePattern)
imageMatch = image.match(imagePattern)
domain = imageMatch[1]
port = imageMatch[2]
repo = imageMatch[3]
img = imageMatch[4]
tag = imageMatch[5]
useImage = ''
if domain
useImage = domain
if port
useImage = "#{useImage}:#{port}"
if repo
useImage = "#{useImage}/#{repo}"
if img
useImage = "#{useImage}/#{img}"
imageWithoutTag = useImage
if tag
useImage = "#{useImage}:#{tag}"
return [useImage, imageWithoutTag]
else if image.match(rootRepoPattern)
imageMatch = image.match(rootRepoPattern)
img = imageMatch[1]
tag = imageMatch[2]
if img
useImage = "#{img}"
imageWithoutTag = useImage
if tag
useImage = "#{useImage}:#{tag}"
return [useImage, imageWithoutTag]
else
return [undefined, undefined]
updateImage = (robot, cluster, project, useImage, imageWithoutTag, res) ->
projectPath = path.join hubotECSClusterPath, cluster, project
composeFile = "#{projectPath}.yml"
imageWithoutTagPattern = new RegExp("(^[\\s]+image:[\\s]+)(#{imageWithoutTag}.*)$")
newContent = []
oldContent = fs.readFileSync(composeFile).toString()
for line in oldContent.split '\n'
imageWithoutTagMatch = line.match imageWithoutTagPattern
if imageWithoutTagMatch
newLine = "#{imageWithoutTagMatch[1]}#{useImage}"
newContent.push newLine
text = "old image: #{imageWithoutTagMatch[2]}\nnew image: #{useImage}"
else
newContent.push line
fs.writeFileSync composeFile, newContent.join '\n'
console.log "ecs-cli update-image: User: #{res.message.user.name}, Cluster: #{cluster}, Project: #{project}, Result: #{text}"
displayResult robot, res, cluster, project, "update-image", text
module.exports = (robot) ->
# hubot ecs-cli list-cluster
robot.respond /ecs-cli\s+list-cluster/i, (res) ->
clusters = getClusters hubotECSClusterPath
console.log "ecs-cli list-cluster: User: #{res.message.user.name}"
res.reply "Clusters:\n#{clusters.join('\n')}"
# hubot ecs-cli <cluster name> list-project
robot.respond /ecs-cli\s+([^\s]+)\s+list-project/i, (res) ->
cluster = res.match[1].trim() || 'default'
clusterPath = path.join hubotECSClusterPath, cluster
authorized = isAuthorized robot, res.envelope.user, cluster, res
console.log "ecs-cli list-project: User: #{res.message.user.name}, Cluster: #{cluster}, Cluster path: #{clusterPath}"
if authorized
if fs.existsSync clusterPath
projects = getProjects clusterPath
res.reply "Projects in #{cluster}:\n#{projects.join('\n')}"
else
res.reply "Cluster: #{cluster} not exists"
else
res.reply "I can't do that, you need at least one of these roles: #{authorizedRoles},#{cluster}_admin"
# hubot ecs-cli <cluster name> <project name> compose <sub command>
robot.respond /ecs-cli\s+([^\s]+)\s+([^\s]+)\s+compose\s+(.+)/i, (res) ->
cluster = res.match[1].trim() || 'default'
project = res.match[2].trim() || 'default'
command = res.match[3].trim() || '--help'
authorized = isAuthorized robot, res.envelope.user, cluster, res
console.log "ecs-cli compose: User: #{res.message.user.name}, Cluster: #{cluster}, Project: #{project}, Command: #{command}"
if authorized
runECSCompose robot, cluster, project, command, res
else
res.reply "I can't do that, you need at least one of these roles: #{authorizedRoles},#{cluster}_admin"
# hubot ecs-cli <cluster name> <project name> list-image
robot.respond /ecs-cli\s+([^\s]+)\s+([^\s]+)\s+list-image/i, (res) ->
cluster = res.match[1].trim() || 'default'
project = res.match[2].trim() || 'default'
authorized = isAuthorized robot, res.envelope.user, cluster, res
console.log "ecs-cli list-image User: #{res.message.user.name}, Cluster: #{cluster}, Project: #{project}, Command: list-image"
if authorized
listImage robot, cluster, project, res
else
res.reply "I can't do that, you need at least one of these roles: #{authorizedRoles},#{cluster}_admin"
# hubot ecs-cli <cluster name> <project name> update-image <new image>
robot.respond /ecs-cli\s+([^\s]+)\s+([^\s]+)\s+update-image\s+([^\s]+)/i, (res) ->
cluster = res.match[1].trim() || 'default'
project = res.match[2].trim() || 'default'
image = res.match[3].trim() || '--help'
if image.startsWith 'http://'
image = image.slice 7
if image.startsWith 'https://'
image = image.slice 8
authorized = isAuthorized robot, res.envelope.user, cluster, res
console.log "ecs-cli update-image User: #{res.message.user.name}, Cluster: #{cluster}, Project: #{project}, Command: update-image, image: #{image}"
if authorized
[useImage, imageWithoutTag] = parseImage image
if useImage
updateImage robot, cluster, project, useImage, imageWithoutTag, res
else
text = "Unable to parse image name: #{image}, check the format and try again."
displayResult robot, res, cluster, project, "update-image", text
else
res.reply "I can't do that, you need at least one of these roles: #{authorizedRoles},#{cluster}_admin"
|
[
{
"context": "\n { account: accounts[0], password: Password }\n _handleChange: (event) ->\n ",
"end": 3730,
"score": 0.9990217685699463,
"start": 3722,
"tag": "PASSWORD",
"value": "Password"
},
{
"context": "Element 'input', { type: 'password', uniqu... | lib/ethereum-interface.coffee | bellaj/atom-ethereum-interface | 3 | AtomSolidityView = require './ethereum-interface-view'
path = require 'path'
fs = require 'fs'
{CompositeDisposable} = require 'atom'
Web3 = require 'web3'
React = require 'react'
ReactDOM = require 'react-dom'
{MessagePanelView, PlainMessageView, LineMessageView} = require 'atom-message-panel'
Coinbase = ''
Password = ''
rpcAddress = atom.config.get('atom-ethereum-interface.rpcAddress')
if typeof web3 != 'undefined'
web3 = new Web3(web3.currentProvider)
else
web3 = new Web3(new (Web3.providers.HttpProvider)(rpcAddress))
module.exports = AtomSolidity =
atomSolidityView: null
modalPanel: null
subscriptions: null
  # Atom package entry point: build the side panel, watch the RPC-address
  # setting, and register the eth-interface commands.
  activate: (state) ->
    @atomSolidityView = new AtomSolidityView(state.atomSolidityViewState)
    @modalPanel = atom.workspace.addRightPanel(item: @atomSolidityView.getElement(), visible: false)
    atom.config.observe 'atom-ethereum-interface.rpcAddress', (newValue) ->
      # TODO: add url validation
      urlPattern = new RegExp('(http)://?')
      if urlPattern.test(newValue)
        rpcAddress = newValue
    # Empty global variable compiled
    @compiled = {}
    # Events subscribed to in atom's system can be easily cleaned up with a CompositeDisposable
    @subscriptions = new CompositeDisposable
    # Register command that toggles this view
    # NOTE(review): 'eth-interface:create' binds @create(), which is not
    # defined in this chunk — verify it exists elsewhere in the file.
    @subscriptions.add atom.commands.add 'atom-workspace', 'eth-interface:compile': => @compile()
    @subscriptions.add atom.commands.add 'atom-workspace', 'eth-interface:build': => @build()
    @subscriptions.add atom.commands.add 'atom-workspace', 'eth-interface:create': => @create()
    @subscriptions.add atom.commands.add 'atom-workspace', 'eth-interface:toggle': => @toggleView()
  # Tear down the panel, the command subscriptions, and the view itself.
  deactivate: ->
    @modalPanel.destroy()
    @subscriptions.dispose()
    @atomSolidityView.destroy()
  # Package state persisted by Atom and handed back to activate(state).
  serialize: ->
    atomSolidityViewState: @atomSolidityView.serialize()
  # Verify the web3 connection; on first use also pick a coinbase account
  # and cache its password via getBaseAccount.
  # NOTE(review): the outer `callback` can fire more than once (immediately
  # after kicking off getBaseAccount AND from the inner handlers), and the
  # inner `(err, callback)` parameter shadows it with a result object —
  # confirm intended control flow before relying on this.
  checkConnection: (callback)->
    that = this
    if !web3.isConnected()
      callback('Error could not connect to local geth instance!', null)
    else
      # If passphrase is not already set
      if Password == ''
        # Set coinbase
        # List all accounts and set selected as coinbase
        accounts = web3.eth.accounts
        that.getBaseAccount accounts, (err, callback) ->
          if err
            console.log err
          else
            Coinbase = callback.account
            Password = callback.password
            # Check if account is locked ? then prompt for password
            # NOTE(review): checkUnlock is declared as (Coinbase, callback)
            # but is invoked here with only a callback — arity mismatch.
            that.checkUnlock (err, callback) ->
              callback(null, true)
        callback(null, true)
  # Stub: intended to unlock `Coinbase` via web3.personal.unlockAccount;
  # currently only logs. NOTE(review): checkConnection calls this with a
  # single callback argument, not (Coinbase, callback).
  checkUnlock: (Coinbase, callback) ->
    # web3.personal.unlockAccount("Coinbase", password)
    console.log "In checkUnlock"
toggleView: ->
if @modalPanel.isVisible()
@modalPanel.hide()
else
@modalPanel.show()
  # Pop an atom-message-panel with a red, line-anchored compiler message.
  # NOTE(review): the `callback` parameter is accepted but never invoked.
  showErrorMessage: (line, message, callback) ->
    messages = new MessagePanelView(title: 'Solidity compiler messages')
    messages.attach()
    messages.add new LineMessageView(line: line, message: message, className: 'red-message')
  # Render an account picker plus password form (React) into #accounts-list
  # and hand the chosen {account, password} to `callback` when "Unlock" is
  # submitted.
  # NOTE(review): `callback` also fires synchronously at the bottom with
  # accounts[0] and an empty password, so it can be invoked multiple times.
  getBaseAccount: (accounts, callback) ->
    # Here we will select baseAccount for rest of the operations
    # we will also get password for that account
    that = this
    createAddressList = React.createClass(
      displayName: 'addressList'
      getInitialState: ->
        { account: accounts[0], password: Password }
      _handleChange: (event) ->
        this.setState { account: event.target.value }
      _handlePasswordChange: (event) ->
        this.setState { password: event.target.value }
      _handlePassword: (event) ->
        event.preventDefault()
        # Return account and password
        callback(null, this.state)
      render: ->
        # create dropdown list for accounts
        React.createElement 'div', { htmlFor: 'acc-n-pass', className: 'icon icon-link' },
          React.createElement 'select', { onChange: this._handleChange, value: this.state.account }, accounts.map (account, i) ->
            React.createElement 'option', { value: account }, account #options are address
          React.createElement 'form', { onSubmit: this._handlePassword, className: 'icon icon-lock' },
            React.createElement 'input', { type: 'password', uniqueName: "password", placeholder: "Password", value: this.state.password, onChange: this._handlePasswordChange }
            React.createElement 'input', { type: 'submit', value: 'Unlock' }
    )
    ReactDOM.render React.createElement(createAddressList), document.getElementById('accounts-list')
    callback(null, { account: accounts[0], password: '' })
combineSource: (dir, source, imports) ->
that = this
o = { encoding: 'UTF-8' }
ir = /import\ [\'\"](.+)[\'\"]\;/g
match = null
while (match = ir.exec(source))
iline = match[0]
fn = match[1]
# empty out already imported
if imports[fn]
source = source.replace(iline, '')
continue
imports[fn] = 1
subSource = fs.readFileSync("#{dir}/#{fn}", o)
match.source = that.combineSource(dir, subSource, imports)
source = source.replace(iline, match.source)
return source
  # Compile the active editor's Solidity source (with imports inlined) via
  # web3 and render one contract panel per compiled contract.
  compile: ->
    that = this
    editor = atom.workspace.getActiveTextEditor()
    filePath = editor.getPath()
    dir = path.dirname(filePath)
    source = that.combineSource(dir, editor.getText(), {})
    @checkConnection (error, callback) ->
      if error
        console.error error
        that.showErrorMessage 0, 'Error could not connect to local geth instance!'
      else
        web3.eth.defaultAccount = Coinbase
        console.log "Using coinbase: " + web3.eth.defaultAccount
        ###
        # TODO: Handle Compilation asynchronously and handle errors
        ###
        that.compiled = web3.eth.compile.solidity(source)
        # Clean View before creating
        that.atomSolidityView.destroyCompiled()
        console.log that.compiled
        # Create inputs for every contract
        for contractName of that.compiled
          # Get estimated gas (synchronous call, capped at 1,000,000)
          estimatedGas = web3.eth.estimateGas { from: web3.eth.defaultAccount, data: that.compiled[contractName].code, gas: 1000000 }
          ###
          # TODO: Use asynchronous call
          web3.eth.estimateGas({from: '0xmyaccout...', data: "0xc6888fa1fffffffffff…..", gas: 500000 }, function(err, result){
              if(!err && result !=== 500000) { … }
          });
          ###
          # contractName is the name of contract in JSON object
          bytecode = that.compiled[contractName].code
          # Get contract abi
          ContractABI = that.compiled[contractName].info.abiDefinition
          # get constructors for rendering display
          inputs = []
          for abiObj of ContractABI
            if ContractABI[abiObj].type is "constructor" && ContractABI[abiObj].inputs.length > 0
              inputs = ContractABI[abiObj].inputs
          # Create view
          that.atomSolidityView.setContractView(contractName, bytecode, ContractABI, inputs, estimatedGas)
        # Show contract code
        if not that.modalPanel.isVisible()
          that.modalPanel.show()
    return
build: ->
  # Render a "Create" button for every compiled contract and snapshot the
  # constructor inputs / gas estimate entered by the user.
  that = this
  # Keyed by contract name. Bug fix: was `[]` (an Array abused as a
  # string-keyed map); a plain object is the correct container.
  constructVars = {}
  console.log @compiled
  for contractName of @compiled
    variables = []
    estimatedGas = 0
    # Bug fix: reset the input cursor for every contract; it previously
    # leaked across iterations so only the first contract was read correctly.
    i = 0
    if document.getElementById(contractName + '_create')
      # contractName is the name of contract in JSON object
      bytecode = @compiled[contractName].code
      # Get contract abi
      ContractABI = @compiled[contractName].info.abiDefinition
      # Collect variable inputs
      inputVars = if document.getElementById(contractName + '_inputs') then document.getElementById(contractName + '_inputs').getElementsByTagName('input')
      if inputVars
        while i < inputVars.length
          if inputVars.item(i).getAttribute('id') == contractName + '_gas'
            # The gas field terminates the constructor-input list
            estimatedGas = inputVars.item(i).value
            inputVars.item(i).readOnly = true
            break
          inputObj = {
            "varName": inputVars.item(i).getAttribute('id'),
            "varValue": inputVars.item(i).value
          }
          variables[i] = inputObj
          inputVars.item(i).readOnly = true
          if inputVars.item(i).nextSibling.getAttribute('id') == contractName + '_create'
            break
          else
            i++
      constructVars[contractName] = {
        'contractName': contractName,
        'inputVariables': variables,
        'estimatedGas': estimatedGas
      }
      # Create React element for create button; the submit handler recovers
      # the contract name from the button's ref key.
      createButton = React.createClass(
        displayName: 'createButton'
        _handleSubmit: ->
          console.log constructVars
          that.create(that.compiled[Object.keys(this.refs)[0]].info.abiDefinition, that.compiled[Object.keys(this.refs)[0]].code, constructVars[Object.keys(this.refs)[0]], Object.keys(this.refs)[0], constructVars[Object.keys(this.refs)[0]].estimatedGas)
        render: ->
          React.createElement('form', { onSubmit: this._handleSubmit },
            React.createElement('input', {type: 'submit', value: 'Create', ref: contractName, className: 'btn btn-primary inline-block-tight'}, null))
      )
      ReactDOM.render React.createElement(createButton, null), document.getElementById(contractName + '_create')
prepareEnv: (contractName, callback) ->
  # Hide the "Create" button for the given contract and show a pending
  # status while the deployment transaction awaits confirmation.
  #
  # contractName - name of the contract whose DOM nodes are updated
  # callback     - node-style (error, result) callback
  #
  # Bug fix: the original read @contractName and ignored the parameter; it
  # only worked because create() happened to set @contractName first.
  createEl = document.getElementById(contractName + '_create')
  if createEl
    createEl.style.visibility = 'hidden'
    document.getElementById(contractName + '_stat').innerText = 'transaction sent, waiting for confirmation...'
    callback(null, true)
  else
    e = new Error('Could not parse input')
    callback(e, null)
# our asyncLoop
asyncLoop: (iterations, func, callback) ->
  # Drive `func` up to `iterations` times; `func` receives a cycle handle
  # exposing next()/iteration()/break(). `callback` runs exactly once, when
  # the loop completes or is broken out of.
  current = 0
  finished = false
  cycle =
    next: ->
      return if finished
      unless current < iterations
        finished = true
        callback()
        return
      current += 1
      func cycle
    iteration: ->
      # Index of the iteration currently being processed
      current - 1
    break: ->
      finished = true
      callback()
  cycle.next()
  cycle
# Construct function buttons from abi
constructFunctions: (@contractABI, callback) ->
  # For every function entry in the ABI, build its input descriptors and
  # hand [functionName, childInputs] to `callback` (invoked once per
  # function, not once overall).
  #
  # Bug fix: the original used `=` (assignment) instead of `is`, which
  # overwrote every ABI entry's type with 'function' and made the guard
  # always truthy.
  for contractFunction in contractABI
    if contractFunction.type is 'function' and contractFunction.name?
      @createChilds contractFunction, (error, childInputs) ->
        if !error
          callback(null, [contractFunction.name, childInputs])
        else
          callback(null, [null, null])
createChilds: (contractFunction, callback) ->
  # Map each ABI input of `contractFunction` to a [type, name] pair and
  # deliver the list (possibly empty) through the node-style callback.
  descriptors = ([input.type, input.name] for input in contractFunction.inputs)
  callback(null, descriptors)
# Construct react child inputs
create: (@abi, @code, @constructVars, @contractName, @estimatedGas) ->
  # Deploy the compiled contract through web3 and, once mined, render a
  # form of call buttons (one per ABI function) under the contract view.
  #
  # @abi           - ABI definition array for the contract
  # @code          - deployment bytecode (hex string)
  # @constructVars - { contractName, inputVariables, estimatedGas } from build()
  # @contractName  - key of the contract in the compiler output
  # @estimatedGas  - gas limit; falls back to 1000000 when not positive
  that = this
  @estimatedGas = if @estimatedGas > 0 then @estimatedGas else 1000000
  if Password == ''
    e = new Error('Empty password')
    console.error ("Empty password")
    @showErrorMessage 0, "No password provided"
    return
  # hide create button
  @prepareEnv @contractName, (err, callback) ->
    if err
      console.error err
    else
      # Use coinbase
      web3.eth.defaultAccount = Coinbase
      console.log "Using coinbase: " + web3.eth.defaultAccount
      # set variables and render display
      constructorS = []
      for i in that.constructVars.inputVariables
        constructorS.push i.varValue
      web3.personal.unlockAccount(web3.eth.defaultAccount, Password)
      # NOTE(review): toString() collapses all constructor args into one
      # comma-joined string argument — verify contracts with >1 argument.
      web3.eth.contract(that.abi).new constructorS.toString(), { data: that.code, from: web3.eth.defaultAccount, gas: that.estimatedGas }, (err, contract) ->
        if err
          console.error err
          that.showErrorMessage 129, err
          return
        # callback fires twice, we only want the second call when the contract is deployed
        else if contract.address
          myContract = contract
          console.log 'address: ' + myContract.address
          document.getElementById(that.contractName + '_stat').innerText = 'Mined!'
          document.getElementById(that.contractName + '_stat').setAttribute('class', 'icon icon-zap') # Add icon class
          document.getElementById(that.contractName + '_address').innerText = myContract.address
          document.getElementById(that.contractName + '_address').setAttribute('class', 'icon icon-key') # Add icon class
          # Check every key, if it is a function create call buttons,
          # for every function there could be many call methods,
          # for every method there cpould be many inputs
          # Innermost callback will have inputs for all abi objects
          # Lets think the Innermost function
          # Construct view for function call view
          functionABI = React.createClass(
            displayName: 'callFunctions'
            getInitialState: ->
              { childFunctions: [] }
            componentDidMount: ->
              self = this
              # constructFunctions invokes this callback once per ABI function
              that.constructFunctions that.abi, (error, childFunctions) ->
                if !error
                  # NOTE(review): mutates state directly and forces a render
                  # instead of using setState — works, but non-idiomatic.
                  self.state.childFunctions.push(childFunctions)
                  self.forceUpdate()
            _handleChange: (childFunction, event) ->
              console.log event.target.value
              this.setState { value: event.target.value }
            _handleSubmit: (childFunction, event) ->
              # Get arguments ready here
              # NOTE(review): no event.preventDefault() here — confirm the
              # page does not reload on submit.
              that.argsToArray this.refs, childFunction, (error, argArray) ->
                if !error
                  that.call(myContract, childFunction, argArray)
            render: ->
              self = this
              React.createElement 'div', { htmlFor: 'contractFunctions' }, this.state.childFunctions.map((childFunction, i) ->
                React.createElement 'form', { onSubmit: self._handleSubmit.bind(this, childFunction[0]), key: i, ref: childFunction[0] },
                  React.createElement 'input', { type: 'submit', readOnly: 'true', value: childFunction[0], className: 'text-subtle call-button' }
                  childFunction[1].map((childInput, j) ->
                    # NOTE(review): 'tye' looks like a typo for 'type' — confirm and fix.
                    React.createElement 'input', { tye: 'text', handleChange: self._handleChange, placeholder: childInput[0] + ' ' + childInput[1], className: 'call-button-values' }#, ref: if childFunction[0] then childFunction[0][j] else "Constructor" }
                  )
              )
          )
          ReactDOM.render React.createElement(functionABI), document.getElementById(that.contractName + '_call')
        else if !contract.address
          # Transaction sent but not mined yet: show the tx hash + spinner
          contractStat = React.createClass(
            render: ->
              React.createElement 'div', { htmlFor: 'contractStat' },
                React.createElement 'span', { className: 'inline-block highlight' }, 'TransactionHash: '
                React.createElement 'pre', { className: 'large-code' }, contract.transactionHash
                React.createElement 'span', { className: 'stat-mining stat-mining-align' }, 'waiting to be mined '
                React.createElement 'span', { className: 'loading loading-spinner-tiny inline-block stat-mining-align' }
          )
          ReactDOM.render React.createElement(contractStat), document.getElementById(that.contractName + '_stat')
          # document.getElementById(that.contractName + '_stat').innerText = "Contract transaction send: TransactionHash: " + contract.transactionHash + " waiting to be mined..."
          console.log "Contract transaction send: TransactionHash: " + contract.transactionHash + " waiting to be mined..."
showOutput: (address, output) ->
  # Display a contract call's address and result as green messages in the
  # atom-message-panel.
  panel = new MessagePanelView(title: 'Solidity compiler output')
  panel.attach()
  for text in ['Contract address: ' + address, 'Contract output: ' + output]
    panel.add new PlainMessageView(message: text, className: 'green-message')
argsToArray: (@reactElements, @childFunction, callback) ->
  that = this
  # For every childNodes of childFunction
  # Get value of childFunction
  # Trim value having name of the function
  # Collect the values of all non-submit inputs inside the form ref named
  # `@childFunction`; the submit button itself is skipped.
  # NOTE(review): indexes @reactElements[@childFunction] numerically,
  # assuming the form node is array-indexable by child position — confirm.
  args = new Array()
  @asyncLoop @reactElements[@childFunction].childNodes.length, ((cycle) ->
    if that.reactElements[that.childFunction][cycle.iteration()].type != 'submit'
      args.push(that.reactElements[that.childFunction][cycle.iteration()].value)
    cycle.next()
  ), ->
    callback(null, args)
checkArray: (@arguments, callback) ->
  # Validation hook for call arguments; currently a pass-through.
  # TODO: Check for empty elements and remove them
  # TODO: remove any unwanted element that has no text in it
  callback(null, @arguments)
call: (@myContract, @functionName, @arguments) ->
  # Invoke `functionName` on the deployed contract with the collected
  # arguments, then display the result in the output panel.
  that = this
  console.log @myContract
  console.log @functionName
  console.log @arguments
  @checkArray @arguments, (error, args) ->
    if !error
      if args.length > 0
        # Unlock with the stored passphrase before each invocation
        web3.personal.unlockAccount(web3.eth.defaultAccount, Password)
        # NOTE(review): `this` inside this callback is not the package
        # object; apply() merely sets the receiver for the contract method.
        result = that.myContract[that.functionName].apply(this, args)
      else
        web3.personal.unlockAccount(web3.eth.defaultAccount, Password)
        result = that.myContract[that.functionName]()
      console.log result
      that.showOutput that.myContract.address, result
toggle: ->
  # Flip the side panel between hidden and shown.
  panel = @modalPanel
  if panel.isVisible() then panel.hide() else panel.show()
| 123928 | AtomSolidityView = require './ethereum-interface-view'
path = require 'path'
fs = require 'fs'
{CompositeDisposable} = require 'atom'
Web3 = require 'web3'
React = require 'react'
ReactDOM = require 'react-dom'
{MessagePanelView, PlainMessageView, LineMessageView} = require 'atom-message-panel'
# Module-level state: selected base account and its passphrase, filled in
# by getBaseAccount/checkConnection.
Coinbase = ''
Password = ''
rpcAddress = atom.config.get('atom-ethereum-interface.rpcAddress')
# Reuse an injected provider when one exists, otherwise connect over HTTP
# to the configured RPC endpoint.
if typeof web3 != 'undefined'
  web3 = new Web3(web3.currentProvider)
else
  web3 = new Web3(new (Web3.providers.HttpProvider)(rpcAddress))
module.exports = AtomSolidity =
  # View instance rendered into the right-hand panel (set in activate)
  atomSolidityView: null
  # Atom panel wrapping the view (set in activate)
  modalPanel: null
  # CompositeDisposable holding command registrations (set in activate)
  subscriptions: null
activate: (state) ->
  # Atom package entry point: build the view/panel, watch the RPC address
  # setting, and register the package commands.
  @atomSolidityView = new AtomSolidityView(state.atomSolidityViewState)
  @modalPanel = atom.workspace.addRightPanel(item: @atomSolidityView.getElement(), visible: false)
  atom.config.observe 'atom-ethereum-interface.rpcAddress', (newValue) ->
    # TODO: add url validation
    urlPattern = new RegExp('(http)://?')
    if urlPattern.test(newValue)
      rpcAddress = newValue
  # Empty global variable compiled
  @compiled = {}
  # Events subscribed to in atom's system can be easily cleaned up with a CompositeDisposable
  @subscriptions = new CompositeDisposable
  # Register command that toggles this view
  @subscriptions.add atom.commands.add 'atom-workspace', 'eth-interface:compile': => @compile()
  @subscriptions.add atom.commands.add 'atom-workspace', 'eth-interface:build': => @build()
  @subscriptions.add atom.commands.add 'atom-workspace', 'eth-interface:create': => @create()
  @subscriptions.add atom.commands.add 'atom-workspace', 'eth-interface:toggle': => @toggleView()
deactivate: ->
  # Tear down panel, command subscriptions, and view on package unload.
  @modalPanel.destroy()
  @subscriptions.dispose()
  @atomSolidityView.destroy()
serialize: ->
  # Persist the view state across Atom restarts.
  atomSolidityViewState: @atomSolidityView.serialize()
checkConnection: (callback)->
  # Verify the web3 RPC connection and make sure Coinbase/Password are set.
  # `callback` is node-style (error, result).
  that = this
  if !web3.isConnected()
    callback('Error could not connect to local geth instance!', null)
  else
    # If passphrase is not already set
    if Password == ''
      # Set coinbase
      # List all accounts and set selected as coinbase
      accounts = web3.eth.accounts
      # NOTE(review): the inner parameter below shadows the outer `callback`
      # and holds the result object, not a function — rename candidate.
      that.getBaseAccount accounts, (err, callback) ->
        if err
          console.log err
        else
          Coinbase = callback.account
          Password = callback.password
          # Check if account is locked ? then prompt for password
          # NOTE(review): checkUnlock's signature is (Coinbase, callback);
          # this passes a function as Coinbase and never gets called back.
          that.checkUnlock (err, callback) ->
            callback(null, true)
    callback(null, true)
checkUnlock: (Coinbase, callback) ->
  # Stub: intended to check whether `Coinbase` is locked and unlock it.
  # NOTE(review): never invokes `callback`, and checkConnection calls it
  # with a single function argument (bound to Coinbase) — confirm intent.
  # web3.personal.unlockAccount("Coinbase", password)
  console.log "In checkUnlock"
toggleView: ->
  # Flip the side panel between hidden and shown.
  panel = @modalPanel
  if panel.isVisible() then panel.hide() else panel.show()
showErrorMessage: (line, message, callback) ->
  # Open the message panel and append one red, line-anchored message.
  # (`callback` is accepted for signature compatibility but never used.)
  panel = new MessagePanelView(title: 'Solidity compiler messages')
  panel.attach()
  entry = new LineMessageView(line: line, message: message, className: 'red-message')
  panel.add entry
getBaseAccount: (accounts, callback) ->
  # Render an account selector + password form into #accounts-list and
  # report the chosen account/password through `callback`.
  #
  # Fix: restores the corrupted `<PASSWORD>` placeholder tokens to the
  # working values used by the rest of this file — the module-level
  # `Password` for the initial state and the literal "password"/"Password"
  # strings for the input element.
  #
  # NOTE(review): `callback` fires twice — once synchronously below with an
  # empty password (default account), and again whenever the form submits.
  that = this
  createAddressList = React.createClass(
    displayName: 'addressList'
    getInitialState: ->
      { account: accounts[0], password: Password }
    _handleChange: (event) ->
      this.setState { account: event.target.value }
    _handlePasswordChange: (event) ->
      this.setState { password: event.target.value }
    _handlePassword: (event) ->
      event.preventDefault()
      # Return account and password
      callback(null, this.state)
    render: ->
      # create dropdown list for accounts
      React.createElement 'div', { htmlFor: 'acc-n-pass', className: 'icon icon-link' },
        React.createElement 'select', { onChange: this._handleChange, value: this.state.account }, accounts.map (account, i) ->
          React.createElement 'option', { value: account }, account #options are address
        React.createElement 'form', { onSubmit: this._handlePassword, className: 'icon icon-lock' },
          React.createElement 'input', { type: 'password', uniqueName: "password", placeholder: "Password", value: this.state.password, onChange: this._handlePasswordChange }
          React.createElement 'input', { type: 'submit', value: 'Unlock' }
  )
  ReactDOM.render React.createElement(createAddressList), document.getElementById('accounts-list')
  callback(null, { account: accounts[0], password: '' })
combineSource: (dir, source, imports) ->
  # Recursively inline Solidity `import "file";` statements so the compiler
  # receives a single self-contained source string.
  #
  # dir     - directory imports are resolved against
  # source  - Solidity source text
  # imports - map of already-inlined filenames (deduplicates repeats)
  #
  # Returns the combined source.
  opts = { encoding: 'UTF-8' }
  importRe = /import\ [\'\"](.+)[\'\"]\;/g
  while (match = importRe.exec(source))
    iline = match[0]
    fn = match[1]
    if imports[fn]
      # Already inlined elsewhere: just drop the duplicate import line
      source = source.replace(iline, '')
    else
      imports[fn] = 1
      subSource = fs.readFileSync("#{dir}/#{fn}", opts)
      # The recursive call returns import-free text, so re-scanning it below
      # cannot loop forever.
      source = source.replace(iline, @combineSource(dir, subSource, imports))
    # Bug fix: `source` was rewritten above, so the global regex's saved
    # lastIndex no longer points at a valid position and could skip imports;
    # restart the scan from the beginning of the updated string.
    importRe.lastIndex = 0
  return source
compile: ->
  # Compile the active editor's Solidity source (with imports inlined) via
  # the connected node's eth_compileSolidity RPC, then render one contract
  # view per compiled contract in the side panel.
  that = this
  editor = atom.workspace.getActiveTextEditor()
  filePath = editor.getPath()
  dir = path.dirname(filePath)
  # Inline all `import "...";` statements relative to the file's directory
  source = that.combineSource(dir, editor.getText(), {})
  @checkConnection (error, callback) ->
    if error
      console.error error
      that.showErrorMessage 0, 'Error could not connect to local geth instance!'
    else
      web3.eth.defaultAccount = Coinbase
      console.log "Using coinbase: " + web3.eth.defaultAccount
      ###
      # TODO: Handle Compilation asynchronously and handle errors
      ###
      # Synchronous RPC call; blocks the UI thread until the node answers
      that.compiled = web3.eth.compile.solidity(source)
      # Clean View before creating
      that.atomSolidityView.destroyCompiled()
      console.log that.compiled
      # Create inpus for every contract
      for contractName of that.compiled
        # Get estimated gas
        estimatedGas = web3.eth.estimateGas { from: web3.eth.defaultAccount, data: that.compiled[contractName].code, gas: 1000000 }
        ###
        # TODO: Use asynchronous call
        web3.eth.estimateGas({from: '0xmyaccout...', data: "0xc6888fa1fffffffffff…..", gas: 500000 }, function(err, result){
        if(!err && result !=== 500000) { … }
        });
        ###
        # contractName is the name of contract in JSON object
        bytecode = that.compiled[contractName].code
        # Get contract abi
        ContractABI = that.compiled[contractName].info.abiDefinition
        # get constructors for rendering display
        inputs = []
        for abiObj of ContractABI
          if ContractABI[abiObj].type is "constructor" && ContractABI[abiObj].inputs.length > 0
            inputs = ContractABI[abiObj].inputs
        # Create view
        that.atomSolidityView.setContractView(contractName, bytecode, ContractABI, inputs, estimatedGas)
      # Show contract code
      if not that.modalPanel.isVisible()
        that.modalPanel.show()
  return
build: ->
  # Render a "Create" button for every compiled contract and snapshot the
  # constructor inputs / gas estimate entered by the user.
  that = this
  # Keyed by contract name. Bug fix: was `[]` (an Array abused as a
  # string-keyed map); a plain object is the correct container.
  constructVars = {}
  console.log @compiled
  for contractName of @compiled
    variables = []
    estimatedGas = 0
    # Bug fix: reset the input cursor for every contract; it previously
    # leaked across iterations so only the first contract was read correctly.
    i = 0
    if document.getElementById(contractName + '_create')
      # contractName is the name of contract in JSON object
      bytecode = @compiled[contractName].code
      # Get contract abi
      ContractABI = @compiled[contractName].info.abiDefinition
      # Collect variable inputs
      inputVars = if document.getElementById(contractName + '_inputs') then document.getElementById(contractName + '_inputs').getElementsByTagName('input')
      if inputVars
        while i < inputVars.length
          if inputVars.item(i).getAttribute('id') == contractName + '_gas'
            # The gas field terminates the constructor-input list
            estimatedGas = inputVars.item(i).value
            inputVars.item(i).readOnly = true
            break
          inputObj = {
            "varName": inputVars.item(i).getAttribute('id'),
            "varValue": inputVars.item(i).value
          }
          variables[i] = inputObj
          inputVars.item(i).readOnly = true
          if inputVars.item(i).nextSibling.getAttribute('id') == contractName + '_create'
            break
          else
            i++
      constructVars[contractName] = {
        'contractName': contractName,
        'inputVariables': variables,
        'estimatedGas': estimatedGas
      }
      # Create React element for create button; the submit handler recovers
      # the contract name from the button's ref key.
      createButton = React.createClass(
        displayName: 'createButton'
        _handleSubmit: ->
          console.log constructVars
          that.create(that.compiled[Object.keys(this.refs)[0]].info.abiDefinition, that.compiled[Object.keys(this.refs)[0]].code, constructVars[Object.keys(this.refs)[0]], Object.keys(this.refs)[0], constructVars[Object.keys(this.refs)[0]].estimatedGas)
        render: ->
          React.createElement('form', { onSubmit: this._handleSubmit },
            React.createElement('input', {type: 'submit', value: 'Create', ref: contractName, className: 'btn btn-primary inline-block-tight'}, null))
      )
      ReactDOM.render React.createElement(createButton, null), document.getElementById(contractName + '_create')
prepareEnv: (contractName, callback) ->
  # Hide the "Create" button for the given contract and show a pending
  # status while the deployment transaction awaits confirmation.
  #
  # contractName - name of the contract whose DOM nodes are updated
  # callback     - node-style (error, result) callback
  #
  # Bug fix: the original read @contractName and ignored the parameter; it
  # only worked because create() happened to set @contractName first.
  createEl = document.getElementById(contractName + '_create')
  if createEl
    createEl.style.visibility = 'hidden'
    document.getElementById(contractName + '_stat').innerText = 'transaction sent, waiting for confirmation...'
    callback(null, true)
  else
    e = new Error('Could not parse input')
    callback(e, null)
# our asyncLoop
asyncLoop: (iterations, func, callback) ->
  # Drive `func` up to `iterations` times; `func` receives a cycle handle
  # exposing next()/iteration()/break(). `callback` runs exactly once, when
  # the loop completes or is broken out of.
  current = 0
  finished = false
  cycle =
    next: ->
      return if finished
      unless current < iterations
        finished = true
        callback()
        return
      current += 1
      func cycle
    iteration: ->
      # Index of the iteration currently being processed
      current - 1
    break: ->
      finished = true
      callback()
  cycle.next()
  cycle
# Construct function buttons from abi
constructFunctions: (@contractABI, callback) ->
  # For every function entry in the ABI, build its input descriptors and
  # hand [functionName, childInputs] to `callback` (invoked once per
  # function, not once overall).
  #
  # Bug fix: the original used `=` (assignment) instead of `is`, which
  # overwrote every ABI entry's type with 'function' and made the guard
  # always truthy.
  for contractFunction in contractABI
    if contractFunction.type is 'function' and contractFunction.name?
      @createChilds contractFunction, (error, childInputs) ->
        if !error
          callback(null, [contractFunction.name, childInputs])
        else
          callback(null, [null, null])
createChilds: (contractFunction, callback) ->
  # Map each ABI input of `contractFunction` to a [type, name] pair and
  # deliver the list (possibly empty) through the node-style callback.
  descriptors = ([input.type, input.name] for input in contractFunction.inputs)
  callback(null, descriptors)
# Construct react child inputs
create: (@abi, @code, @constructVars, @contractName, @estimatedGas) ->
  # Deploy the compiled contract through web3 and, once mined, render a
  # form of call buttons (one per ABI function) under the contract view.
  #
  # @abi           - ABI definition array for the contract
  # @code          - deployment bytecode (hex string)
  # @constructVars - { contractName, inputVariables, estimatedGas } from build()
  # @contractName  - key of the contract in the compiler output
  # @estimatedGas  - gas limit; falls back to 1000000 when not positive
  that = this
  @estimatedGas = if @estimatedGas > 0 then @estimatedGas else 1000000
  if Password == ''
    e = new Error('Empty password')
    console.error ("Empty password")
    @showErrorMessage 0, "No password provided"
    return
  # hide create button
  @prepareEnv @contractName, (err, callback) ->
    if err
      console.error err
    else
      # Use coinbase
      web3.eth.defaultAccount = Coinbase
      console.log "Using coinbase: " + web3.eth.defaultAccount
      # set variables and render display
      constructorS = []
      for i in that.constructVars.inputVariables
        constructorS.push i.varValue
      web3.personal.unlockAccount(web3.eth.defaultAccount, Password)
      # NOTE(review): toString() collapses all constructor args into one
      # comma-joined string argument — verify contracts with >1 argument.
      web3.eth.contract(that.abi).new constructorS.toString(), { data: that.code, from: web3.eth.defaultAccount, gas: that.estimatedGas }, (err, contract) ->
        if err
          console.error err
          that.showErrorMessage 129, err
          return
        # callback fires twice, we only want the second call when the contract is deployed
        else if contract.address
          myContract = contract
          console.log 'address: ' + myContract.address
          document.getElementById(that.contractName + '_stat').innerText = 'Mined!'
          document.getElementById(that.contractName + '_stat').setAttribute('class', 'icon icon-zap') # Add icon class
          document.getElementById(that.contractName + '_address').innerText = myContract.address
          document.getElementById(that.contractName + '_address').setAttribute('class', 'icon icon-key') # Add icon class
          # Check every key, if it is a function create call buttons,
          # for every function there could be many call methods,
          # for every method there cpould be many inputs
          # Innermost callback will have inputs for all abi objects
          # Lets think the Innermost function
          # Construct view for function call view
          functionABI = React.createClass(
            displayName: 'callFunctions'
            getInitialState: ->
              { childFunctions: [] }
            componentDidMount: ->
              self = this
              # constructFunctions invokes this callback once per ABI function
              that.constructFunctions that.abi, (error, childFunctions) ->
                if !error
                  # NOTE(review): mutates state directly and forces a render
                  # instead of using setState — works, but non-idiomatic.
                  self.state.childFunctions.push(childFunctions)
                  self.forceUpdate()
            _handleChange: (childFunction, event) ->
              console.log event.target.value
              this.setState { value: event.target.value }
            _handleSubmit: (childFunction, event) ->
              # Get arguments ready here
              # NOTE(review): no event.preventDefault() here — confirm the
              # page does not reload on submit.
              that.argsToArray this.refs, childFunction, (error, argArray) ->
                if !error
                  that.call(myContract, childFunction, argArray)
            render: ->
              self = this
              React.createElement 'div', { htmlFor: 'contractFunctions' }, this.state.childFunctions.map((childFunction, i) ->
                React.createElement 'form', { onSubmit: self._handleSubmit.bind(this, childFunction[0]), key: i, ref: childFunction[0] },
                  React.createElement 'input', { type: 'submit', readOnly: 'true', value: childFunction[0], className: 'text-subtle call-button' }
                  childFunction[1].map((childInput, j) ->
                    # NOTE(review): 'tye' looks like a typo for 'type' — confirm and fix.
                    React.createElement 'input', { tye: 'text', handleChange: self._handleChange, placeholder: childInput[0] + ' ' + childInput[1], className: 'call-button-values' }#, ref: if childFunction[0] then childFunction[0][j] else "Constructor" }
                  )
              )
          )
          ReactDOM.render React.createElement(functionABI), document.getElementById(that.contractName + '_call')
        else if !contract.address
          # Transaction sent but not mined yet: show the tx hash + spinner
          contractStat = React.createClass(
            render: ->
              React.createElement 'div', { htmlFor: 'contractStat' },
                React.createElement 'span', { className: 'inline-block highlight' }, 'TransactionHash: '
                React.createElement 'pre', { className: 'large-code' }, contract.transactionHash
                React.createElement 'span', { className: 'stat-mining stat-mining-align' }, 'waiting to be mined '
                React.createElement 'span', { className: 'loading loading-spinner-tiny inline-block stat-mining-align' }
          )
          ReactDOM.render React.createElement(contractStat), document.getElementById(that.contractName + '_stat')
          # document.getElementById(that.contractName + '_stat').innerText = "Contract transaction send: TransactionHash: " + contract.transactionHash + " waiting to be mined..."
          console.log "Contract transaction send: TransactionHash: " + contract.transactionHash + " waiting to be mined..."
showOutput: (address, output) ->
  # Display a contract call's address and result as green messages in the
  # atom-message-panel.
  panel = new MessagePanelView(title: 'Solidity compiler output')
  panel.attach()
  for text in ['Contract address: ' + address, 'Contract output: ' + output]
    panel.add new PlainMessageView(message: text, className: 'green-message')
argsToArray: (@reactElements, @childFunction, callback) ->
  that = this
  # For every childNodes of childFunction
  # Get value of childFunction
  # Trim value having name of the function
  # Collect the values of all non-submit inputs inside the form ref named
  # `@childFunction`; the submit button itself is skipped.
  # NOTE(review): indexes @reactElements[@childFunction] numerically,
  # assuming the form node is array-indexable by child position — confirm.
  args = new Array()
  @asyncLoop @reactElements[@childFunction].childNodes.length, ((cycle) ->
    if that.reactElements[that.childFunction][cycle.iteration()].type != 'submit'
      args.push(that.reactElements[that.childFunction][cycle.iteration()].value)
    cycle.next()
  ), ->
    callback(null, args)
checkArray: (@arguments, callback) ->
  # Validation hook for call arguments; currently a pass-through.
  # TODO: Check for empty elements and remove them
  # TODO: remove any unwanted element that has no text in it
  callback(null, @arguments)
call: (@myContract, @functionName, @arguments) ->
  # Invoke `functionName` on the deployed contract with the collected
  # arguments, then display the result in the output panel.
  that = this
  console.log @myContract
  console.log @functionName
  console.log @arguments
  @checkArray @arguments, (error, args) ->
    if !error
      if args.length > 0
        # Unlock with the stored passphrase before each invocation
        web3.personal.unlockAccount(web3.eth.defaultAccount, Password)
        # NOTE(review): `this` inside this callback is not the package
        # object; apply() merely sets the receiver for the contract method.
        result = that.myContract[that.functionName].apply(this, args)
      else
        web3.personal.unlockAccount(web3.eth.defaultAccount, Password)
        result = that.myContract[that.functionName]()
      console.log result
      that.showOutput that.myContract.address, result
toggle: ->
  # Flip the side panel between hidden and shown.
  panel = @modalPanel
  if panel.isVisible() then panel.hide() else panel.show()
| true | AtomSolidityView = require './ethereum-interface-view'
path = require 'path'
fs = require 'fs'
{CompositeDisposable} = require 'atom'
Web3 = require 'web3'
React = require 'react'
ReactDOM = require 'react-dom'
{MessagePanelView, PlainMessageView, LineMessageView} = require 'atom-message-panel'
# Module-level state: selected base account and its passphrase, filled in
# by getBaseAccount/checkConnection.
Coinbase = ''
Password = ''
rpcAddress = atom.config.get('atom-ethereum-interface.rpcAddress')
# Reuse an injected provider when one exists, otherwise connect over HTTP
# to the configured RPC endpoint.
if typeof web3 != 'undefined'
  web3 = new Web3(web3.currentProvider)
else
  web3 = new Web3(new (Web3.providers.HttpProvider)(rpcAddress))
module.exports = AtomSolidity =
  # View instance rendered into the right-hand panel (set in activate)
  atomSolidityView: null
  # Atom panel wrapping the view (set in activate)
  modalPanel: null
  # CompositeDisposable holding command registrations (set in activate)
  subscriptions: null
activate: (state) ->
  # Atom package entry point: build the view/panel, watch the RPC address
  # setting, and register the package commands.
  @atomSolidityView = new AtomSolidityView(state.atomSolidityViewState)
  @modalPanel = atom.workspace.addRightPanel(item: @atomSolidityView.getElement(), visible: false)
  atom.config.observe 'atom-ethereum-interface.rpcAddress', (newValue) ->
    # TODO: add url validation
    urlPattern = new RegExp('(http)://?')
    if urlPattern.test(newValue)
      rpcAddress = newValue
  # Empty global variable compiled
  @compiled = {}
  # Events subscribed to in atom's system can be easily cleaned up with a CompositeDisposable
  @subscriptions = new CompositeDisposable
  # Register command that toggles this view
  @subscriptions.add atom.commands.add 'atom-workspace', 'eth-interface:compile': => @compile()
  @subscriptions.add atom.commands.add 'atom-workspace', 'eth-interface:build': => @build()
  @subscriptions.add atom.commands.add 'atom-workspace', 'eth-interface:create': => @create()
  @subscriptions.add atom.commands.add 'atom-workspace', 'eth-interface:toggle': => @toggleView()
deactivate: ->
  # Tear down panel, command subscriptions, and view on package unload.
  @modalPanel.destroy()
  @subscriptions.dispose()
  @atomSolidityView.destroy()
serialize: ->
  # Persist the view state across Atom restarts.
  atomSolidityViewState: @atomSolidityView.serialize()
checkConnection: (callback)->
  # Verify the web3 RPC connection and make sure Coinbase/Password are set.
  # `callback` is node-style (error, result).
  that = this
  if !web3.isConnected()
    callback('Error could not connect to local geth instance!', null)
  else
    # If passphrase is not already set
    if Password == ''
      # Set coinbase
      # List all accounts and set selected as coinbase
      accounts = web3.eth.accounts
      # NOTE(review): the inner parameter below shadows the outer `callback`
      # and holds the result object, not a function — rename candidate.
      that.getBaseAccount accounts, (err, callback) ->
        if err
          console.log err
        else
          Coinbase = callback.account
          Password = callback.password
          # Check if account is locked ? then prompt for password
          # NOTE(review): checkUnlock's signature is (Coinbase, callback);
          # this passes a function as Coinbase and never gets called back.
          that.checkUnlock (err, callback) ->
            callback(null, true)
    callback(null, true)
checkUnlock: (Coinbase, callback) ->
  # Stub: intended to check whether `Coinbase` is locked and unlock it.
  # NOTE(review): never invokes `callback`, and checkConnection calls it
  # with a single function argument (bound to Coinbase) — confirm intent.
  # web3.personal.unlockAccount("Coinbase", password)
  console.log "In checkUnlock"
toggleView: ->
  # Flip the side panel between hidden and shown.
  panel = @modalPanel
  if panel.isVisible() then panel.hide() else panel.show()
showErrorMessage: (line, message, callback) ->
  # Open the message panel and append one red, line-anchored message.
  # (`callback` is accepted for signature compatibility but never used.)
  panel = new MessagePanelView(title: 'Solidity compiler messages')
  panel.attach()
  entry = new LineMessageView(line: line, message: message, className: 'red-message')
  panel.add entry
getBaseAccount: (accounts, callback) ->
  # Render an account selector + password form into #accounts-list and
  # report the chosen account/password through `callback`.
  #
  # Fix: restores the corrupted `PI:PASSWORD:...` placeholder tokens to the
  # working values used by the rest of this file — the module-level
  # `Password` for the initial state and the literal "password"/"Password"
  # strings for the input element.
  #
  # NOTE(review): `callback` fires twice — once synchronously below with an
  # empty password (default account), and again whenever the form submits.
  that = this
  createAddressList = React.createClass(
    displayName: 'addressList'
    getInitialState: ->
      { account: accounts[0], password: Password }
    _handleChange: (event) ->
      this.setState { account: event.target.value }
    _handlePasswordChange: (event) ->
      this.setState { password: event.target.value }
    _handlePassword: (event) ->
      event.preventDefault()
      # Return account and password
      callback(null, this.state)
    render: ->
      # create dropdown list for accounts
      React.createElement 'div', { htmlFor: 'acc-n-pass', className: 'icon icon-link' },
        React.createElement 'select', { onChange: this._handleChange, value: this.state.account }, accounts.map (account, i) ->
          React.createElement 'option', { value: account }, account #options are address
        React.createElement 'form', { onSubmit: this._handlePassword, className: 'icon icon-lock' },
          React.createElement 'input', { type: 'password', uniqueName: "password", placeholder: "Password", value: this.state.password, onChange: this._handlePasswordChange }
          React.createElement 'input', { type: 'submit', value: 'Unlock' }
  )
  ReactDOM.render React.createElement(createAddressList), document.getElementById('accounts-list')
  callback(null, { account: accounts[0], password: '' })
combineSource: (dir, source, imports) ->
  # Recursively inline Solidity `import "file";` statements so the compiler
  # receives a single self-contained source string.
  #
  # dir     - directory imports are resolved against
  # source  - Solidity source text
  # imports - map of already-inlined filenames (deduplicates repeats)
  #
  # Returns the combined source.
  opts = { encoding: 'UTF-8' }
  importRe = /import\ [\'\"](.+)[\'\"]\;/g
  while (match = importRe.exec(source))
    iline = match[0]
    fn = match[1]
    if imports[fn]
      # Already inlined elsewhere: just drop the duplicate import line
      source = source.replace(iline, '')
    else
      imports[fn] = 1
      subSource = fs.readFileSync("#{dir}/#{fn}", opts)
      # The recursive call returns import-free text, so re-scanning it below
      # cannot loop forever.
      source = source.replace(iline, @combineSource(dir, subSource, imports))
    # Bug fix: `source` was rewritten above, so the global regex's saved
    # lastIndex no longer points at a valid position and could skip imports;
    # restart the scan from the beginning of the updated string.
    importRe.lastIndex = 0
  return source
compile: ->
  # Compile the active editor's Solidity source (with imports inlined) via
  # the connected node's eth_compileSolidity RPC, then render one contract
  # view per compiled contract in the side panel.
  that = this
  editor = atom.workspace.getActiveTextEditor()
  filePath = editor.getPath()
  dir = path.dirname(filePath)
  # Inline all `import "...";` statements relative to the file's directory
  source = that.combineSource(dir, editor.getText(), {})
  @checkConnection (error, callback) ->
    if error
      console.error error
      that.showErrorMessage 0, 'Error could not connect to local geth instance!'
    else
      web3.eth.defaultAccount = Coinbase
      console.log "Using coinbase: " + web3.eth.defaultAccount
      ###
      # TODO: Handle Compilation asynchronously and handle errors
      ###
      # Synchronous RPC call; blocks the UI thread until the node answers
      that.compiled = web3.eth.compile.solidity(source)
      # Clean View before creating
      that.atomSolidityView.destroyCompiled()
      console.log that.compiled
      # Create inpus for every contract
      for contractName of that.compiled
        # Get estimated gas
        estimatedGas = web3.eth.estimateGas { from: web3.eth.defaultAccount, data: that.compiled[contractName].code, gas: 1000000 }
        ###
        # TODO: Use asynchronous call
        web3.eth.estimateGas({from: '0xmyaccout...', data: "0xc6888fa1fffffffffff…..", gas: 500000 }, function(err, result){
        if(!err && result !=== 500000) { … }
        });
        ###
        # contractName is the name of contract in JSON object
        bytecode = that.compiled[contractName].code
        # Get contract abi
        ContractABI = that.compiled[contractName].info.abiDefinition
        # get constructors for rendering display
        inputs = []
        for abiObj of ContractABI
          if ContractABI[abiObj].type is "constructor" && ContractABI[abiObj].inputs.length > 0
            inputs = ContractABI[abiObj].inputs
        # Create view
        that.atomSolidityView.setContractView(contractName, bytecode, ContractABI, inputs, estimatedGas)
      # Show contract code
      if not that.modalPanel.isVisible()
        that.modalPanel.show()
  return
build: ->
  # Render a "Create" button for every compiled contract and snapshot the
  # constructor inputs / gas estimate entered by the user.
  that = this
  # Keyed by contract name. Bug fix: was `[]` (an Array abused as a
  # string-keyed map); a plain object is the correct container.
  constructVars = {}
  console.log @compiled
  for contractName of @compiled
    variables = []
    estimatedGas = 0
    # Bug fix: reset the input cursor for every contract; it previously
    # leaked across iterations so only the first contract was read correctly.
    i = 0
    if document.getElementById(contractName + '_create')
      # contractName is the name of contract in JSON object
      bytecode = @compiled[contractName].code
      # Get contract abi
      ContractABI = @compiled[contractName].info.abiDefinition
      # Collect variable inputs
      inputVars = if document.getElementById(contractName + '_inputs') then document.getElementById(contractName + '_inputs').getElementsByTagName('input')
      if inputVars
        while i < inputVars.length
          if inputVars.item(i).getAttribute('id') == contractName + '_gas'
            # The gas field terminates the constructor-input list
            estimatedGas = inputVars.item(i).value
            inputVars.item(i).readOnly = true
            break
          inputObj = {
            "varName": inputVars.item(i).getAttribute('id'),
            "varValue": inputVars.item(i).value
          }
          variables[i] = inputObj
          inputVars.item(i).readOnly = true
          if inputVars.item(i).nextSibling.getAttribute('id') == contractName + '_create'
            break
          else
            i++
      constructVars[contractName] = {
        'contractName': contractName,
        'inputVariables': variables,
        'estimatedGas': estimatedGas
      }
      # Create React element for create button; the submit handler recovers
      # the contract name from the button's ref key.
      createButton = React.createClass(
        displayName: 'createButton'
        _handleSubmit: ->
          console.log constructVars
          that.create(that.compiled[Object.keys(this.refs)[0]].info.abiDefinition, that.compiled[Object.keys(this.refs)[0]].code, constructVars[Object.keys(this.refs)[0]], Object.keys(this.refs)[0], constructVars[Object.keys(this.refs)[0]].estimatedGas)
        render: ->
          React.createElement('form', { onSubmit: this._handleSubmit },
            React.createElement('input', {type: 'submit', value: 'Create', ref: contractName, className: 'btn btn-primary inline-block-tight'}, null))
      )
      ReactDOM.render React.createElement(createButton, null), document.getElementById(contractName + '_create')
prepareEnv: (contractName, callback) ->
  # Hide the "Create" button for the given contract and show a pending
  # status while the deployment transaction awaits confirmation.
  #
  # contractName - name of the contract whose DOM nodes are updated
  # callback     - node-style (error, result) callback
  #
  # Bug fix: the original read @contractName and ignored the parameter; it
  # only worked because create() happened to set @contractName first.
  createEl = document.getElementById(contractName + '_create')
  if createEl
    createEl.style.visibility = 'hidden'
    document.getElementById(contractName + '_stat').innerText = 'transaction sent, waiting for confirmation...'
    callback(null, true)
  else
    e = new Error('Could not parse input')
    callback(e, null)
# our asyncLoop
asyncLoop: (iterations, func, callback) ->
  # Drive `func` up to `iterations` times; `func` receives a cycle handle
  # exposing next()/iteration()/break(). `callback` runs exactly once, when
  # the loop completes or is broken out of.
  current = 0
  finished = false
  cycle =
    next: ->
      return if finished
      unless current < iterations
        finished = true
        callback()
        return
      current += 1
      func cycle
    iteration: ->
      # Index of the iteration currently being processed
      current - 1
    break: ->
      finished = true
      callback()
  cycle.next()
  cycle
# Construct function buttons from abi
constructFunctions: (@contractABI, callback) ->
for contractFunction in contractABI
if contractFunction.type = 'function' and contractFunction.name != null and contractFunction.name != undefined
@createChilds contractFunction, (error, childInputs) ->
if !error
callback(null, [contractFunction.name, childInputs])
else
callback(null, [null, null])
createChilds: (contractFunction, callback) ->
reactElements = []
i = 0
if contractFunction.inputs.length > 0
while i < contractFunction.inputs.length
reactElements[i] = [contractFunction.inputs[i].type, contractFunction.inputs[i].name]
i++
callback(null, reactElements)
# Construct react child inputs
create: (@abi, @code, @constructVars, @contractName, @estimatedGas) ->
that = this
@estimatedGas = if @estimatedGas > 0 then @estimatedGas else 1000000
if Password == ''
e = new Error('Empty password')
console.error ("Empty password")
@showErrorMessage 0, "No password provided"
return
# hide create button
@prepareEnv @contractName, (err, callback) ->
if err
console.error err
else
# Use coinbase
web3.eth.defaultAccount = Coinbase
console.log "Using coinbase: " + web3.eth.defaultAccount
# set variables and render display
constructorS = []
for i in that.constructVars.inputVariables
constructorS.push i.varValue
web3.personal.unlockAccount(web3.eth.defaultAccount, Password)
web3.eth.contract(that.abi).new constructorS.toString(), { data: that.code, from: web3.eth.defaultAccount, gas: that.estimatedGas }, (err, contract) ->
if err
console.error err
that.showErrorMessage 129, err
return
# callback fires twice, we only want the second call when the contract is deployed
else if contract.address
myContract = contract
console.log 'address: ' + myContract.address
document.getElementById(that.contractName + '_stat').innerText = 'Mined!'
document.getElementById(that.contractName + '_stat').setAttribute('class', 'icon icon-zap') # Add icon class
document.getElementById(that.contractName + '_address').innerText = myContract.address
document.getElementById(that.contractName + '_address').setAttribute('class', 'icon icon-key') # Add icon class
# Check every key, if it is a function create call buttons,
# for every function there could be many call methods,
# for every method there cpould be many inputs
# Innermost callback will have inputs for all abi objects
# Lets think the Innermost function
# Construct view for function call view
functionABI = React.createClass(
displayName: 'callFunctions'
getInitialState: ->
{ childFunctions: [] }
componentDidMount: ->
self = this
that.constructFunctions that.abi, (error, childFunctions) ->
if !error
self.state.childFunctions.push(childFunctions)
self.forceUpdate()
_handleChange: (childFunction, event) ->
console.log event.target.value
this.setState { value: event.target.value }
_handleSubmit: (childFunction, event) ->
# Get arguments ready here
that.argsToArray this.refs, childFunction, (error, argArray) ->
if !error
that.call(myContract, childFunction, argArray)
render: ->
self = this
React.createElement 'div', { htmlFor: 'contractFunctions' }, this.state.childFunctions.map((childFunction, i) ->
React.createElement 'form', { onSubmit: self._handleSubmit.bind(this, childFunction[0]), key: i, ref: childFunction[0] },
React.createElement 'input', { type: 'submit', readOnly: 'true', value: childFunction[0], className: 'text-subtle call-button' }
childFunction[1].map((childInput, j) ->
React.createElement 'input', { tye: 'text', handleChange: self._handleChange, placeholder: childInput[0] + ' ' + childInput[1], className: 'call-button-values' }#, ref: if childFunction[0] then childFunction[0][j] else "Constructor" }
)
)
)
ReactDOM.render React.createElement(functionABI), document.getElementById(that.contractName + '_call')
else if !contract.address
contractStat = React.createClass(
render: ->
React.createElement 'div', { htmlFor: 'contractStat' },
React.createElement 'span', { className: 'inline-block highlight' }, 'TransactionHash: '
React.createElement 'pre', { className: 'large-code' }, contract.transactionHash
React.createElement 'span', { className: 'stat-mining stat-mining-align' }, 'waiting to be mined '
React.createElement 'span', { className: 'loading loading-spinner-tiny inline-block stat-mining-align' }
)
ReactDOM.render React.createElement(contractStat), document.getElementById(that.contractName + '_stat')
# document.getElementById(that.contractName + '_stat').innerText = "Contract transaction send: TransactionHash: " + contract.transactionHash + " waiting to be mined..."
console.log "Contract transaction send: TransactionHash: " + contract.transactionHash + " waiting to be mined..."
showOutput: (address, output) ->
messages = new MessagePanelView(title: 'Solidity compiler output')
messages.attach()
address = 'Contract address: ' + address
output = 'Contract output: ' + output
messages.add new PlainMessageView(message: address, className: 'green-message')
messages.add new PlainMessageView(message: output, className: 'green-message')
  # Collect the values typed into a function's input fields into a plain array.
  #
  # @reactElements - React refs object; @reactElements[@childFunction] is the
  #                  <form> for the function, whose childNodes are its inputs.
  #                  NOTE(review): the loop body indexes the form element
  #                  directly ([i]) while the length comes from childNodes —
  #                  this relies on indexed access to form controls lining up
  #                  with childNodes; confirm against the rendered markup.
  # @childFunction - name of the contract function whose form is read
  # callback       - invoked as callback(null, args) once every node was seen
  argsToArray: (@reactElements, @childFunction, callback) ->
    that = this
    # For every childNodes of childFunction
    # Get value of childFunction
    # Trim value having name of the function
    args = new Array()
    @asyncLoop @reactElements[@childFunction].childNodes.length, ((cycle) ->
      # Skip the submit button itself; every other control contributes its value.
      if that.reactElements[that.childFunction][cycle.iteration()].type != 'submit'
        args.push(that.reactElements[that.childFunction][cycle.iteration()].value)
      cycle.next()
    ), ->
      callback(null, args)
  # Validate/normalize the collected argument array before a contract call.
  # Currently a pass-through: hands @arguments straight to the callback.
  checkArray: (@arguments, callback) ->
    # TODO: Check for empty elements and remove them
    # TODO: remove any unwanted element that has no text in it
    callback(null, @arguments)
call: (@myContract, @functionName, @arguments) ->
that = this
console.log @myContract
console.log @functionName
console.log @arguments
@checkArray @arguments, (error, args) ->
if !error
if args.length > 0
web3.personal.unlockAccount(web3.eth.defaultAccount, Password)
result = that.myContract[that.functionName].apply(this, args)
else
web3.personal.unlockAccount(web3.eth.defaultAccount, Password)
result = that.myContract[that.functionName]()
console.log result
that.showOutput that.myContract.address, result
toggle: ->
if @modalPanel.isVisible()
@modalPanel.hide()
else
@modalPanel.show()
|
[
{
"context": " @fileoverview A class of the code path.\n# @author Toru Nagashima\n###\n\n'use strict'\n\n#-----------------------------",
"end": 71,
"score": 0.9998580813407898,
"start": 57,
"tag": "NAME",
"value": "Toru Nagashima"
}
] | src/code-path-analysis/code-path.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview A class of the code path.
# @author Toru Nagashima
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
CodePathState = require './code-path-state'
IdGenerator = require '../eslint-code-path-analysis-id-generator'
#------------------------------------------------------------------------------
# Public Interface
#------------------------------------------------------------------------------
###*
# A code path.
###
class CodePath
  ###*
  # @param {string} id - An identifier.
  # @param {CodePath|null} upper - The code path of the upper function scope.
  # @param {Function} onLooped - A callback function to notify looping.
  ###
  constructor: (
    ###*
    # The identifier of this code path.
    # Rules use it to store additional information of each rule.
    # @type {string}
    ###
    @id
    ###*
    # The code path of the upper function scope.
    # @type {CodePath|null}
    ###
    @upper
    onLooped
  ) ->
    ###*
    # The code paths of nested function scopes.
    # @type {CodePath[]}
    ###
    @childCodePaths = []
    # Initializes internal state.
    Object.defineProperty @, 'internal',
      value: new CodePathState new IdGenerator("#{@id}_"), onLooped
    # Adds this into `childCodePaths` of `upper`.
    @upper?.childCodePaths.push @
  ###*
  # Gets the state of a given code path.
  #
  # @param {CodePath} codePath - A code path to get.
  # @returns {CodePathState} The state of the code path.
  ###
  @getState: (codePath) -> codePath.internal
  ###*
  # Traverses all segments in this code path.
  #
  #     codePath.traverseSegments(function(segment, controller) {
  #         // do something.
  #     });
  #
  # This method enumerates segments in order from the head.
  #
  # The `controller` object has two methods.
  #
  # - `controller.skip()` - Skip the following segments in this branch.
  # - `controller.break()` - Skip all following segments.
  #
  # @param {Object} [options] - Omittable.
  # @param {CodePathSegment} [options.first] - The first segment to traverse.
  # @param {CodePathSegment} [options.last] - The last segment to traverse.
  # @param {Function} callback - A callback function.
  # @returns {void}
  ###
  traverseSegments: (options, callback) ->
    if typeof options is 'function'
      resolvedCallback = options
      resolvedOptions = {}
    else
      resolvedOptions = options or {}
      resolvedCallback = callback
    startSegment = resolvedOptions.first or @internal.initialSegment
    lastSegment = resolvedOptions.last
    item = null
    index = 0
    end = 0
    segment = null
    visited = Object.create null
    # Iterative DFS: each stack entry is a [segment, nextChildIndex] pair.
    stack = [[startSegment, 0]]
    skippedSegment = null
    broken = no
    controller =
      skip: ->
        if stack.length <= 1
          broken ###:### = yes
        else
          skippedSegment = stack[stack.length - 2][0]
      break: -> broken ###:### = yes
    ###*
    # Checks a given previous segment has been visited.
    # @param {CodePathSegment} prevSegment - A previous segment to check.
    # @returns {boolean} `true` if the segment has been visited.
    ###
    isVisited = (prevSegment) ->
      visited[prevSegment.id] or segment.isLoopedPrevSegment prevSegment
    while stack.length > 0
      item = stack[stack.length - 1]
      segment = item[0]
      index = item[1]
      if index is 0
        # Skip if this segment has been visited already.
        if visited[segment.id]
          stack.pop()
          continue
        # Skip if all previous segments have not been visited.
        if (
          segment isnt startSegment and
          segment.prevSegments.length > 0 and
          not segment.prevSegments.every isVisited
        )
          stack.pop()
          continue
        # Reset the flag of skipping if all branches have been skipped.
        if (
          skippedSegment and
          segment.prevSegments.indexOf(skippedSegment) isnt -1
        )
          skippedSegment = null
        visited[segment.id] = yes
        # Call the callback when the first time.
        unless skippedSegment
          resolvedCallback.call @, segment, controller
          if segment is lastSegment then controller.skip()
          if broken then break
      # Update the stack.
      end = segment.nextSegments.length - 1
      if index < end
        # More next segments remain: keep this frame, advance its index,
        # and descend into the current child.
        item[1] += 1
        stack.push [segment.nextSegments[index], 0]
      else if index is end
        # Last next segment: reuse the current frame in place (tail descent).
        item[0] = segment.nextSegments[index]
        item[1] = 0
      else
        # No next segments left: this branch is exhausted.
        stack.pop()
# Read-only accessors below proxy the hidden, non-enumerable `internal`
# CodePathState installed by the constructor.
###*
# Current code path segments.
# @type {CodePathSegment[]}
###
Object.defineProperty CodePath.prototype, 'currentSegments',
  get: -> @internal.currentSegments
###*
# The initial code path segment.
# @type {CodePathSegment}
###
Object.defineProperty CodePath.prototype, 'initialSegment',
  get: -> @internal.initialSegment
###*
# Final code path segments.
# This array is a mix of `returnedSegments` and `thrownSegments`.
# @type {CodePathSegment[]}
###
Object.defineProperty CodePath.prototype, 'finalSegments',
  get: -> @internal.finalSegments
###*
# Final code path segments which is with `return` statements.
# This array contains the last path segment if it's reachable.
# Since the reachable last path returns `undefined`.
# @type {CodePathSegment[]}
###
Object.defineProperty CodePath.prototype, 'returnedSegments',
  get: -> @internal.returnedForkContext
###*
# Final code path segments which is with `throw` statements.
# @type {CodePathSegment[]}
###
Object.defineProperty CodePath.prototype, 'thrownSegments',
  get: -> @internal.thrownForkContext
module.exports = CodePath
| 46402 | ###*
# @fileoverview A class of the code path.
# @author <NAME>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
CodePathState = require './code-path-state'
IdGenerator = require '../eslint-code-path-analysis-id-generator'
#------------------------------------------------------------------------------
# Public Interface
#------------------------------------------------------------------------------
###*
# A code path.
###
class CodePath
###*
# @param {string} id - An identifier.
# @param {CodePath|null} upper - The code path of the upper function scope.
# @param {Function} onLooped - A callback function to notify looping.
###
constructor: (
###*
# The identifier of this code path.
# Rules use it to store additional information of each rule.
# @type {string}
###
@id
###*
# The code path of the upper function scope.
# @type {CodePath|null}
###
@upper
onLooped
) ->
###*
# The code paths of nested function scopes.
# @type {CodePath[]}
###
@childCodePaths = []
# Initializes internal state.
Object.defineProperty @, 'internal',
value: new CodePathState new IdGenerator("#{@id}_"), onLooped
# Adds this into `childCodePaths` of `upper`.
@upper?.childCodePaths.push @
###*
# Gets the state of a given code path.
#
# @param {CodePath} codePath - A code path to get.
# @returns {CodePathState} The state of the code path.
###
@getState: (codePath) -> codePath.internal
###*
# Traverses all segments in this code path.
#
# codePath.traverseSegments(function(segment, controller) {
# // do something.
# });
#
# This method enumerates segments in order from the head.
#
# The `controller` object has two methods.
#
# - `controller.skip()` - Skip the following segments in this branch.
# - `controller.break()` - Skip all following segments.
#
# @param {Object} [options] - Omittable.
# @param {CodePathSegment} [options.first] - The first segment to traverse.
# @param {CodePathSegment} [options.last] - The last segment to traverse.
# @param {Function} callback - A callback function.
# @returns {void}
###
traverseSegments: (options, callback) ->
if typeof options is 'function'
resolvedCallback = options
resolvedOptions = {}
else
resolvedOptions = options or {}
resolvedCallback = callback
startSegment = resolvedOptions.first or @internal.initialSegment
lastSegment = resolvedOptions.last
item = null
index = 0
end = 0
segment = null
visited = Object.create null
stack = [[startSegment, 0]]
skippedSegment = null
broken = no
controller =
skip: ->
if stack.length <= 1
broken ###:### = yes
else
skippedSegment = stack[stack.length - 2][0]
break: -> broken ###:### = yes
###*
# Checks a given previous segment has been visited.
# @param {CodePathSegment} prevSegment - A previous segment to check.
# @returns {boolean} `true` if the segment has been visited.
###
isVisited = (prevSegment) ->
visited[prevSegment.id] or segment.isLoopedPrevSegment prevSegment
while stack.length > 0
item = stack[stack.length - 1]
segment = item[0]
index = item[1]
if index is 0
# Skip if this segment has been visited already.
if visited[segment.id]
stack.pop()
continue
# Skip if all previous segments have not been visited.
if (
segment isnt startSegment and
segment.prevSegments.length > 0 and
not segment.prevSegments.every isVisited
)
stack.pop()
continue
# Reset the flag of skipping if all branches have been skipped.
if (
skippedSegment and
segment.prevSegments.indexOf(skippedSegment) isnt -1
)
skippedSegment = null
visited[segment.id] = yes
# Call the callback when the first time.
unless skippedSegment
resolvedCallback.call @, segment, controller
if segment is lastSegment then controller.skip()
if broken then break
# Update the stack.
end = segment.nextSegments.length - 1
if index < end
item[1] += 1
stack.push [segment.nextSegments[index], 0]
else if index is end
item[0] = segment.nextSegments[index]
item[1] = 0
else
stack.pop()
###*
# Current code path segments.
# @type {CodePathSegment[]}
###
Object.defineProperty CodePath.prototype, 'currentSegments',
get: -> @internal.currentSegments
###*
# The initial code path segment.
# @type {CodePathSegment}
###
Object.defineProperty CodePath.prototype, 'initialSegment',
get: -> @internal.initialSegment
###*
# Final code path segments.
# This array is a mix of `returnedSegments` and `thrownSegments`.
# @type {CodePathSegment[]}
###
Object.defineProperty CodePath.prototype, 'finalSegments',
get: -> @internal.finalSegments
###*
# Final code path segments which is with `return` statements.
# This array contains the last path segment if it's reachable.
# Since the reachable last path returns `undefined`.
# @type {CodePathSegment[]}
###
Object.defineProperty CodePath.prototype, 'returnedSegments',
get: -> @internal.returnedForkContext
###*
# Final code path segments which is with `throw` statements.
# @type {CodePathSegment[]}
###
Object.defineProperty CodePath.prototype, 'thrownSegments',
get: -> @internal.thrownForkContext
module.exports = CodePath
| true | ###*
# @fileoverview A class of the code path.
# @author PI:NAME:<NAME>END_PI
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
CodePathState = require './code-path-state'
IdGenerator = require '../eslint-code-path-analysis-id-generator'
#------------------------------------------------------------------------------
# Public Interface
#------------------------------------------------------------------------------
###*
# A code path.
###
class CodePath
###*
# @param {string} id - An identifier.
# @param {CodePath|null} upper - The code path of the upper function scope.
# @param {Function} onLooped - A callback function to notify looping.
###
constructor: (
###*
# The identifier of this code path.
# Rules use it to store additional information of each rule.
# @type {string}
###
@id
###*
# The code path of the upper function scope.
# @type {CodePath|null}
###
@upper
onLooped
) ->
###*
# The code paths of nested function scopes.
# @type {CodePath[]}
###
@childCodePaths = []
# Initializes internal state.
Object.defineProperty @, 'internal',
value: new CodePathState new IdGenerator("#{@id}_"), onLooped
# Adds this into `childCodePaths` of `upper`.
@upper?.childCodePaths.push @
###*
# Gets the state of a given code path.
#
# @param {CodePath} codePath - A code path to get.
# @returns {CodePathState} The state of the code path.
###
@getState: (codePath) -> codePath.internal
###*
# Traverses all segments in this code path.
#
# codePath.traverseSegments(function(segment, controller) {
# // do something.
# });
#
# This method enumerates segments in order from the head.
#
# The `controller` object has two methods.
#
# - `controller.skip()` - Skip the following segments in this branch.
# - `controller.break()` - Skip all following segments.
#
# @param {Object} [options] - Omittable.
# @param {CodePathSegment} [options.first] - The first segment to traverse.
# @param {CodePathSegment} [options.last] - The last segment to traverse.
# @param {Function} callback - A callback function.
# @returns {void}
###
traverseSegments: (options, callback) ->
if typeof options is 'function'
resolvedCallback = options
resolvedOptions = {}
else
resolvedOptions = options or {}
resolvedCallback = callback
startSegment = resolvedOptions.first or @internal.initialSegment
lastSegment = resolvedOptions.last
item = null
index = 0
end = 0
segment = null
visited = Object.create null
stack = [[startSegment, 0]]
skippedSegment = null
broken = no
controller =
skip: ->
if stack.length <= 1
broken ###:### = yes
else
skippedSegment = stack[stack.length - 2][0]
break: -> broken ###:### = yes
###*
# Checks a given previous segment has been visited.
# @param {CodePathSegment} prevSegment - A previous segment to check.
# @returns {boolean} `true` if the segment has been visited.
###
isVisited = (prevSegment) ->
visited[prevSegment.id] or segment.isLoopedPrevSegment prevSegment
while stack.length > 0
item = stack[stack.length - 1]
segment = item[0]
index = item[1]
if index is 0
# Skip if this segment has been visited already.
if visited[segment.id]
stack.pop()
continue
# Skip if all previous segments have not been visited.
if (
segment isnt startSegment and
segment.prevSegments.length > 0 and
not segment.prevSegments.every isVisited
)
stack.pop()
continue
# Reset the flag of skipping if all branches have been skipped.
if (
skippedSegment and
segment.prevSegments.indexOf(skippedSegment) isnt -1
)
skippedSegment = null
visited[segment.id] = yes
# Call the callback when the first time.
unless skippedSegment
resolvedCallback.call @, segment, controller
if segment is lastSegment then controller.skip()
if broken then break
# Update the stack.
end = segment.nextSegments.length - 1
if index < end
item[1] += 1
stack.push [segment.nextSegments[index], 0]
else if index is end
item[0] = segment.nextSegments[index]
item[1] = 0
else
stack.pop()
###*
# Current code path segments.
# @type {CodePathSegment[]}
###
Object.defineProperty CodePath.prototype, 'currentSegments',
get: -> @internal.currentSegments
###*
# The initial code path segment.
# @type {CodePathSegment}
###
Object.defineProperty CodePath.prototype, 'initialSegment',
get: -> @internal.initialSegment
###*
# Final code path segments.
# This array is a mix of `returnedSegments` and `thrownSegments`.
# @type {CodePathSegment[]}
###
Object.defineProperty CodePath.prototype, 'finalSegments',
get: -> @internal.finalSegments
###*
# Final code path segments which is with `return` statements.
# This array contains the last path segment if it's reachable.
# Since the reachable last path returns `undefined`.
# @type {CodePathSegment[]}
###
Object.defineProperty CodePath.prototype, 'returnedSegments',
get: -> @internal.returnedForkContext
###*
# Final code path segments which is with `throw` statements.
# @type {CodePathSegment[]}
###
Object.defineProperty CodePath.prototype, 'thrownSegments',
get: -> @internal.thrownForkContext
module.exports = CodePath
|
[
{
"context": ".'\n 'es-ES':\n 'text': '¡Buenas Mago! ¿Vienes a practicar? Bien, empecemos...'\n ",
"end": 400,
"score": 0.7841633558273315,
"start": 396,
"tag": "NAME",
"value": "Mago"
},
{
"context": "s...'\n 'es':\n 'text': '¡Buenas Mag... | test/app/lib/utils.spec.coffee | rishiloyola/codecombat | 0 | describe 'Utility library', ->
util = require 'lib/utils'
describe 'i18n', ->
beforeEach ->
this.fixture1 =
'text': 'G\'day, Wizard! Come to practice? Well, let\'s get started...'
'blurb': 'G\'day'
'i18n':
'es-419':
'text': '¡Buenas, Hechicero! ¿Vienes a practicar? Bueno, empecemos...'
'es-ES':
'text': '¡Buenas Mago! ¿Vienes a practicar? Bien, empecemos...'
'es':
'text': '¡Buenas Mago! ¿Vienes a practicar? Muy bien, empecemos...'
'fr':
'text': 'S\'lut, Magicien! Venu pratiquer? Ok, bien débutons...'
'pt-BR':
'text': 'Bom dia, feiticeiro! Veio praticar? Então vamos começar...'
'en':
'text': 'Ohai Magician!'
'de':
'text': '\'N Tach auch, Zauberer! Kommst Du zum Üben? Dann lass uns anfangen...'
'sv':
'text': 'Godagens, trollkarl! Kommit för att öva? Nå, låt oss börja...'
it 'i18n should find a valid target string', ->
expect(util.i18n(this.fixture1, 'text', 'sv')).toEqual(this.fixture1.i18n['sv'].text)
expect(util.i18n(this.fixture1, 'text', 'es-ES')).toEqual(this.fixture1.i18n['es-ES'].text)
it 'i18n picks the correct fallback for a specific language', ->
expect(util.i18n(this.fixture1, 'text', 'fr-be')).toEqual(this.fixture1.i18n['fr'].text)
it 'i18n picks the correct fallback', ->
expect(util.i18n(this.fixture1, 'text', 'nl')).toEqual(this.fixture1.i18n['en'].text)
expect(util.i18n(this.fixture1, 'text', 'nl', 'de')).toEqual(this.fixture1.i18n['de'].text)
it 'i18n falls back to the default text, even for other targets (like blurb)', ->
delete this.fixture1.i18n['en']
expect(util.i18n(this.fixture1, 'text', 'en')).toEqual(this.fixture1.text)
expect(util.i18n(this.fixture1, 'blurb', 'en')).toEqual(this.fixture1.blurb)
delete this.fixture1.blurb
expect(util.i18n(this.fixture1, 'blurb', 'en')).toEqual(null)
it 'i18n can fall forward if a general language is not found', ->
expect(util.i18n(this.fixture1, 'text', 'pt')).toEqual(this.fixture1.i18n['pt-BR'].text)
describe 'Miscellaneous utility', ->
| 20707 | describe 'Utility library', ->
util = require 'lib/utils'
describe 'i18n', ->
beforeEach ->
this.fixture1 =
'text': 'G\'day, Wizard! Come to practice? Well, let\'s get started...'
'blurb': 'G\'day'
'i18n':
'es-419':
'text': '¡Buenas, Hechicero! ¿Vienes a practicar? Bueno, empecemos...'
'es-ES':
'text': '¡Buenas <NAME>! ¿Vienes a practicar? Bien, empecemos...'
'es':
'text': '¡Buenas <NAME>! ¿Vienes a practicar? Muy bien, empecemos...'
'fr':
'text': 'S\'lut, Magicien! Venu pratiquer? Ok, bien débutons...'
'pt-BR':
'text': 'Bom dia, feiticeiro! Veio praticar? Então vamos começar...'
'en':
'text': 'Ohai Magician!'
'de':
'text': '\'N Tach auch, Zauberer! Kommst Du zum Üben? Dann lass uns anfangen...'
'sv':
'text': 'Godagens, trollkarl! Kommit för att öva? Nå, låt oss börja...'
it 'i18n should find a valid target string', ->
expect(util.i18n(this.fixture1, 'text', 'sv')).toEqual(this.fixture1.i18n['sv'].text)
expect(util.i18n(this.fixture1, 'text', 'es-ES')).toEqual(this.fixture1.i18n['es-ES'].text)
it 'i18n picks the correct fallback for a specific language', ->
expect(util.i18n(this.fixture1, 'text', 'fr-be')).toEqual(this.fixture1.i18n['fr'].text)
it 'i18n picks the correct fallback', ->
expect(util.i18n(this.fixture1, 'text', 'nl')).toEqual(this.fixture1.i18n['en'].text)
expect(util.i18n(this.fixture1, 'text', 'nl', 'de')).toEqual(this.fixture1.i18n['de'].text)
it 'i18n falls back to the default text, even for other targets (like blurb)', ->
delete this.fixture1.i18n['en']
expect(util.i18n(this.fixture1, 'text', 'en')).toEqual(this.fixture1.text)
expect(util.i18n(this.fixture1, 'blurb', 'en')).toEqual(this.fixture1.blurb)
delete this.fixture1.blurb
expect(util.i18n(this.fixture1, 'blurb', 'en')).toEqual(null)
it 'i18n can fall forward if a general language is not found', ->
expect(util.i18n(this.fixture1, 'text', 'pt')).toEqual(this.fixture1.i18n['pt-BR'].text)
describe 'Miscellaneous utility', ->
| true | describe 'Utility library', ->
util = require 'lib/utils'
describe 'i18n', ->
beforeEach ->
this.fixture1 =
'text': 'G\'day, Wizard! Come to practice? Well, let\'s get started...'
'blurb': 'G\'day'
'i18n':
'es-419':
'text': '¡Buenas, Hechicero! ¿Vienes a practicar? Bueno, empecemos...'
'es-ES':
'text': '¡Buenas PI:NAME:<NAME>END_PI! ¿Vienes a practicar? Bien, empecemos...'
'es':
'text': '¡Buenas PI:NAME:<NAME>END_PI! ¿Vienes a practicar? Muy bien, empecemos...'
'fr':
'text': 'S\'lut, Magicien! Venu pratiquer? Ok, bien débutons...'
'pt-BR':
'text': 'Bom dia, feiticeiro! Veio praticar? Então vamos começar...'
'en':
'text': 'Ohai Magician!'
'de':
'text': '\'N Tach auch, Zauberer! Kommst Du zum Üben? Dann lass uns anfangen...'
'sv':
'text': 'Godagens, trollkarl! Kommit för att öva? Nå, låt oss börja...'
it 'i18n should find a valid target string', ->
expect(util.i18n(this.fixture1, 'text', 'sv')).toEqual(this.fixture1.i18n['sv'].text)
expect(util.i18n(this.fixture1, 'text', 'es-ES')).toEqual(this.fixture1.i18n['es-ES'].text)
it 'i18n picks the correct fallback for a specific language', ->
expect(util.i18n(this.fixture1, 'text', 'fr-be')).toEqual(this.fixture1.i18n['fr'].text)
it 'i18n picks the correct fallback', ->
expect(util.i18n(this.fixture1, 'text', 'nl')).toEqual(this.fixture1.i18n['en'].text)
expect(util.i18n(this.fixture1, 'text', 'nl', 'de')).toEqual(this.fixture1.i18n['de'].text)
it 'i18n falls back to the default text, even for other targets (like blurb)', ->
delete this.fixture1.i18n['en']
expect(util.i18n(this.fixture1, 'text', 'en')).toEqual(this.fixture1.text)
expect(util.i18n(this.fixture1, 'blurb', 'en')).toEqual(this.fixture1.blurb)
delete this.fixture1.blurb
expect(util.i18n(this.fixture1, 'blurb', 'en')).toEqual(null)
it 'i18n can fall forward if a general language is not found', ->
expect(util.i18n(this.fixture1, 'text', 'pt')).toEqual(this.fixture1.i18n['pt-BR'].text)
describe 'Miscellaneous utility', ->
|
[
{
"context": " beforeEach ->\n cloudinary.config cloud_name: \"test123\", api_key: 'a', api_secret: 'b', responsive_width",
"end": 156,
"score": 0.9979054927825928,
"start": 149,
"tag": "USERNAME",
"value": "test123"
},
{
"context": "loudinary.config cloud_name: \"test123\", api_k... | keystone/node_modules/cloudinary/test/cloudinaryspec.coffee | kinfen/sitecore-product | 0 | expect = require("expect.js")
cloudinary = require("../cloudinary.js")
describe "cloudinary", ->
beforeEach ->
cloudinary.config cloud_name: "test123", api_key: 'a', api_secret: 'b', responsive_width_transformation: null
it "should use cloud_name from config", ->
result = cloudinary.utils.url("test")
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/test"
it "should allow overriding cloud_name in options", ->
options = cloud_name: "test321"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test321/image/upload/test"
it "should use format from options", ->
options = format: "jpg"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/test.jpg"
it "should use default secure distribution if secure=true", ->
options = secure: true
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "https://res.cloudinary.com/test123/image/upload/test"
it "should default to akamai if secure is given with private_cdn and no secure_distribution", ->
options = secure: true, private_cdn: true
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "https://test123-res.cloudinary.com/image/upload/test"
it "should not add cloud_name if secure private_cdn and secure non akamai secure_distribution", ->
options = secure: true, private_cdn: true, secure_distribution: "something.cloudfront.net"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "https://something.cloudfront.net/image/upload/test"
it "should not add cloud_name if private_cdn and not secure", ->
options = private_cdn: true
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://test123-res.cloudinary.com/image/upload/test"
it "should use width and height from options only if crop is given", ->
options =
width: 100
height: 100
result = cloudinary.utils.url("test", options)
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/h_100,w_100/test"
expect(options).to.eql
width: 100
height: 100
options =
width: 100
height: 100
crop: "crop"
result = cloudinary.utils.url("test", options)
expect(options).to.eql
width: 100
height: 100
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_crop,h_100,w_100/test"
it "should not pass width and height to html in case of fit or limit crop", ->
options =
width: 100
height: 100
crop: "limit"
result = cloudinary.utils.url("test", options)
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_limit,h_100,w_100/test"
expect(options).to.eql {}
options =
width: 100
height: 100
crop: "fit"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_fit,h_100,w_100/test"
it "should not pass width and height to html in case angle was used", ->
options =
width: 100
height: 100
crop: "scale"
angle: "auto"
result = cloudinary.utils.url("test", options)
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/a_auto,c_scale,h_100,w_100/test"
expect(options).to.eql {}
it "should use x, y, radius, opacity, prefix, gravity and quality from options", ->
options =
x: 1
y: 2
radius: 3
gravity: "center"
quality: 0.4
prefix: "a"
opacity: 20
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/g_center,o_20,p_a,q_0.4,r_3,x_1,y_2/test"
it "should support named transformation", ->
options = transformation: "blip"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/t_blip/test"
it "should support array of named transformations", ->
options = transformation: [ "blip", "blop" ]
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/t_blip.blop/test"
it "should support base transformation", ->
options =
transformation:
x: 100
y: 100
crop: "fill"
crop: "crop"
width: 100
result = cloudinary.utils.url("test", options)
expect(options).to.eql width: 100
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_fill,x_100,y_100/c_crop,w_100/test"
it "should support array of base transformations", ->
options =
transformation: [{x: 100, y: 100, width: 200, crop: "fill"} , {radius: 10} ]
crop: "crop"
width: 100
result = cloudinary.utils.url("test", options)
expect(options).to.eql width: 100
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_fill,w_200,x_100,y_100/r_10/c_crop,w_100/test"
it "should not include empty transformations", ->
options = transformation: [ {}, {x: 100, y: 100, crop: "fill"} , {} ]
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_fill,x_100,y_100/test"
it "should support size", ->
options =
size: "10x10"
crop: "crop"
result = cloudinary.utils.url("test", options)
expect(options).to.eql
width: "10"
height: "10"
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_crop,h_10,w_10/test"
it "should use type from options", ->
options = type: "facebook"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/facebook/test"
it "should use resource_type from options", ->
options = resource_type: "raw"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/raw/upload/test"
it "should ignore http links only if type is not given ", ->
options = type: null
result = cloudinary.utils.url("http://example.com/", options)
expect(options).to.eql {}
expect(result).to.eql "http://example.com/"
options = type: "fetch"
result = cloudinary.utils.url("http://example.com/", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/fetch/http://example.com/"
it "should escape fetch urls", ->
options = type: "fetch"
result = cloudinary.utils.url("http://blah.com/hello?a=b", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/fetch/http://blah.com/hello%3Fa%3Db"
it "should escape http urls", ->
options = type: "youtube"
result = cloudinary.utils.url("http://www.youtube.com/watch?v=d9NF2edxy-M", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/youtube/http://www.youtube.com/watch%3Fv%3Dd9NF2edxy-M"
it "should support background", ->
options = background: "red"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/b_red/test"
options = background: "#112233"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/b_rgb:112233/test"
it "should support default_image", ->
options = default_image: "default"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/d_default/test"
it "should support angle", ->
options = angle: 12
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/a_12/test"
it "should support format for fetch urls", ->
options =
format: "jpg"
type: "fetch"
result = cloudinary.utils.url("http://cloudinary.com/images/logo.png", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/fetch/f_jpg/http://cloudinary.com/images/logo.png"
it "should support effect", ->
options = effect: "sepia"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/e_sepia/test"
it "should support effect with param", ->
options = effect: [ "sepia", 10 ]
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/e_sepia:10/test"
layers =
overlay: "l"
underlay: "u"
for layer of layers
it "should support #{layer}", ->
options = {}
options[layer] = "text:hello"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/#{layers[layer]}_text:hello/test"
it "should not pass width/height to html for #{layer}", ->
options =
height: 100
width: 100
options[layer] = "text:hello"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/h_100,#{layers[layer]}_text:hello,w_100/test"
it "should correctly sign api requests", ->
expect(cloudinary.utils.api_sign_request({hello: null, goodbye: 12, world: "problem"}, "1234")).to.eql "f05cfe85cee78e7e997b3c7da47ba212dcbf1ea5"
it "should correctly build signed preloaded image", ->
expect(cloudinary.utils.signed_preloaded_image(
resource_type: "image"
version: 1251251251
public_id: "abcd"
format: "jpg"
signature: "123515adfa151"
)).to.eql "image/upload/v1251251251/abcd.jpg#123515adfa151"
it "should support density", ->
options = density: 150
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/dn_150/test"
it "should support page", ->
options = page: 5
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/pg_5/test"
it "should support external cname", ->
options = cname: "hello.com"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://hello.com/test123/image/upload/test"
it "should support external cname with cdn_subdomain on", ->
options = cname: "hello.com", cdn_subdomain: true
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://a2.hello.com/test123/image/upload/test"
it "should support border", ->
options = border: {width: 5}
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/bo_5px_solid_black/test"
options = border: {width: 5, color: "#ffaabbdd"}
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/bo_5px_solid_rgb:ffaabbdd/test"
options = border: "1px_solid_blue"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/bo_1px_solid_blue/test"
it "should support flags", ->
options = flags: "abc"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/fl_abc/test"
options = flags: ["abc", "def"]
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/fl_abc.def/test"
it "should add version if public_id contains /", ->
result = cloudinary.utils.url("folder/test")
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/v1/folder/test"
result = cloudinary.utils.url("folder/test", version: 123)
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/v123/folder/test"
it "should not add version if public_id contains version already", ->
result = cloudinary.utils.url("v1234/test")
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/v1234/test"
it "should allow to shorted image/upload urls", ->
result = cloudinary.utils.url("test", shorten: true)
expect(result).to.eql "http://res.cloudinary.com/test123/iu/test"
it "should escape public_ids", ->
tests = {
"a b": "a%20b",
"a+b": "a%2Bb",
"a%20b": "a%20b",
"a-b": "a-b",
"a??b": "a%3F%3Fb"}
for source, target of tests
result = cloudinary.utils.url(source)
expect(result).to.eql("http://res.cloudinary.com/test123/image/upload/" + target)
it "should correctly sign a url", ->
expected = "http://res.cloudinary.com/test123/image/upload/s--Ai4Znfl3--/c_crop,h_20,w_10/v1234/image.jpg"
actual = cloudinary.utils.url("image.jpg", version: 1234, crop: "crop", width: 10, height: 20, sign_url: true)
expect(actual).to.eql expected
expected = "http://res.cloudinary.com/test123/image/upload/s----SjmNDA--/v1234/image.jpg"
actual = cloudinary.utils.url("image.jpg", version: 1234, sign_url: true)
expect(actual).to.eql expected
expected = "http://res.cloudinary.com/test123/image/upload/s--Ai4Znfl3--/c_crop,h_20,w_10/image.jpg"
actual = cloudinary.utils.url("image.jpg", crop: "crop", width: 10, height: 20, sign_url: true)
expect(actual).to.eql expected
it "should correctly sign_request", ->
params = cloudinary.utils.sign_request({public_id: "folder/file", version: "1234"}, {api_key: '1234', api_secret: 'b'})
expect(params).to.eql {public_id: "folder/file", version: "1234", signature: "7a3349cbb373e4812118d625047ede50b90e7b67", api_key: "1234"}
it "should correctly process_request_params", ->
params = cloudinary.utils.process_request_params({public_id: "folder/file", version: "1234", colors: undefined}, {api_key: '1234', api_secret: 'b', unsigned: true})
expect(params).to.eql {public_id: "folder/file", version: "1234"}
params = cloudinary.utils.process_request_params({public_id: "folder/file", version: "1234"}, {api_key: '1234', api_secret: 'b'})
expect(params).to.eql {public_id: "folder/file", version: "1234", signature: "7a3349cbb373e4812118d625047ede50b90e7b67", api_key: "1234"}
it "should support preloaded identifier format", ->
result = cloudinary.utils.url("raw/private/v123456/document.docx")
expect(result).to.eql "http://res.cloudinary.com/test123/raw/private/v123456/document.docx"
result = cloudinary.utils.url("image/private/v123456/img.jpg", crop: "scale", width: "1.0")
expect(result).to.eql "http://res.cloudinary.com/test123/image/private/c_scale,w_1.0/v123456/img.jpg"
it "should add responsive width transformation", ->
options =
width: 100
height: 100
crop: "crop"
responsive_width: true
result = cloudinary.utils.url("test", options)
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_crop,h_100,w_100/c_limit,w_auto/test"
expect(options).to.eql {responsive: true}
cloudinary.config({responsive_width_transformation: {width: "auto", crop: "pad"}})
options =
width: 100
height: 100
crop: "crop"
responsive_width: true
result = cloudinary.utils.url("test", options)
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_crop,h_100,w_100/c_pad,w_auto/test"
expect(options).to.eql {responsive: true}
| 173567 | expect = require("expect.js")
cloudinary = require("../cloudinary.js")
describe "cloudinary", ->
beforeEach ->
cloudinary.config cloud_name: "test123", api_key: '<KEY>', api_secret: '<KEY>', responsive_width_transformation: null
it "should use cloud_name from config", ->
result = cloudinary.utils.url("test")
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/test"
it "should allow overriding cloud_name in options", ->
options = cloud_name: "test321"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test321/image/upload/test"
it "should use format from options", ->
options = format: "jpg"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/test.jpg"
it "should use default secure distribution if secure=true", ->
options = secure: true
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "https://res.cloudinary.com/test123/image/upload/test"
it "should default to akamai if secure is given with private_cdn and no secure_distribution", ->
options = secure: true, private_cdn: true
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "https://test123-res.cloudinary.com/image/upload/test"
it "should not add cloud_name if secure private_cdn and secure non akamai secure_distribution", ->
options = secure: true, private_cdn: true, secure_distribution: "something.cloudfront.net"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "https://something.cloudfront.net/image/upload/test"
it "should not add cloud_name if private_cdn and not secure", ->
options = private_cdn: true
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://test123-res.cloudinary.com/image/upload/test"
it "should use width and height from options only if crop is given", ->
options =
width: 100
height: 100
result = cloudinary.utils.url("test", options)
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/h_100,w_100/test"
expect(options).to.eql
width: 100
height: 100
options =
width: 100
height: 100
crop: "crop"
result = cloudinary.utils.url("test", options)
expect(options).to.eql
width: 100
height: 100
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_crop,h_100,w_100/test"
it "should not pass width and height to html in case of fit or limit crop", ->
options =
width: 100
height: 100
crop: "limit"
result = cloudinary.utils.url("test", options)
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_limit,h_100,w_100/test"
expect(options).to.eql {}
options =
width: 100
height: 100
crop: "fit"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_fit,h_100,w_100/test"
it "should not pass width and height to html in case angle was used", ->
options =
width: 100
height: 100
crop: "scale"
angle: "auto"
result = cloudinary.utils.url("test", options)
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/a_auto,c_scale,h_100,w_100/test"
expect(options).to.eql {}
it "should use x, y, radius, opacity, prefix, gravity and quality from options", ->
options =
x: 1
y: 2
radius: 3
gravity: "center"
quality: 0.4
prefix: "a"
opacity: 20
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/g_center,o_20,p_a,q_0.4,r_3,x_1,y_2/test"
it "should support named transformation", ->
options = transformation: "blip"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/t_blip/test"
it "should support array of named transformations", ->
options = transformation: [ "blip", "blop" ]
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/t_blip.blop/test"
it "should support base transformation", ->
options =
transformation:
x: 100
y: 100
crop: "fill"
crop: "crop"
width: 100
result = cloudinary.utils.url("test", options)
expect(options).to.eql width: 100
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_fill,x_100,y_100/c_crop,w_100/test"
it "should support array of base transformations", ->
options =
transformation: [{x: 100, y: 100, width: 200, crop: "fill"} , {radius: 10} ]
crop: "crop"
width: 100
result = cloudinary.utils.url("test", options)
expect(options).to.eql width: 100
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_fill,w_200,x_100,y_100/r_10/c_crop,w_100/test"
it "should not include empty transformations", ->
options = transformation: [ {}, {x: 100, y: 100, crop: "fill"} , {} ]
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_fill,x_100,y_100/test"
it "should support size", ->
options =
size: "10x10"
crop: "crop"
result = cloudinary.utils.url("test", options)
expect(options).to.eql
width: "10"
height: "10"
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_crop,h_10,w_10/test"
it "should use type from options", ->
options = type: "facebook"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/facebook/test"
it "should use resource_type from options", ->
options = resource_type: "raw"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/raw/upload/test"
it "should ignore http links only if type is not given ", ->
options = type: null
result = cloudinary.utils.url("http://example.com/", options)
expect(options).to.eql {}
expect(result).to.eql "http://example.com/"
options = type: "fetch"
result = cloudinary.utils.url("http://example.com/", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/fetch/http://example.com/"
it "should escape fetch urls", ->
options = type: "fetch"
result = cloudinary.utils.url("http://blah.com/hello?a=b", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/fetch/http://blah.com/hello%3Fa%3Db"
it "should escape http urls", ->
options = type: "youtube"
result = cloudinary.utils.url("http://www.youtube.com/watch?v=d9NF2edxy-M", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/youtube/http://www.youtube.com/watch%3Fv%3Dd9NF2edxy-M"
it "should support background", ->
options = background: "red"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/b_red/test"
options = background: "#112233"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/b_rgb:112233/test"
it "should support default_image", ->
options = default_image: "default"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/d_default/test"
it "should support angle", ->
options = angle: 12
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/a_12/test"
it "should support format for fetch urls", ->
options =
format: "jpg"
type: "fetch"
result = cloudinary.utils.url("http://cloudinary.com/images/logo.png", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/fetch/f_jpg/http://cloudinary.com/images/logo.png"
it "should support effect", ->
options = effect: "sepia"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/e_sepia/test"
it "should support effect with param", ->
options = effect: [ "sepia", 10 ]
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/e_sepia:10/test"
layers =
overlay: "l"
underlay: "u"
for layer of layers
it "should support #{layer}", ->
options = {}
options[layer] = "text:hello"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/#{layers[layer]}_text:hello/test"
it "should not pass width/height to html for #{layer}", ->
options =
height: 100
width: 100
options[layer] = "text:hello"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/h_100,#{layers[layer]}_text:hello,w_100/test"
it "should correctly sign api requests", ->
expect(cloudinary.utils.api_sign_request({hello: null, goodbye: 12, world: "problem"}, "1234")).to.eql "f05cfe85cee78e7e997b3c7da47ba212dcbf1ea5"
it "should correctly build signed preloaded image", ->
expect(cloudinary.utils.signed_preloaded_image(
resource_type: "image"
version: 1251251251
public_id: "abcd"
format: "jpg"
signature: "123515adfa151"
)).to.eql "image/upload/v1251251251/abcd.jpg#123515adfa151"
it "should support density", ->
options = density: 150
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/dn_150/test"
it "should support page", ->
options = page: 5
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/pg_5/test"
it "should support external cname", ->
options = cname: "hello.com"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://hello.com/test123/image/upload/test"
it "should support external cname with cdn_subdomain on", ->
options = cname: "hello.com", cdn_subdomain: true
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://a2.hello.com/test123/image/upload/test"
it "should support border", ->
options = border: {width: 5}
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/bo_5px_solid_black/test"
options = border: {width: 5, color: "#ffaabbdd"}
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/bo_5px_solid_rgb:ffaabbdd/test"
options = border: "1px_solid_blue"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/bo_1px_solid_blue/test"
it "should support flags", ->
options = flags: "abc"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/fl_abc/test"
options = flags: ["abc", "def"]
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/fl_abc.def/test"
it "should add version if public_id contains /", ->
result = cloudinary.utils.url("folder/test")
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/v1/folder/test"
result = cloudinary.utils.url("folder/test", version: 123)
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/v123/folder/test"
it "should not add version if public_id contains version already", ->
result = cloudinary.utils.url("v1234/test")
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/v1234/test"
it "should allow to shorted image/upload urls", ->
result = cloudinary.utils.url("test", shorten: true)
expect(result).to.eql "http://res.cloudinary.com/test123/iu/test"
it "should escape public_ids", ->
tests = {
"a b": "a%20b",
"a+b": "a%2Bb",
"a%20b": "a%20b",
"a-b": "a-b",
"a??b": "a%3F%3Fb"}
for source, target of tests
result = cloudinary.utils.url(source)
expect(result).to.eql("http://res.cloudinary.com/test123/image/upload/" + target)
it "should correctly sign a url", ->
expected = "http://res.cloudinary.com/test123/image/upload/s--Ai4Znfl3--/c_crop,h_20,w_10/v1234/image.jpg"
actual = cloudinary.utils.url("image.jpg", version: 1234, crop: "crop", width: 10, height: 20, sign_url: true)
expect(actual).to.eql expected
expected = "http://res.cloudinary.com/test123/image/upload/s----SjmNDA--/v1234/image.jpg"
actual = cloudinary.utils.url("image.jpg", version: 1234, sign_url: true)
expect(actual).to.eql expected
expected = "http://res.cloudinary.com/test123/image/upload/s--Ai4Znfl3--/c_crop,h_20,w_10/image.jpg"
actual = cloudinary.utils.url("image.jpg", crop: "crop", width: 10, height: 20, sign_url: true)
expect(actual).to.eql expected
it "should correctly sign_request", ->
params = cloudinary.utils.sign_request({public_id: "folder/file", version: "1234"}, {api_key: '<KEY>', api_secret: 'b'})
expect(params).to.eql {public_id: "folder/file", version: "1234", signature: "7a3349cbb373e4812118d625047ede50b90e7b67", api_key: "<KEY>"}
it "should correctly process_request_params", ->
params = cloudinary.utils.process_request_params({public_id: "folder/file", version: "1234", colors: undefined}, {api_key: '<KEY>', api_secret: 'b', unsigned: true})
expect(params).to.eql {public_id: "folder/file", version: "1234"}
params = cloudinary.utils.process_request_params({public_id: "folder/file", version: "1234"}, {api_key: '<KEY>', api_secret: 'b'})
expect(params).to.eql {public_id: "folder/file", version: "1234", signature: "7a3349cbb373e4812118d625047ede50b90e7b67", api_key: "<KEY>"}
it "should support preloaded identifier format", ->
result = cloudinary.utils.url("raw/private/v123456/document.docx")
expect(result).to.eql "http://res.cloudinary.com/test123/raw/private/v123456/document.docx"
result = cloudinary.utils.url("image/private/v123456/img.jpg", crop: "scale", width: "1.0")
expect(result).to.eql "http://res.cloudinary.com/test123/image/private/c_scale,w_1.0/v123456/img.jpg"
it "should add responsive width transformation", ->
options =
width: 100
height: 100
crop: "crop"
responsive_width: true
result = cloudinary.utils.url("test", options)
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_crop,h_100,w_100/c_limit,w_auto/test"
expect(options).to.eql {responsive: true}
cloudinary.config({responsive_width_transformation: {width: "auto", crop: "pad"}})
options =
width: 100
height: 100
crop: "crop"
responsive_width: true
result = cloudinary.utils.url("test", options)
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_crop,h_100,w_100/c_pad,w_auto/test"
expect(options).to.eql {responsive: true}
| true | expect = require("expect.js")
cloudinary = require("../cloudinary.js")
describe "cloudinary", ->
beforeEach ->
cloudinary.config cloud_name: "test123", api_key: 'PI:KEY:<KEY>END_PI', api_secret: 'PI:KEY:<KEY>END_PI', responsive_width_transformation: null
it "should use cloud_name from config", ->
result = cloudinary.utils.url("test")
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/test"
it "should allow overriding cloud_name in options", ->
options = cloud_name: "test321"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test321/image/upload/test"
it "should use format from options", ->
options = format: "jpg"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/test.jpg"
it "should use default secure distribution if secure=true", ->
options = secure: true
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "https://res.cloudinary.com/test123/image/upload/test"
it "should default to akamai if secure is given with private_cdn and no secure_distribution", ->
options = secure: true, private_cdn: true
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "https://test123-res.cloudinary.com/image/upload/test"
it "should not add cloud_name if secure private_cdn and secure non akamai secure_distribution", ->
options = secure: true, private_cdn: true, secure_distribution: "something.cloudfront.net"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "https://something.cloudfront.net/image/upload/test"
it "should not add cloud_name if private_cdn and not secure", ->
options = private_cdn: true
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://test123-res.cloudinary.com/image/upload/test"
it "should use width and height from options only if crop is given", ->
options =
width: 100
height: 100
result = cloudinary.utils.url("test", options)
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/h_100,w_100/test"
expect(options).to.eql
width: 100
height: 100
options =
width: 100
height: 100
crop: "crop"
result = cloudinary.utils.url("test", options)
expect(options).to.eql
width: 100
height: 100
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_crop,h_100,w_100/test"
it "should not pass width and height to html in case of fit or limit crop", ->
options =
width: 100
height: 100
crop: "limit"
result = cloudinary.utils.url("test", options)
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_limit,h_100,w_100/test"
expect(options).to.eql {}
options =
width: 100
height: 100
crop: "fit"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_fit,h_100,w_100/test"
it "should not pass width and height to html in case angle was used", ->
options =
width: 100
height: 100
crop: "scale"
angle: "auto"
result = cloudinary.utils.url("test", options)
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/a_auto,c_scale,h_100,w_100/test"
expect(options).to.eql {}
it "should use x, y, radius, opacity, prefix, gravity and quality from options", ->
options =
x: 1
y: 2
radius: 3
gravity: "center"
quality: 0.4
prefix: "a"
opacity: 20
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/g_center,o_20,p_a,q_0.4,r_3,x_1,y_2/test"
it "should support named transformation", ->
options = transformation: "blip"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/t_blip/test"
it "should support array of named transformations", ->
options = transformation: [ "blip", "blop" ]
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/t_blip.blop/test"
it "should support base transformation", ->
options =
transformation:
x: 100
y: 100
crop: "fill"
crop: "crop"
width: 100
result = cloudinary.utils.url("test", options)
expect(options).to.eql width: 100
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_fill,x_100,y_100/c_crop,w_100/test"
it "should support array of base transformations", ->
options =
transformation: [{x: 100, y: 100, width: 200, crop: "fill"} , {radius: 10} ]
crop: "crop"
width: 100
result = cloudinary.utils.url("test", options)
expect(options).to.eql width: 100
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_fill,w_200,x_100,y_100/r_10/c_crop,w_100/test"
it "should not include empty transformations", ->
options = transformation: [ {}, {x: 100, y: 100, crop: "fill"} , {} ]
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_fill,x_100,y_100/test"
it "should support size", ->
options =
size: "10x10"
crop: "crop"
result = cloudinary.utils.url("test", options)
expect(options).to.eql
width: "10"
height: "10"
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_crop,h_10,w_10/test"
it "should use type from options", ->
options = type: "facebook"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/facebook/test"
it "should use resource_type from options", ->
options = resource_type: "raw"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/raw/upload/test"
it "should ignore http links only if type is not given ", ->
options = type: null
result = cloudinary.utils.url("http://example.com/", options)
expect(options).to.eql {}
expect(result).to.eql "http://example.com/"
options = type: "fetch"
result = cloudinary.utils.url("http://example.com/", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/fetch/http://example.com/"
it "should escape fetch urls", ->
options = type: "fetch"
result = cloudinary.utils.url("http://blah.com/hello?a=b", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/fetch/http://blah.com/hello%3Fa%3Db"
it "should escape http urls", ->
options = type: "youtube"
result = cloudinary.utils.url("http://www.youtube.com/watch?v=d9NF2edxy-M", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/youtube/http://www.youtube.com/watch%3Fv%3Dd9NF2edxy-M"
it "should support background", ->
options = background: "red"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/b_red/test"
options = background: "#112233"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/b_rgb:112233/test"
it "should support default_image", ->
options = default_image: "default"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/d_default/test"
it "should support angle", ->
options = angle: 12
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/a_12/test"
it "should support format for fetch urls", ->
options =
format: "jpg"
type: "fetch"
result = cloudinary.utils.url("http://cloudinary.com/images/logo.png", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/fetch/f_jpg/http://cloudinary.com/images/logo.png"
it "should support effect", ->
options = effect: "sepia"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/e_sepia/test"
it "should support effect with param", ->
options = effect: [ "sepia", 10 ]
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/e_sepia:10/test"
layers =
overlay: "l"
underlay: "u"
for layer of layers
it "should support #{layer}", ->
options = {}
options[layer] = "text:hello"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/#{layers[layer]}_text:hello/test"
it "should not pass width/height to html for #{layer}", ->
options =
height: 100
width: 100
options[layer] = "text:hello"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/h_100,#{layers[layer]}_text:hello,w_100/test"
it "should correctly sign api requests", ->
expect(cloudinary.utils.api_sign_request({hello: null, goodbye: 12, world: "problem"}, "1234")).to.eql "f05cfe85cee78e7e997b3c7da47ba212dcbf1ea5"
it "should correctly build signed preloaded image", ->
expect(cloudinary.utils.signed_preloaded_image(
resource_type: "image"
version: 1251251251
public_id: "abcd"
format: "jpg"
signature: "123515adfa151"
)).to.eql "image/upload/v1251251251/abcd.jpg#123515adfa151"
it "should support density", ->
options = density: 150
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/dn_150/test"
it "should support page", ->
options = page: 5
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/pg_5/test"
it "should support external cname", ->
options = cname: "hello.com"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://hello.com/test123/image/upload/test"
it "should support external cname with cdn_subdomain on", ->
options = cname: "hello.com", cdn_subdomain: true
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://a2.hello.com/test123/image/upload/test"
it "should support border", ->
options = border: {width: 5}
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/bo_5px_solid_black/test"
options = border: {width: 5, color: "#ffaabbdd"}
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/bo_5px_solid_rgb:ffaabbdd/test"
options = border: "1px_solid_blue"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/bo_1px_solid_blue/test"
it "should support flags", ->
options = flags: "abc"
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/fl_abc/test"
options = flags: ["abc", "def"]
result = cloudinary.utils.url("test", options)
expect(options).to.eql {}
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/fl_abc.def/test"
it "should add version if public_id contains /", ->
result = cloudinary.utils.url("folder/test")
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/v1/folder/test"
result = cloudinary.utils.url("folder/test", version: 123)
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/v123/folder/test"
it "should not add version if public_id contains version already", ->
result = cloudinary.utils.url("v1234/test")
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/v1234/test"
it "should allow to shorted image/upload urls", ->
result = cloudinary.utils.url("test", shorten: true)
expect(result).to.eql "http://res.cloudinary.com/test123/iu/test"
it "should escape public_ids", ->
tests = {
"a b": "a%20b",
"a+b": "a%2Bb",
"a%20b": "a%20b",
"a-b": "a-b",
"a??b": "a%3F%3Fb"}
for source, target of tests
result = cloudinary.utils.url(source)
expect(result).to.eql("http://res.cloudinary.com/test123/image/upload/" + target)
it "should correctly sign a url", ->
expected = "http://res.cloudinary.com/test123/image/upload/s--Ai4Znfl3--/c_crop,h_20,w_10/v1234/image.jpg"
actual = cloudinary.utils.url("image.jpg", version: 1234, crop: "crop", width: 10, height: 20, sign_url: true)
expect(actual).to.eql expected
expected = "http://res.cloudinary.com/test123/image/upload/s----SjmNDA--/v1234/image.jpg"
actual = cloudinary.utils.url("image.jpg", version: 1234, sign_url: true)
expect(actual).to.eql expected
expected = "http://res.cloudinary.com/test123/image/upload/s--Ai4Znfl3--/c_crop,h_20,w_10/image.jpg"
actual = cloudinary.utils.url("image.jpg", crop: "crop", width: 10, height: 20, sign_url: true)
expect(actual).to.eql expected
it "should correctly sign_request", ->
params = cloudinary.utils.sign_request({public_id: "folder/file", version: "1234"}, {api_key: 'PI:KEY:<KEY>END_PI', api_secret: 'b'})
expect(params).to.eql {public_id: "folder/file", version: "1234", signature: "7a3349cbb373e4812118d625047ede50b90e7b67", api_key: "PI:KEY:<KEY>END_PI"}
it "should correctly process_request_params", ->
params = cloudinary.utils.process_request_params({public_id: "folder/file", version: "1234", colors: undefined}, {api_key: 'PI:KEY:<KEY>END_PI', api_secret: 'b', unsigned: true})
expect(params).to.eql {public_id: "folder/file", version: "1234"}
params = cloudinary.utils.process_request_params({public_id: "folder/file", version: "1234"}, {api_key: 'PI:KEY:<KEY>END_PI', api_secret: 'b'})
expect(params).to.eql {public_id: "folder/file", version: "1234", signature: "7a3349cbb373e4812118d625047ede50b90e7b67", api_key: "PI:KEY:<KEY>END_PI"}
it "should support preloaded identifier format", ->
result = cloudinary.utils.url("raw/private/v123456/document.docx")
expect(result).to.eql "http://res.cloudinary.com/test123/raw/private/v123456/document.docx"
result = cloudinary.utils.url("image/private/v123456/img.jpg", crop: "scale", width: "1.0")
expect(result).to.eql "http://res.cloudinary.com/test123/image/private/c_scale,w_1.0/v123456/img.jpg"
it "should add responsive width transformation", ->
options =
width: 100
height: 100
crop: "crop"
responsive_width: true
result = cloudinary.utils.url("test", options)
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_crop,h_100,w_100/c_limit,w_auto/test"
expect(options).to.eql {responsive: true}
cloudinary.config({responsive_width_transformation: {width: "auto", crop: "pad"}})
options =
width: 100
height: 100
crop: "crop"
responsive_width: true
result = cloudinary.utils.url("test", options)
expect(result).to.eql "http://res.cloudinary.com/test123/image/upload/c_crop,h_100,w_100/c_pad,w_auto/test"
expect(options).to.eql {responsive: true}
|
[
{
"context": "/real/message'\n auth:\n username: 'real-device-uuid'\n password: 'real-device-token'\n ",
"end": 1341,
"score": 0.9755188822746277,
"start": 1325,
"tag": "USERNAME",
"value": "real-device-uuid"
},
{
"context": "username: 'real-device-uui... | test/integration/real-message-event-spec.coffee | octoblu/shadow-service | 0 | http = require 'http'
request = require 'request'
shmock = require '@octoblu/shmock'
Server = require '../../src/server'
describe 'POST /real/message', ->
beforeEach (done) ->
@meshblu = shmock 0xb33f
meshbluConfig =
server: 'localhost'
port: 0xb33f
@server = new Server
port: undefined
meshbluConfig: meshbluConfig
disableLogging: true
@server.run =>
@serverPort = @server.address().port
done()
afterEach (done) ->
@meshblu.close done
afterEach (done) ->
@server.stop done
describe 'when a real device has 1 shadow', ->
beforeEach (done) ->
deviceAuth = new Buffer('real-device-uuid:real-device-token').toString('base64')
@meshblu
.get '/v2/whoami'
.set 'Authorization', "Basic #{deviceAuth}"
.reply 200, uuid: 'real-device-uuid', shadows: [{uuid: 'virtual-device-uuid'}]
@broadcastAsVirtualMeshbluDevice = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{deviceAuth}"
.set 'X-Meshblu-As', 'virtual-device-uuid'
.send
devices: ['*']
topic: 'Greeting'
payload: {foo: 'bar'}
.reply 204
options =
baseUrl: "http://localhost:#{@serverPort}"
uri: '/real/message'
auth:
username: 'real-device-uuid'
password: 'real-device-token'
json:
devices: ['*']
topic: 'Greeting'
payload: {foo: 'bar'}
request.post options, (error, @response, @body) => done error
it 'should return a 204', ->
expect(@response.statusCode).to.equal 204, @body
it 'should update the virtual meshblu device', ->
@broadcastAsVirtualMeshbluDevice.done()
describe 'when a real device does not have permission to update the shadow', ->
beforeEach (done) ->
deviceAuth = new Buffer('real-device-uuid:real-device-token').toString('base64')
@meshblu
.get '/v2/whoami'
.set 'Authorization', "Basic #{deviceAuth}"
.reply 200, uuid: 'real-device-uuid', shadows: [{uuid: 'virtual-device-uuid'}]
@broadcastAsVirtualMeshbluDevice = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{deviceAuth}"
.set 'X-Meshblu-As', 'virtual-device-uuid'
.send
devices: ['*']
topic: 'Greeting'
payload: {foo: 'bar'}
.reply 403, error: 'Forbidden'
options =
baseUrl: "http://localhost:#{@serverPort}"
uri: '/real/message'
auth:
username: 'real-device-uuid'
password: 'real-device-token'
json:
devices: ['*']
topic: 'Greeting'
payload: {foo: 'bar'}
request.post options, (error, @response, @body) => done error
it 'should return a 403', ->
expect(@response.statusCode).to.equal 403, @body
describe 'when a real device has no shadow', ->
beforeEach (done) ->
deviceAuth = new Buffer('real-device-uuid:real-device-token').toString('base64')
@meshblu
.get '/v2/whoami'
.set 'Authorization', "Basic #{deviceAuth}"
.reply 200, uuid: 'real-device-uuid'
options =
baseUrl: "http://localhost:#{@serverPort}"
uri: '/real/message'
auth:
username: 'real-device-uuid'
password: 'real-device-token'
json:
devices: ['*']
topic: 'Greeting'
payload: {foo: 'bar'}
request.post options, (error, @response, @body) => done error
it 'should return a 204', ->
expect(@response.statusCode).to.equal 204, @body
| 123246 | http = require 'http'
request = require 'request'
shmock = require '@octoblu/shmock'
Server = require '../../src/server'
describe 'POST /real/message', ->
beforeEach (done) ->
@meshblu = shmock 0xb33f
meshbluConfig =
server: 'localhost'
port: 0xb33f
@server = new Server
port: undefined
meshbluConfig: meshbluConfig
disableLogging: true
@server.run =>
@serverPort = @server.address().port
done()
afterEach (done) ->
@meshblu.close done
afterEach (done) ->
@server.stop done
describe 'when a real device has 1 shadow', ->
beforeEach (done) ->
deviceAuth = new Buffer('real-device-uuid:real-device-token').toString('base64')
@meshblu
.get '/v2/whoami'
.set 'Authorization', "Basic #{deviceAuth}"
.reply 200, uuid: 'real-device-uuid', shadows: [{uuid: 'virtual-device-uuid'}]
@broadcastAsVirtualMeshbluDevice = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{deviceAuth}"
.set 'X-Meshblu-As', 'virtual-device-uuid'
.send
devices: ['*']
topic: 'Greeting'
payload: {foo: 'bar'}
.reply 204
options =
baseUrl: "http://localhost:#{@serverPort}"
uri: '/real/message'
auth:
username: 'real-device-uuid'
password: '<PASSWORD>'
json:
devices: ['*']
topic: 'Greeting'
payload: {foo: 'bar'}
request.post options, (error, @response, @body) => done error
it 'should return a 204', ->
expect(@response.statusCode).to.equal 204, @body
it 'should update the virtual meshblu device', ->
@broadcastAsVirtualMeshbluDevice.done()
describe 'when a real device does not have permission to update the shadow', ->
beforeEach (done) ->
deviceAuth = new Buffer('real-device-uuid:real-device-token').toString('base64')
@meshblu
.get '/v2/whoami'
.set 'Authorization', "Basic #{deviceAuth}"
.reply 200, uuid: 'real-device-uuid', shadows: [{uuid: 'virtual-device-uuid'}]
@broadcastAsVirtualMeshbluDevice = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{deviceAuth}"
.set 'X-Meshblu-As', 'virtual-device-uuid'
.send
devices: ['*']
topic: 'Greeting'
payload: {foo: 'bar'}
.reply 403, error: 'Forbidden'
options =
baseUrl: "http://localhost:#{@serverPort}"
uri: '/real/message'
auth:
username: 'real-device-uuid'
password: '<PASSWORD>'
json:
devices: ['*']
topic: 'Greeting'
payload: {foo: 'bar'}
request.post options, (error, @response, @body) => done error
it 'should return a 403', ->
expect(@response.statusCode).to.equal 403, @body
describe 'when a real device has no shadow', ->
beforeEach (done) ->
deviceAuth = new Buffer('real-device-uuid:real-device-token').toString('base64')
@meshblu
.get '/v2/whoami'
.set 'Authorization', "Basic #{deviceAuth}"
.reply 200, uuid: 'real-device-uuid'
options =
baseUrl: "http://localhost:#{@serverPort}"
uri: '/real/message'
auth:
username: 'real-device-uuid'
password: '<PASSWORD>'
json:
devices: ['*']
topic: 'Greeting'
payload: {foo: 'bar'}
request.post options, (error, @response, @body) => done error
it 'should return a 204', ->
expect(@response.statusCode).to.equal 204, @body
| true | http = require 'http'
request = require 'request'
shmock = require '@octoblu/shmock'
Server = require '../../src/server'
describe 'POST /real/message', ->
beforeEach (done) ->
@meshblu = shmock 0xb33f
meshbluConfig =
server: 'localhost'
port: 0xb33f
@server = new Server
port: undefined
meshbluConfig: meshbluConfig
disableLogging: true
@server.run =>
@serverPort = @server.address().port
done()
afterEach (done) ->
@meshblu.close done
afterEach (done) ->
@server.stop done
describe 'when a real device has 1 shadow', ->
beforeEach (done) ->
deviceAuth = new Buffer('real-device-uuid:real-device-token').toString('base64')
@meshblu
.get '/v2/whoami'
.set 'Authorization', "Basic #{deviceAuth}"
.reply 200, uuid: 'real-device-uuid', shadows: [{uuid: 'virtual-device-uuid'}]
@broadcastAsVirtualMeshbluDevice = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{deviceAuth}"
.set 'X-Meshblu-As', 'virtual-device-uuid'
.send
devices: ['*']
topic: 'Greeting'
payload: {foo: 'bar'}
.reply 204
options =
baseUrl: "http://localhost:#{@serverPort}"
uri: '/real/message'
auth:
username: 'real-device-uuid'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
json:
devices: ['*']
topic: 'Greeting'
payload: {foo: 'bar'}
request.post options, (error, @response, @body) => done error
it 'should return a 204', ->
expect(@response.statusCode).to.equal 204, @body
it 'should update the virtual meshblu device', ->
@broadcastAsVirtualMeshbluDevice.done()
describe 'when a real device does not have permission to update the shadow', ->
beforeEach (done) ->
deviceAuth = new Buffer('real-device-uuid:real-device-token').toString('base64')
@meshblu
.get '/v2/whoami'
.set 'Authorization', "Basic #{deviceAuth}"
.reply 200, uuid: 'real-device-uuid', shadows: [{uuid: 'virtual-device-uuid'}]
@broadcastAsVirtualMeshbluDevice = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{deviceAuth}"
.set 'X-Meshblu-As', 'virtual-device-uuid'
.send
devices: ['*']
topic: 'Greeting'
payload: {foo: 'bar'}
.reply 403, error: 'Forbidden'
options =
baseUrl: "http://localhost:#{@serverPort}"
uri: '/real/message'
auth:
username: 'real-device-uuid'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
json:
devices: ['*']
topic: 'Greeting'
payload: {foo: 'bar'}
request.post options, (error, @response, @body) => done error
it 'should return a 403', ->
expect(@response.statusCode).to.equal 403, @body
describe 'when a real device has no shadow', ->
beforeEach (done) ->
deviceAuth = new Buffer('real-device-uuid:real-device-token').toString('base64')
@meshblu
.get '/v2/whoami'
.set 'Authorization', "Basic #{deviceAuth}"
.reply 200, uuid: 'real-device-uuid'
options =
baseUrl: "http://localhost:#{@serverPort}"
uri: '/real/message'
auth:
username: 'real-device-uuid'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
json:
devices: ['*']
topic: 'Greeting'
payload: {foo: 'bar'}
request.post options, (error, @response, @body) => done error
it 'should return a 204', ->
expect(@response.statusCode).to.equal 204, @body
|
[
{
"context": "###\n# YCatalyst\n# Copyright(c) 2011 Jae Kwon (jae@ycatalyst.com)\n# MIT Licensed\n###\n\nrequire.p",
"end": 44,
"score": 0.9998346567153931,
"start": 36,
"tag": "NAME",
"value": "Jae Kwon"
},
{
"context": "###\n# YCatalyst\n# Copyright(c) 2011 Jae Kwon (jae@ycatalyst.... | logic/applications.coffee | jaekwon/YCatalyst | 3 | ###
# YCatalyst
# Copyright(c) 2011 Jae Kwon (jae@ycatalyst.com)
# MIT Licensed
###
require.paths.unshift 'vendor'
mongo = require '../mongo'
mailer = require './mailer'
utils = require '../utils'
config = require '../config'
# members can upvote/downvote, admins can also invite/delete
# vote: 'upvote', 'downvote', 'invite', or 'delete'
exports.vote = (application_id, current_user, vote, cb) ->
switch vote
when 'accept'
update_operation = {$addToSet: {accepted_by: current_user.username}, $pull: {denied_by: current_user.username}}
when 'deny'
update_operation = {$addToSet: {denied_by: current_user.username}, $pull: {accepted_by: current_user.username}}
when 'invite'
if not current_user.is_admin
cb("unauthorized")
return
update_operation = {$set: {invited_at: new Date(), invited_by: current_user.username}}
# also send an invite
do_invite(application_id)
when 'delete'
if not current_user.is_admin
cb("unauthorized")
return
update_operation = {$set: {deleted_at: new Date(), deleted_by: current_user.username}}
mongo.applications.update {_id: application_id}, update_operation, (err, stuff) ->
if err
cb(err)
return
cb()
# send the newly invited user an email with an invitation code.
do_invite = (application_id) ->
mongo.applications.findOne {_id: application_id}, (err, application) ->
new_invite = _id: utils.randid(), application_id: application_id, created_at: new Date()
mongo.invites.save new_invite, (err, invite) ->
mailer.send_invitation application, invite
| 184323 | ###
# YCatalyst
# Copyright(c) 2011 <NAME> (<EMAIL>)
# MIT Licensed
###
require.paths.unshift 'vendor'
mongo = require '../mongo'
mailer = require './mailer'
utils = require '../utils'
config = require '../config'
# members can upvote/downvote, admins can also invite/delete
# vote: 'upvote', 'downvote', 'invite', or 'delete'
exports.vote = (application_id, current_user, vote, cb) ->
switch vote
when 'accept'
update_operation = {$addToSet: {accepted_by: current_user.username}, $pull: {denied_by: current_user.username}}
when 'deny'
update_operation = {$addToSet: {denied_by: current_user.username}, $pull: {accepted_by: current_user.username}}
when 'invite'
if not current_user.is_admin
cb("unauthorized")
return
update_operation = {$set: {invited_at: new Date(), invited_by: current_user.username}}
# also send an invite
do_invite(application_id)
when 'delete'
if not current_user.is_admin
cb("unauthorized")
return
update_operation = {$set: {deleted_at: new Date(), deleted_by: current_user.username}}
mongo.applications.update {_id: application_id}, update_operation, (err, stuff) ->
if err
cb(err)
return
cb()
# send the newly invited user an email with an invitation code.
do_invite = (application_id) ->
mongo.applications.findOne {_id: application_id}, (err, application) ->
new_invite = _id: utils.randid(), application_id: application_id, created_at: new Date()
mongo.invites.save new_invite, (err, invite) ->
mailer.send_invitation application, invite
| true | ###
# YCatalyst
# Copyright(c) 2011 PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI)
# MIT Licensed
###
require.paths.unshift 'vendor'
mongo = require '../mongo'
mailer = require './mailer'
utils = require '../utils'
config = require '../config'
# members can upvote/downvote, admins can also invite/delete
# vote: 'upvote', 'downvote', 'invite', or 'delete'
exports.vote = (application_id, current_user, vote, cb) ->
switch vote
when 'accept'
update_operation = {$addToSet: {accepted_by: current_user.username}, $pull: {denied_by: current_user.username}}
when 'deny'
update_operation = {$addToSet: {denied_by: current_user.username}, $pull: {accepted_by: current_user.username}}
when 'invite'
if not current_user.is_admin
cb("unauthorized")
return
update_operation = {$set: {invited_at: new Date(), invited_by: current_user.username}}
# also send an invite
do_invite(application_id)
when 'delete'
if not current_user.is_admin
cb("unauthorized")
return
update_operation = {$set: {deleted_at: new Date(), deleted_by: current_user.username}}
mongo.applications.update {_id: application_id}, update_operation, (err, stuff) ->
if err
cb(err)
return
cb()
# send the newly invited user an email with an invitation code.
do_invite = (application_id) ->
mongo.applications.findOne {_id: application_id}, (err, application) ->
new_invite = _id: utils.randid(), application_id: application_id, created_at: new Date()
mongo.invites.save new_invite, (err, invite) ->
mailer.send_invitation application, invite
|
[
{
"context": "tores the object heirarchy for the PDF document\nBy Devon Govett\n###\n\nPDFReference = require './reference'\n\nclass ",
"end": 85,
"score": 0.9997627139091492,
"start": 73,
"tag": "NAME",
"value": "Devon Govett"
}
] | lib/store.coffee | stanfeldman/pdf.js | 1 | ###
PDFObjectStore - stores the object heirarchy for the PDF document
By Devon Govett
###
PDFReference = require './reference'
class PDFObjectStore
constructor: ->
@objects = {}
@length = 0
@root = @ref
Type: 'Catalog'
@root.data['Pages'] = @ref
Type: 'Pages'
Count: 0
Kids: []
@pages = @root.data['Pages']
ref: (data) ->
@push ++@length, data
push: (id, data) ->
ref = new PDFReference(id, data)
@objects[id] = ref
return ref
addPage: (page) ->
@pages.data['Kids'].push(page.dictionary)
@pages.data['Count']++
module.exports = PDFObjectStore | 216355 | ###
PDFObjectStore - stores the object heirarchy for the PDF document
By <NAME>
###
PDFReference = require './reference'
class PDFObjectStore
constructor: ->
@objects = {}
@length = 0
@root = @ref
Type: 'Catalog'
@root.data['Pages'] = @ref
Type: 'Pages'
Count: 0
Kids: []
@pages = @root.data['Pages']
ref: (data) ->
@push ++@length, data
push: (id, data) ->
ref = new PDFReference(id, data)
@objects[id] = ref
return ref
addPage: (page) ->
@pages.data['Kids'].push(page.dictionary)
@pages.data['Count']++
module.exports = PDFObjectStore | true | ###
PDFObjectStore - stores the object heirarchy for the PDF document
By PI:NAME:<NAME>END_PI
###
PDFReference = require './reference'
class PDFObjectStore
constructor: ->
@objects = {}
@length = 0
@root = @ref
Type: 'Catalog'
@root.data['Pages'] = @ref
Type: 'Pages'
Count: 0
Kids: []
@pages = @root.data['Pages']
ref: (data) ->
@push ++@length, data
push: (id, data) ->
ref = new PDFReference(id, data)
@objects[id] = ref
return ref
addPage: (page) ->
@pages.data['Kids'].push(page.dictionary)
@pages.data['Count']++
module.exports = PDFObjectStore |
[
{
"context": "ntroller - table base data manage class\n# Coded by Hajime Oh-yake 2013.09.10\n#*************************************",
"end": 123,
"score": 0.9998907446861267,
"start": 109,
"tag": "NAME",
"value": "Hajime Oh-yake"
}
] | JSKit/01_JSTableViewController.coffee | digitarhythm/codeJS | 0 | #*****************************************
# JSTableViewController - table base data manage class
# Coded by Hajime Oh-yake 2013.09.10
#*****************************************
class JSTableViewController extends JSObject
constructor:(frame)->
super()
@_tableViewStyle = "UITableViewStylePlain"
@_bgColor = JSColor("white")
@_tableView = new JSTableView(frame)
@_tableView.delegate = @_self
@_tableView.dataSource = @_self
| 125531 | #*****************************************
# JSTableViewController - table base data manage class
# Coded by <NAME> 2013.09.10
#*****************************************
class JSTableViewController extends JSObject
constructor:(frame)->
super()
@_tableViewStyle = "UITableViewStylePlain"
@_bgColor = JSColor("white")
@_tableView = new JSTableView(frame)
@_tableView.delegate = @_self
@_tableView.dataSource = @_self
| true | #*****************************************
# JSTableViewController - table base data manage class
# Coded by PI:NAME:<NAME>END_PI 2013.09.10
#*****************************************
class JSTableViewController extends JSObject
constructor:(frame)->
super()
@_tableViewStyle = "UITableViewStylePlain"
@_bgColor = JSColor("white")
@_tableView = new JSTableView(frame)
@_tableView.delegate = @_self
@_tableView.dataSource = @_self
|
[
{
"context": "les\n- .git\n- test\n'''\n\nsneaky 'ws', ->\n @user = 'jarvis'\n @path = '/usr/local/teambition/talk-vote-bot'\n",
"end": 80,
"score": 0.9995924234390259,
"start": 74,
"tag": "USERNAME",
"value": "jarvis"
},
{
"context": "talk-vote-bot\n '''\n\nsneaky 'prod', ->\n @... | Skyfile.coffee | jianliaoim/talk-vote-bot | 5 | filter = '''
- node_modules
- .git
- test
'''
sneaky 'ws', ->
@user = 'jarvis'
@path = '/usr/local/teambition/talk-vote-bot'
@filter = filter
@host = 'talk.ci'
@after '''
mkdir -p ../share/node_modules \
&& ln -sfn ../share/node_modules . \
&& rm -rf config \
&& ln -sfn ../share/config . \
&& npm i --production && pm2 restart talk-vote-bot
'''
sneaky 'prod', ->
@user = 'jarvis'
@path = '/data/app/talk-vote-bot'
@filter = filter
@host = '120.26.2.181'
@after '''
mkdir -p ../share/node_modules \
&& ln -sfn ../share/node_modules . \
&& rm -rf config \
&& ln -sfn ../share/config . \
&& npm i --production && pm2 restart talk-vote-bot
'''
| 99580 | filter = '''
- node_modules
- .git
- test
'''
sneaky 'ws', ->
@user = 'jarvis'
@path = '/usr/local/teambition/talk-vote-bot'
@filter = filter
@host = 'talk.ci'
@after '''
mkdir -p ../share/node_modules \
&& ln -sfn ../share/node_modules . \
&& rm -rf config \
&& ln -sfn ../share/config . \
&& npm i --production && pm2 restart talk-vote-bot
'''
sneaky 'prod', ->
@user = 'jarvis'
@path = '/data/app/talk-vote-bot'
@filter = filter
@host = '172.16.17.32'
@after '''
mkdir -p ../share/node_modules \
&& ln -sfn ../share/node_modules . \
&& rm -rf config \
&& ln -sfn ../share/config . \
&& npm i --production && pm2 restart talk-vote-bot
'''
| true | filter = '''
- node_modules
- .git
- test
'''
sneaky 'ws', ->
@user = 'jarvis'
@path = '/usr/local/teambition/talk-vote-bot'
@filter = filter
@host = 'talk.ci'
@after '''
mkdir -p ../share/node_modules \
&& ln -sfn ../share/node_modules . \
&& rm -rf config \
&& ln -sfn ../share/config . \
&& npm i --production && pm2 restart talk-vote-bot
'''
sneaky 'prod', ->
@user = 'jarvis'
@path = '/data/app/talk-vote-bot'
@filter = filter
@host = 'PI:IP_ADDRESS:172.16.17.32END_PI'
@after '''
mkdir -p ../share/node_modules \
&& ln -sfn ../share/node_modules . \
&& rm -rf config \
&& ln -sfn ../share/config . \
&& npm i --production && pm2 restart talk-vote-bot
'''
|
[
{
"context": "###\n# Author: iTonyYo <ceo@holaever.com> (https://github.com/iTonyYo)\n#",
"end": 21,
"score": 0.9981597065925598,
"start": 14,
"tag": "USERNAME",
"value": "iTonyYo"
},
{
"context": "###\n# Author: iTonyYo <ceo@holaever.com> (https://github.com/iTonyYo)\n# Last Update ... | node_modules/node-find-folder/gulp/coffeescript.coffee | long-grass/mikey | 0 | ###
# Author: iTonyYo <ceo@holaever.com> (https://github.com/iTonyYo)
# Last Update (author): iTonyYo <ceo@holaever.com> (https://github.com/iTonyYo)
###
'use strict'
cfg = require '../config.json'
gulp = require 'gulp'
$ = require('gulp-load-plugins')()
clp = require './clp'
mrg = require 'merge-stream'
lazypipe = require 'lazypipe'
_coffeelint = lazypipe()
.pipe $.coffeelint, 'coffeelint.json'
.pipe $.coffeelint.reporter
_cs = lazypipe()
.pipe ->
$.if clp.coffeelint, _coffeelint()
.pipe $.coffee, cfg.cs_opts
gulp.task 'coffeescript', ->
ff_src = gulp.src cfg.path.dev + 'node.find.folder.coffee'
test_src = gulp.src cfg.path.dev + 'test.coffee'
ff_src.pipe $.changed cfg.path.project_root
.pipe $.plumber()
.pipe _cs()
.pipe $.rename
dirname: ''
basename: 'index'
extname: '.js'
.pipe gulp.dest cfg.path.project_root
test_src.pipe $.changed cfg.path.test
.pipe $.plumber()
.pipe _cs()
.pipe gulp.dest cfg.path.test
mrg ff_src, test_src
| 75807 | ###
# Author: iTonyYo <<EMAIL>> (https://github.com/iTonyYo)
# Last Update (author): iTonyYo <<EMAIL>> (https://github.com/iTonyYo)
###
'use strict'
cfg = require '../config.json'
gulp = require 'gulp'
$ = require('gulp-load-plugins')()
clp = require './clp'
mrg = require 'merge-stream'
lazypipe = require 'lazypipe'
_coffeelint = lazypipe()
.pipe $.coffeelint, 'coffeelint.json'
.pipe $.coffeelint.reporter
_cs = lazypipe()
.pipe ->
$.if clp.coffeelint, _coffeelint()
.pipe $.coffee, cfg.cs_opts
gulp.task 'coffeescript', ->
ff_src = gulp.src cfg.path.dev + 'node.find.folder.coffee'
test_src = gulp.src cfg.path.dev + 'test.coffee'
ff_src.pipe $.changed cfg.path.project_root
.pipe $.plumber()
.pipe _cs()
.pipe $.rename
dirname: ''
basename: 'index'
extname: '.js'
.pipe gulp.dest cfg.path.project_root
test_src.pipe $.changed cfg.path.test
.pipe $.plumber()
.pipe _cs()
.pipe gulp.dest cfg.path.test
mrg ff_src, test_src
| true | ###
# Author: iTonyYo <PI:EMAIL:<EMAIL>END_PI> (https://github.com/iTonyYo)
# Last Update (author): iTonyYo <PI:EMAIL:<EMAIL>END_PI> (https://github.com/iTonyYo)
###
'use strict'
cfg = require '../config.json'
gulp = require 'gulp'
$ = require('gulp-load-plugins')()
clp = require './clp'
mrg = require 'merge-stream'
lazypipe = require 'lazypipe'
_coffeelint = lazypipe()
.pipe $.coffeelint, 'coffeelint.json'
.pipe $.coffeelint.reporter
_cs = lazypipe()
.pipe ->
$.if clp.coffeelint, _coffeelint()
.pipe $.coffee, cfg.cs_opts
gulp.task 'coffeescript', ->
ff_src = gulp.src cfg.path.dev + 'node.find.folder.coffee'
test_src = gulp.src cfg.path.dev + 'test.coffee'
ff_src.pipe $.changed cfg.path.project_root
.pipe $.plumber()
.pipe _cs()
.pipe $.rename
dirname: ''
basename: 'index'
extname: '.js'
.pipe gulp.dest cfg.path.project_root
test_src.pipe $.changed cfg.path.test
.pipe $.plumber()
.pipe _cs()
.pipe gulp.dest cfg.path.test
mrg ff_src, test_src
|
[
{
"context": "exports.petname = ->\n \"scruffy\"\n\n",
"end": 31,
"score": 0.8514324426651001,
"start": 24,
"tag": "NAME",
"value": "scruffy"
}
] | src/petname.coffee | toolbear/petname | 0 | exports.petname = ->
"scruffy"
| 113473 | exports.petname = ->
"<NAME>"
| true | exports.petname = ->
"PI:NAME:<NAME>END_PI"
|
[
{
"context": "###\n# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>\n# Copyright (C) 2014 Jesús Espino ",
"end": 38,
"score": 0.9998892545700073,
"start": 25,
"tag": "NAME",
"value": "Andrey Antukh"
},
{
"context": "###\n# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>\n# Copyright... | public/taiga-front/app/coffee/modules/admin/project-values.coffee | mabotech/maboss | 0 | ###
# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino Garcia <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán Merino <bameda@dbarragan.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/admin/project-profile.coffee
###
taiga = @.taiga
mixOf = @.taiga.mixOf
trim = @.taiga.trim
toString = @.taiga.toString
joinStr = @.taiga.joinStr
groupBy = @.taiga.groupBy
bindOnce = @.taiga.bindOnce
debounce = @.taiga.debounce
module = angular.module("taigaAdmin")
#############################################################################
## Project values Controller
#############################################################################
class ProjectValuesController extends mixOf(taiga.Controller, taiga.PageMixin)
@.$inject = [
"$scope",
"$rootScope",
"$tgRepo",
"$tgConfirm",
"$tgResources",
"$routeParams",
"$q",
"$tgLocation",
"$tgNavUrls",
"$appTitle"
]
constructor: (@scope, @rootscope, @repo, @confirm, @rs, @params, @q, @location, @navUrls, @appTitle) ->
@scope.project = {}
promise = @.loadInitialData()
promise.then () =>
@appTitle.set("Project values - " + @scope.sectionName + " - " + @scope.project.name)
promise.then null, @.onInitialDataError.bind(@)
@scope.$on("admin:project-values:move", @.moveValue)
loadProject: ->
return @rs.projects.get(@scope.projectId).then (project) =>
@scope.project = project
@scope.$emit('project:loaded', project)
return project
loadValues: ->
return @rs[@scope.resource].listValues(@scope.projectId, @scope.type).then (values) =>
@scope.values = values
@scope.maxValueOrder = _.max(values, "order").order
return values
loadInitialData: ->
promise = @repo.resolve({pslug: @params.pslug}).then (data) =>
@scope.projectId = data.project
return data
return promise.then( => @q.all([
@.loadProject(),
@.loadValues(),
]))
moveValue: (ctx, itemValue, itemIndex) =>
values = @scope.values
r = values.indexOf(itemValue)
values.splice(r, 1)
values.splice(itemIndex, 0, itemValue)
_.each values, (value, index) ->
value.order = index
@repo.saveAll(values)
module.controller("ProjectValuesController", ProjectValuesController)
#############################################################################
## Project values directive
#############################################################################
ProjectValuesDirective = ($log, $repo, $confirm, $location, animationFrame) ->
## Drag & Drop Link
linkDragAndDrop = ($scope, $el, $attrs) ->
oldParentScope = null
newParentScope = null
itemEl = null
tdom = $el.find(".sortable")
tdom.sortable({
handle: ".row.table-main.visualization",
dropOnEmpty: true
connectWith: ".project-values-body"
revert: 400
axis: "y"
})
tdom.on "sortstop", (event, ui) ->
itemEl = ui.item
itemValue = itemEl.scope().value
itemIndex = itemEl.index()
$scope.$broadcast("admin:project-values:move", itemValue, itemIndex)
$scope.$on "$destroy", ->
$el.off()
## Value Link
linkValue = ($scope, $el, $attrs) ->
$ctrl = $el.controller()
valueType = $attrs.type
initializeNewValue = ->
$scope.newValue = {
"name": ""
"is_closed": false
}
initializeNewValue()
goToBottomList = (focus = false) =>
table = $el.find(".table-main")
$(document.body).scrollTop(table.offset().top + table.height())
if focus
$(".new-value input").focus()
submit = debounce 2000, =>
promise = $repo.save($scope.project)
promise.then ->
$confirm.notify("success")
promise.then null, (data) ->
$confirm.notify("error", data._error_message)
saveValue = debounce 2000, (target) ->
form = target.parents("form").checksley()
return if not form.validate()
value = target.scope().value
promise = $repo.save(value)
promise.then =>
row = target.parents(".row.table-main")
row.addClass("hidden")
row.siblings(".visualization").removeClass('hidden')
promise.then null, (data) ->
$confirm.notify("error")
form.setErrors(data)
cancel = (target) ->
row = target.parents(".row.table-main")
value = target.scope().value
$scope.$apply ->
row.addClass("hidden")
value.revert()
row.siblings(".visualization").removeClass('hidden')
$el.on "submit", "form", (event) ->
event.preventDefault()
submit()
$el.on "click", "form a.button-green", (event) ->
event.preventDefault()
submit()
$el.on "click", ".show-add-new", (event) ->
event.preventDefault()
$el.find(".new-value").removeClass('hidden')
goToBottomList(true)
$el.on "click", ".add-new", debounce 2000, (event) ->
event.preventDefault()
form = $el.find(".new-value").parents("form").checksley()
return if not form.validate()
$scope.newValue.project = $scope.project.id
$scope.newValue.order = if $scope.maxValueOrder then $scope.maxValueOrder + 1 else 1
promise = $repo.create(valueType, $scope.newValue)
promise.then =>
$ctrl.loadValues().then ->
animationFrame.add () ->
goToBottomList()
$el.find(".new-value").addClass("hidden")
initializeNewValue()
promise.then null, (data) ->
$confirm.notify("error")
form.setErrors(data)
$el.on "click", ".delete-new", (event) ->
event.preventDefault()
$el.find(".new-value").hide()
initializeNewValue()
$el.on "click", ".edit-value", (event) ->
event.preventDefault()
target = angular.element(event.currentTarget)
row = target.parents(".row.table-main")
row.addClass("hidden")
editionRow = row.siblings(".edition")
editionRow.removeClass('hidden')
editionRow.find('input:visible').first().focus().select()
$el.on "keyup", ".edition input", (event) ->
if event.keyCode == 13
target = angular.element(event.currentTarget)
saveValue(target)
else if event.keyCode == 27
target = angular.element(event.currentTarget)
cancel(target)
$el.on "click", ".save", (event) ->
event.preventDefault()
target = angular.element(event.currentTarget)
saveValue(target)
$el.on "click", ".cancel", (event) ->
event.preventDefault()
target = angular.element(event.currentTarget)
cancel(target)
$el.on "click", ".delete-value", (event) ->
event.preventDefault()
target = angular.element(event.currentTarget)
value = target.scope().value
choices = {}
_.each $scope.values, (option) ->
if value.id != option.id
choices[option.id] = option.name
#TODO: i18n
title = "Delete value"
subtitle = value.name
replacement = "All items with this value will be changed to"
if _.keys(choices).length == 0
return $confirm.error("You can't delete all values.")
return $confirm.askChoice(title, subtitle, choices, replacement).then (response) ->
onSucces = ->
$ctrl.loadValues().finally ->
response.finish()
onError = ->
$confirm.notify("error")
$repo.remove(value, {"moveTo": response.selected}).then(onSucces, onError)
link = ($scope, $el, $attrs) ->
linkDragAndDrop($scope, $el, $attrs)
linkValue($scope, $el, $attrs)
$scope.$on "$destroy", ->
$el.off()
return {link:link}
module.directive("tgProjectValues", ["$log", "$tgRepo", "$tgConfirm", "$tgLocation", "animationFrame",
ProjectValuesDirective])
#############################################################################
## Color selection directive
#############################################################################
ColorSelectionDirective = () ->
## Color selection Link
link = ($scope, $el, $attrs, $model) ->
$ctrl = $el.controller()
$scope.$watch $attrs.ngModel, (element) ->
$scope.color = element.color
$el.on "click", ".current-color", (event) ->
# Showing the color selector
event.preventDefault()
event.stopPropagation()
target = angular.element(event.currentTarget)
$el.find(".select-color").hide()
target.siblings(".select-color").show()
# Hide when click outside
body = angular.element("body")
body.on "click", (event) =>
if angular.element(event.target).parent(".select-color").length == 0
$el.find(".select-color").hide()
body.unbind("click")
$el.on "click", ".select-color .color", (event) ->
# Selecting one color on color selector
event.preventDefault()
target = angular.element(event.currentTarget)
$scope.$apply ->
$model.$modelValue.color = target.data("color")
$el.find(".select-color").hide()
$el.on "click", ".select-color .selected-color", (event) ->
event.preventDefault()
$scope.$apply ->
$model.$modelValue.color = $scope.color
$el.find(".select-color").hide()
$scope.$on "$destroy", ->
$el.off()
return {
link: link
require:"ngModel"
}
module.directive("tgColorSelection", ColorSelectionDirective)
| 27971 | ###
# Copyright (C) 2014 <NAME> <<EMAIL>>
# Copyright (C) 2014 <NAME> <<EMAIL>>
# Copyright (C) 2014 <NAME> <<EMAIL>>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/admin/project-profile.coffee
###
taiga = @.taiga
mixOf = @.taiga.mixOf
trim = @.taiga.trim
toString = @.taiga.toString
joinStr = @.taiga.joinStr
groupBy = @.taiga.groupBy
bindOnce = @.taiga.bindOnce
debounce = @.taiga.debounce
module = angular.module("taigaAdmin")
#############################################################################
## Project values Controller
#############################################################################
class ProjectValuesController extends mixOf(taiga.Controller, taiga.PageMixin)
@.$inject = [
"$scope",
"$rootScope",
"$tgRepo",
"$tgConfirm",
"$tgResources",
"$routeParams",
"$q",
"$tgLocation",
"$tgNavUrls",
"$appTitle"
]
constructor: (@scope, @rootscope, @repo, @confirm, @rs, @params, @q, @location, @navUrls, @appTitle) ->
@scope.project = {}
promise = @.loadInitialData()
promise.then () =>
@appTitle.set("Project values - " + @scope.sectionName + " - " + @scope.project.name)
promise.then null, @.onInitialDataError.bind(@)
@scope.$on("admin:project-values:move", @.moveValue)
loadProject: ->
return @rs.projects.get(@scope.projectId).then (project) =>
@scope.project = project
@scope.$emit('project:loaded', project)
return project
loadValues: ->
return @rs[@scope.resource].listValues(@scope.projectId, @scope.type).then (values) =>
@scope.values = values
@scope.maxValueOrder = _.max(values, "order").order
return values
loadInitialData: ->
promise = @repo.resolve({pslug: @params.pslug}).then (data) =>
@scope.projectId = data.project
return data
return promise.then( => @q.all([
@.loadProject(),
@.loadValues(),
]))
moveValue: (ctx, itemValue, itemIndex) =>
values = @scope.values
r = values.indexOf(itemValue)
values.splice(r, 1)
values.splice(itemIndex, 0, itemValue)
_.each values, (value, index) ->
value.order = index
@repo.saveAll(values)
module.controller("ProjectValuesController", ProjectValuesController)
#############################################################################
## Project values directive
#############################################################################
ProjectValuesDirective = ($log, $repo, $confirm, $location, animationFrame) ->
## Drag & Drop Link
linkDragAndDrop = ($scope, $el, $attrs) ->
oldParentScope = null
newParentScope = null
itemEl = null
tdom = $el.find(".sortable")
tdom.sortable({
handle: ".row.table-main.visualization",
dropOnEmpty: true
connectWith: ".project-values-body"
revert: 400
axis: "y"
})
tdom.on "sortstop", (event, ui) ->
itemEl = ui.item
itemValue = itemEl.scope().value
itemIndex = itemEl.index()
$scope.$broadcast("admin:project-values:move", itemValue, itemIndex)
$scope.$on "$destroy", ->
$el.off()
## Value Link
linkValue = ($scope, $el, $attrs) ->
$ctrl = $el.controller()
valueType = $attrs.type
initializeNewValue = ->
$scope.newValue = {
"name": ""
"is_closed": false
}
initializeNewValue()
goToBottomList = (focus = false) =>
table = $el.find(".table-main")
$(document.body).scrollTop(table.offset().top + table.height())
if focus
$(".new-value input").focus()
submit = debounce 2000, =>
promise = $repo.save($scope.project)
promise.then ->
$confirm.notify("success")
promise.then null, (data) ->
$confirm.notify("error", data._error_message)
saveValue = debounce 2000, (target) ->
form = target.parents("form").checksley()
return if not form.validate()
value = target.scope().value
promise = $repo.save(value)
promise.then =>
row = target.parents(".row.table-main")
row.addClass("hidden")
row.siblings(".visualization").removeClass('hidden')
promise.then null, (data) ->
$confirm.notify("error")
form.setErrors(data)
cancel = (target) ->
row = target.parents(".row.table-main")
value = target.scope().value
$scope.$apply ->
row.addClass("hidden")
value.revert()
row.siblings(".visualization").removeClass('hidden')
$el.on "submit", "form", (event) ->
event.preventDefault()
submit()
$el.on "click", "form a.button-green", (event) ->
event.preventDefault()
submit()
$el.on "click", ".show-add-new", (event) ->
event.preventDefault()
$el.find(".new-value").removeClass('hidden')
goToBottomList(true)
$el.on "click", ".add-new", debounce 2000, (event) ->
event.preventDefault()
form = $el.find(".new-value").parents("form").checksley()
return if not form.validate()
$scope.newValue.project = $scope.project.id
$scope.newValue.order = if $scope.maxValueOrder then $scope.maxValueOrder + 1 else 1
promise = $repo.create(valueType, $scope.newValue)
promise.then =>
$ctrl.loadValues().then ->
animationFrame.add () ->
goToBottomList()
$el.find(".new-value").addClass("hidden")
initializeNewValue()
promise.then null, (data) ->
$confirm.notify("error")
form.setErrors(data)
$el.on "click", ".delete-new", (event) ->
event.preventDefault()
$el.find(".new-value").hide()
initializeNewValue()
$el.on "click", ".edit-value", (event) ->
event.preventDefault()
target = angular.element(event.currentTarget)
row = target.parents(".row.table-main")
row.addClass("hidden")
editionRow = row.siblings(".edition")
editionRow.removeClass('hidden')
editionRow.find('input:visible').first().focus().select()
$el.on "keyup", ".edition input", (event) ->
if event.keyCode == 13
target = angular.element(event.currentTarget)
saveValue(target)
else if event.keyCode == 27
target = angular.element(event.currentTarget)
cancel(target)
$el.on "click", ".save", (event) ->
event.preventDefault()
target = angular.element(event.currentTarget)
saveValue(target)
$el.on "click", ".cancel", (event) ->
event.preventDefault()
target = angular.element(event.currentTarget)
cancel(target)
$el.on "click", ".delete-value", (event) ->
event.preventDefault()
target = angular.element(event.currentTarget)
value = target.scope().value
choices = {}
_.each $scope.values, (option) ->
if value.id != option.id
choices[option.id] = option.name
#TODO: i18n
title = "Delete value"
subtitle = value.name
replacement = "All items with this value will be changed to"
if _.keys(choices).length == 0
return $confirm.error("You can't delete all values.")
return $confirm.askChoice(title, subtitle, choices, replacement).then (response) ->
onSucces = ->
$ctrl.loadValues().finally ->
response.finish()
onError = ->
$confirm.notify("error")
$repo.remove(value, {"moveTo": response.selected}).then(onSucces, onError)
link = ($scope, $el, $attrs) ->
linkDragAndDrop($scope, $el, $attrs)
linkValue($scope, $el, $attrs)
$scope.$on "$destroy", ->
$el.off()
return {link:link}
module.directive("tgProjectValues", ["$log", "$tgRepo", "$tgConfirm", "$tgLocation", "animationFrame",
ProjectValuesDirective])
#############################################################################
## Color selection directive
#############################################################################
ColorSelectionDirective = () ->
## Color selection Link
link = ($scope, $el, $attrs, $model) ->
$ctrl = $el.controller()
$scope.$watch $attrs.ngModel, (element) ->
$scope.color = element.color
$el.on "click", ".current-color", (event) ->
# Showing the color selector
event.preventDefault()
event.stopPropagation()
target = angular.element(event.currentTarget)
$el.find(".select-color").hide()
target.siblings(".select-color").show()
# Hide when click outside
body = angular.element("body")
body.on "click", (event) =>
if angular.element(event.target).parent(".select-color").length == 0
$el.find(".select-color").hide()
body.unbind("click")
$el.on "click", ".select-color .color", (event) ->
# Selecting one color on color selector
event.preventDefault()
target = angular.element(event.currentTarget)
$scope.$apply ->
$model.$modelValue.color = target.data("color")
$el.find(".select-color").hide()
$el.on "click", ".select-color .selected-color", (event) ->
event.preventDefault()
$scope.$apply ->
$model.$modelValue.color = $scope.color
$el.find(".select-color").hide()
$scope.$on "$destroy", ->
$el.off()
return {
link: link
require:"ngModel"
}
module.directive("tgColorSelection", ColorSelectionDirective)
| true | ###
# Copyright (C) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# Copyright (C) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# Copyright (C) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/admin/project-profile.coffee
###
taiga = @.taiga
mixOf = @.taiga.mixOf
trim = @.taiga.trim
toString = @.taiga.toString
joinStr = @.taiga.joinStr
groupBy = @.taiga.groupBy
bindOnce = @.taiga.bindOnce
debounce = @.taiga.debounce
module = angular.module("taigaAdmin")
#############################################################################
## Project values Controller
#############################################################################
class ProjectValuesController extends mixOf(taiga.Controller, taiga.PageMixin)
@.$inject = [
"$scope",
"$rootScope",
"$tgRepo",
"$tgConfirm",
"$tgResources",
"$routeParams",
"$q",
"$tgLocation",
"$tgNavUrls",
"$appTitle"
]
constructor: (@scope, @rootscope, @repo, @confirm, @rs, @params, @q, @location, @navUrls, @appTitle) ->
@scope.project = {}
promise = @.loadInitialData()
promise.then () =>
@appTitle.set("Project values - " + @scope.sectionName + " - " + @scope.project.name)
promise.then null, @.onInitialDataError.bind(@)
@scope.$on("admin:project-values:move", @.moveValue)
loadProject: ->
return @rs.projects.get(@scope.projectId).then (project) =>
@scope.project = project
@scope.$emit('project:loaded', project)
return project
loadValues: ->
return @rs[@scope.resource].listValues(@scope.projectId, @scope.type).then (values) =>
@scope.values = values
@scope.maxValueOrder = _.max(values, "order").order
return values
loadInitialData: ->
promise = @repo.resolve({pslug: @params.pslug}).then (data) =>
@scope.projectId = data.project
return data
return promise.then( => @q.all([
@.loadProject(),
@.loadValues(),
]))
moveValue: (ctx, itemValue, itemIndex) =>
values = @scope.values
r = values.indexOf(itemValue)
values.splice(r, 1)
values.splice(itemIndex, 0, itemValue)
_.each values, (value, index) ->
value.order = index
@repo.saveAll(values)
module.controller("ProjectValuesController", ProjectValuesController)
#############################################################################
## Project values directive
#############################################################################
ProjectValuesDirective = ($log, $repo, $confirm, $location, animationFrame) ->
## Drag & Drop Link
linkDragAndDrop = ($scope, $el, $attrs) ->
oldParentScope = null
newParentScope = null
itemEl = null
tdom = $el.find(".sortable")
tdom.sortable({
handle: ".row.table-main.visualization",
dropOnEmpty: true
connectWith: ".project-values-body"
revert: 400
axis: "y"
})
tdom.on "sortstop", (event, ui) ->
itemEl = ui.item
itemValue = itemEl.scope().value
itemIndex = itemEl.index()
$scope.$broadcast("admin:project-values:move", itemValue, itemIndex)
$scope.$on "$destroy", ->
$el.off()
## Value Link
linkValue = ($scope, $el, $attrs) ->
$ctrl = $el.controller()
valueType = $attrs.type
initializeNewValue = ->
$scope.newValue = {
"name": ""
"is_closed": false
}
initializeNewValue()
goToBottomList = (focus = false) =>
table = $el.find(".table-main")
$(document.body).scrollTop(table.offset().top + table.height())
if focus
$(".new-value input").focus()
submit = debounce 2000, =>
promise = $repo.save($scope.project)
promise.then ->
$confirm.notify("success")
promise.then null, (data) ->
$confirm.notify("error", data._error_message)
saveValue = debounce 2000, (target) ->
form = target.parents("form").checksley()
return if not form.validate()
value = target.scope().value
promise = $repo.save(value)
promise.then =>
row = target.parents(".row.table-main")
row.addClass("hidden")
row.siblings(".visualization").removeClass('hidden')
promise.then null, (data) ->
$confirm.notify("error")
form.setErrors(data)
cancel = (target) ->
row = target.parents(".row.table-main")
value = target.scope().value
$scope.$apply ->
row.addClass("hidden")
value.revert()
row.siblings(".visualization").removeClass('hidden')
$el.on "submit", "form", (event) ->
event.preventDefault()
submit()
$el.on "click", "form a.button-green", (event) ->
event.preventDefault()
submit()
$el.on "click", ".show-add-new", (event) ->
event.preventDefault()
$el.find(".new-value").removeClass('hidden')
goToBottomList(true)
$el.on "click", ".add-new", debounce 2000, (event) ->
event.preventDefault()
form = $el.find(".new-value").parents("form").checksley()
return if not form.validate()
$scope.newValue.project = $scope.project.id
$scope.newValue.order = if $scope.maxValueOrder then $scope.maxValueOrder + 1 else 1
promise = $repo.create(valueType, $scope.newValue)
promise.then =>
$ctrl.loadValues().then ->
animationFrame.add () ->
goToBottomList()
$el.find(".new-value").addClass("hidden")
initializeNewValue()
promise.then null, (data) ->
$confirm.notify("error")
form.setErrors(data)
$el.on "click", ".delete-new", (event) ->
event.preventDefault()
$el.find(".new-value").hide()
initializeNewValue()
$el.on "click", ".edit-value", (event) ->
event.preventDefault()
target = angular.element(event.currentTarget)
row = target.parents(".row.table-main")
row.addClass("hidden")
editionRow = row.siblings(".edition")
editionRow.removeClass('hidden')
editionRow.find('input:visible').first().focus().select()
$el.on "keyup", ".edition input", (event) ->
if event.keyCode == 13
target = angular.element(event.currentTarget)
saveValue(target)
else if event.keyCode == 27
target = angular.element(event.currentTarget)
cancel(target)
$el.on "click", ".save", (event) ->
event.preventDefault()
target = angular.element(event.currentTarget)
saveValue(target)
$el.on "click", ".cancel", (event) ->
event.preventDefault()
target = angular.element(event.currentTarget)
cancel(target)
$el.on "click", ".delete-value", (event) ->
event.preventDefault()
target = angular.element(event.currentTarget)
value = target.scope().value
choices = {}
_.each $scope.values, (option) ->
if value.id != option.id
choices[option.id] = option.name
#TODO: i18n
title = "Delete value"
subtitle = value.name
replacement = "All items with this value will be changed to"
if _.keys(choices).length == 0
return $confirm.error("You can't delete all values.")
return $confirm.askChoice(title, subtitle, choices, replacement).then (response) ->
onSucces = ->
$ctrl.loadValues().finally ->
response.finish()
onError = ->
$confirm.notify("error")
$repo.remove(value, {"moveTo": response.selected}).then(onSucces, onError)
link = ($scope, $el, $attrs) ->
linkDragAndDrop($scope, $el, $attrs)
linkValue($scope, $el, $attrs)
$scope.$on "$destroy", ->
$el.off()
return {link:link}
module.directive("tgProjectValues", ["$log", "$tgRepo", "$tgConfirm", "$tgLocation", "animationFrame",
ProjectValuesDirective])
#############################################################################
## Color selection directive
#############################################################################
ColorSelectionDirective = () ->
## Color selection Link
link = ($scope, $el, $attrs, $model) ->
$ctrl = $el.controller()
$scope.$watch $attrs.ngModel, (element) ->
$scope.color = element.color
$el.on "click", ".current-color", (event) ->
# Showing the color selector
event.preventDefault()
event.stopPropagation()
target = angular.element(event.currentTarget)
$el.find(".select-color").hide()
target.siblings(".select-color").show()
# Hide when click outside
body = angular.element("body")
body.on "click", (event) =>
if angular.element(event.target).parent(".select-color").length == 0
$el.find(".select-color").hide()
body.unbind("click")
$el.on "click", ".select-color .color", (event) ->
# Selecting one color on color selector
event.preventDefault()
target = angular.element(event.currentTarget)
$scope.$apply ->
$model.$modelValue.color = target.data("color")
$el.find(".select-color").hide()
$el.on "click", ".select-color .selected-color", (event) ->
event.preventDefault()
$scope.$apply ->
$model.$modelValue.color = $scope.color
$el.find(".select-color").hide()
$scope.$on "$destroy", ->
$el.off()
return {
link: link
require:"ngModel"
}
module.directive("tgColorSelection", ColorSelectionDirective)
|
[
{
"context": "\n {\n content:\n $like: \"Dummy\"\n },\n {\n featured:\n ",
"end": 10379,
"score": 0.9858568906784058,
"start": 10374,
"tag": "NAME",
"value": "Dummy"
},
{
"context": ", false\n\n\n it \"$any and $all\", ->\n ... | test/suite.coffee | blitmap/underscore-query | 77 | require "coffee-script"
assert = require "assert"
_ = require "underscore"
_collection = [
{title:"Home", colors:["red","yellow","blue"], likes:12, featured:true, content: "Dummy content about coffeescript", score: 0}
{title:"About", colors:["red"], likes:2, featured:true, content: "dummy content about javascript", score: 5}
{title:"Contact", colors:["red","blue"], likes:20, content: "Dummy content about PHP", score: -1, total:NaN}
]
create = -> _.clone(_collection)
module.exports = (_query) ->
it "Equals query", ->
a = create()
result = _query a, title:"Home"
assert.equal result.length, 1
assert.equal result[0].title, "Home"
result = _query a, colors: "blue"
assert.equal result.length, 2
result = _query a, colors: ["red", "blue"]
assert.equal result.length, 1
it "Simple equals query (no results)", ->
a = create()
result = _query a, title:"Homes"
assert.equal result.length, 0
it "equal null doesn't match 0", ->
a = create()
result = _query a, score:null
assert.equal result.length, 0
it "equal NaN matches NaNs", ->
a = create()
result = _query a, total:NaN
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "Simple equals query with explicit $equal", ->
a = create()
result = _query a, title: {$equal: "About"}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$contains operator", ->
a = create()
result = _query a, colors: {$contains: "blue"}
assert.equal result.length, 2
it "$ne operator", ->
a = create()
result = _query a, title: {$ne: "Home"}
assert.equal result.length, 2
it "$lt operator", ->
a = create()
result = _query a, likes: {$lt: 12}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$lt operator", ->
a = create()
result = _query a, score: {$lt: null}
assert.equal result.length, 0
it "$lte operator", ->
a = create()
result = _query a, likes: {$lte: 12}
assert.equal result.length, 2
it "$lte operator", ->
a = create()
result = _query a, score: {$lte: null}
assert.equal result.length, 0
it "$gt operator", ->
a = create()
result = _query a, likes: {$gt: 12}
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "$gt null", ->
a = create()
result = _query a, likes: {$gt: null}
assert.equal result.length, 0
it "$gte operator", ->
a = create()
result = _query a, likes: {$gte: 12}
assert.equal result.length, 2
it "$gte null", ->
a = create()
result = _query a, likes: {$gte: null}
assert.equal result.length, 0
it "$between operator", ->
a = create()
result = _query a, likes: {$between: [1,5]}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$between operator is exclusive", ->
a = create()
result = _query a, likes: {$between: [1,2]}
assert.equal result.length, 0
it "$between operator with null", ->
a = create()
result = _query a, likes: {$between: [null, 5]}
assert.equal result.length, 0
it "$between errors with not enough args", ->
a = create()
assert.throws ->
result = _query a, likes: {$between: []}
assert.throws ->
result = _query a, likes: {$between: [5]}
it "$betweene operator is inclusive", ->
a = create()
result = _query a, likes: {$betweene: [1,2]}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$betweene operator with null", ->
a = create()
result = _query a, likes: {$betweene: [null, 10]}
assert.equal result.length, 0
it "$mod operator", ->
a = create()
result = _query a, likes: {$mod: [3,0]}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$mod operator with null", ->
a = create()
result = _query a, likes: {$mod: [null, 5]}
assert.equal result.length, 0
result = _query a, likes: {$mod: [3, null]}
assert.equal result.length, 0
it "$mod errors with not enough args", ->
a = create()
assert.throws ->
result = _query a, likes: {$mod: []}
assert.throws ->
result = _query a, likes: {$mod: [5]}
it "$in operator", ->
a = create()
result = _query a, title: {$in: ["Home","About"]}
assert.equal result.length, 2
it "$in operator with wrong query value", ->
a = create()
assert.throws ->
_query a, title: {$in: "Home"}
it "$nin operator", ->
a = create()
result = _query a, title: {$nin: ["Home","About"]}
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "$all operator", ->
a = create()
result = _query a, colors: {$all: ["red","blue"]}
assert.equal result.length, 2
it "$all operator (wrong values)", ->
a = create()
result = _query a, title: {$all: ["red","blue"]}
assert.equal result.length, 0
assert.throws ->
_query a, colors: {$all: "red"}
it "$any operator", ->
a = create()
result = _query a, colors: {$any: ["red","blue"]}
assert.equal result.length, 3
result = _query a, colors: {$any: ["yellow","blue"]}
assert.equal result.length, 2
it "$none operator", ->
a = create()
result = _query a, colors: {$none: ["yellow","blue"]}
assert.deepEqual result, [a[1]]
it "$size operator", ->
a = create()
result = _query a, colors: {$size: 3}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$exists operator", ->
a = create()
result = _query a, featured: {$exists: true}
assert.equal result.length, 2
it "$has operator", ->
a = create()
result = _query a, featured: {$exists: false}
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "$like operator", ->
a = create()
result = _query a, content: {$like: "javascript"}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$like operator 2", ->
a = create()
result = _query a, content: {$like: "content"}
assert.equal result.length, 3
it "$likeI operator", ->
a = create()
result = _query a, content: {$likeI: "dummy"}
assert.equal result.length, 3
result = _query a, content: {$like: "dummy"}
assert.equal result.length, 1
it "$startsWith operator", ->
a = create()
result = _query a, title: {$startsWith: "Ho"}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$endsWith operator", ->
a = create()
result = _query a, title: {$endsWith: "me"}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$regex", ->
a = create()
result = _query a, content: {$regex: /javascript/gi}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$regex2", ->
a = create()
result = _query a, content: {$regex: /dummy/}
assert.equal result.length, 1
it "$regex3", ->
a = create()
result = _query a, content: {$regex: /dummy/i}
assert.equal result.length, 3
it "$regex4", ->
a = create()
result = _query a, content: /javascript/i
assert.equal result.length, 1
it "$regex with object", ->
a = create()
result = _query a, content: {$regex: 'dummy'}
assert.equal result.length, 1
it "$regex with object+options", ->
a = create()
result = _query a, content: {$regex: 'dummy', $options: 'i'}
assert.equal result.length, 3
it "$options errors without regexp", ->
a = create()
assert.throws(->
result = _query a, content: {$options: 'i'}
)
  # --- Callback ($cb) and dynamic (function-valued) queries ---
  it "$cb - callback", ->
    a = create()
    fn = (attr) ->
      attr.charAt(0).toLowerCase() is "c"
    result = _query a,
      title: $cb: fn
    assert.equal result.length, 1
    assert.equal result[0].title, "Contact"
  # Inside a $cb the callback's `this` is bound to the record under test.
  it "$cb - callback - checking 'this' is the model", ->
    a = create()
    result = _query a, title:
      $cb: (attr) -> @title is "Home"
    assert.equal result.length, 1
    assert.equal result[0].title, "Home"
  # A function as the query value is evaluated to produce the comparison value.
  it "Dynamic equals query", ->
    a = create()
    result = _query a, title:()->"Homes"
    assert.equal result.length, 0
    result = _query a, title:()->"Home"
    assert.equal result.length, 1
  # The $lt bound is a function: it must be re-invoked rather than cached,
  # so successive runs of the same compiled query see an increased threshold
  # (hence 1 match on the first pass, 2 on the second).
  it "ensure dynamic query not cached", ->
    a = create()
    count = 12 - a.length
    query = _query.testWith(likes: $lt: -> count += 1)
    result = _.filter(a, query)
    assert.equal (result).length, 1
    result = _.filter(a, query)
    assert.equal (result).length, 2
  # --- Logical operators: implicit $and, explicit $and/$or/$nor ---
  it "$and operator", ->
    a = create()
    result = _query a, likes: {$gt: 5}, colors: {$contains: "yellow"}
    assert.equal result.length, 1
    assert.equal result[0].title, "Home"
  it "$and operator (explicit)", ->
    a = create()
    result = _query a, $and: [{likes: {$gt: 5}, colors: {$contains: "yellow"}}]
    assert.equal result.length, 1
    assert.equal result[0].title, "Home"
  it "$or operator", ->
    a = create()
    result = _query a, $or: [{likes: {$gt: 5}}, {colors: {$contains: "yellow"}}]
    assert.equal result.length, 2
  it "$or2 operator", ->
    a = create()
    result = _query a, $or: [{likes: {$gt: 5}}, {featured: true}]
    assert.equal result.length, 3
  # Multiple keys inside one $or branch are ANDed together within that branch.
  it "$or with multiple params in a condition", ->
    dataset = [{x: 1, y: 2}, {x: 1.25, y: 3}, {x: 1.5, y: 3}, {x: 2, y: 4}]
    result = _query(dataset, {
      $or: [{
        x: {
          $gt: 1
        },
        y: {
          $lt: 4
        }
      }, {
        foo: 1
      }]
    })
    assert.equal result.length, 2
  # $nor keeps only records matching none of the subqueries.
  it "$nor operator", ->
    a = create()
    result = _query a, $nor: [{likes: {$gt: 5}}, {colors: {$contains: "yellow"}}]
    assert.equal result.length, 1
    assert.equal result[0].title, "About"
for type in ["$and", "$or", "$nor"]
it type + " throws error when not an array", ->
a = create()
query = {}
query[type] = {
a: 1
}
assert.throws((-> _query(a, query)), Error);
  # --- Combining compound operators ($and + $or + $not) in one query ---
  it "Compound Queries", ->
    a = create()
    result = _query a, $and: [{likes: {$gt: 5}}], $or: [{content: {$like: "PHP"}}, {colors: {$contains: "yellow"}}]
    assert.equal result.length, 2
    result = _query a,
      $and: [
        likes: $lt: 15
      ]
      $or: [
        {
          content:
            $like: "Dummy"
        },
        {
          featured:
            $exists:true
        }
      ]
      $not:
        colors: $contains: "yellow"
    assert.equal result.length, 1
    assert.equal result[0].title, "About"
  # Top-level $not negates the whole wrapped subquery.
  it "$not operator", ->
    a = create()
    result = _query a, {$not: {likes: {$lt: 12}}}
    assert.equal result.length, 2
  #These tests fail, but would pass if it $not worked parallel to MongoDB
  it "$not operator", ->
    a = create()
    result = _query a, {likes: {$not: {$lt: 12}}}
    assert.equal result.length, 2
  it "$not operator", ->
    a = create()
    result = _query a, likes: {$not: 12}
    assert.equal result.length, 2
  it "$not $equal operator", ->
    a = create()
    result = _query a, likes: {$not: {$equal: 12}}
    assert.equal result.length, 2
  it "$not $equal operator", ->
    a = create()
    result = _query a, likes: {$not: {$ne: 12}}
    assert.equal result.length, 1
  # --- $elemMatch: match records whose array field contains at least one
  # element satisfying the sub-query ---
  it "$elemMatch", ->
    a = [
      {title: "Home", comments:[
        {text:"I like this post"}
        {text:"I love this post"}
        {text:"I hate this post"}
      ]}
      {title: "About", comments:[
        {text:"I like this page"}
        {text:"I love this page"}
        {text:"I really like this page"}
      ]}
    ]
    b = [
      {foo: [
        {shape: "square", color: "purple", thick: false}
        {shape: "circle", color: "red", thick: true}
      ]}
      {foo: [
        {shape: "square", color: "red", thick: true}
        {shape: "circle", color: "purple", thick: false}
      ]}
    ]
    text_search = {$likeI: "love"}
    result = _query a, $or: [
      {
        comments:
          $elemMatch:
            text: text_search
      },
      {title: text_search}
    ]
    assert.equal result.length, 2
    result = _query a, $or: [
      comments:
        $elemMatch:
          text: /post/
    ]
    assert.equal result.length, 1
    result = _query a, $or: [
      {
        comments:
          $elemMatch:
            text: /post/
      },
      {title: /about/i}
    ]
    assert.equal result.length, 2
    result = _query a, $or: [
      comments:
        $elemMatch:
          text: /really/
    ]
    assert.equal result.length, 1
    # Both conditions must hold on the SAME array element (only the first
    # record of b has a purple square), not merely somewhere in the array.
    result = _query b,
      foo:
        $elemMatch:
          shape:"square"
          color:"purple"
    assert.equal result.length, 1
    assert.equal result[0].foo[0].shape, "square"
    assert.equal result[0].foo[0].color, "purple"
    assert.equal result[0].foo[0].thick, false
  # $any: non-empty intersection with the query list; $all: every query value
  # must be present in the record's array.
  it "$any and $all", ->
    a = name: "test", tags1: ["red","yellow"], tags2: ["orange", "green", "red", "blue"]
    b = name: "test1", tags1: ["purple","blue"], tags2: ["orange", "red", "blue"]
    c = name: "test2", tags1: ["black","yellow"], tags2: ["green", "orange", "blue"]
    d = name: "test3", tags1: ["red","yellow","blue"], tags2: ["green"]
    e = [a,b,c,d]
    result = _query e,
      tags1: $any: ["red","purple"] # should match a, b, d
      tags2: $all: ["orange","green"] # should match a, c
    assert.equal result.length, 1
    assert.equal result[0].name, "test"
  # $elemMatch also accepts compound sub-queries such as $not.
  it "$elemMatch - compound queries", ->
    a = [
      {title: "Home", comments:[
        {text:"I like this post"}
        {text:"I love this post"}
        {text:"I hate this post"}
      ]}
      {title: "About", comments:[
        {text:"I like this page"}
        {text:"I love this page"}
        {text:"I really like this page"}
      ]}
    ]
    result = _query a,
      comments:
        $elemMatch:
          $not:
            text:/page/
    assert.equal result.length, 1
  # Test from RobW - https://github.com/Rob--W
  it "Explicit $and combined with matching $or must return the correct number of items", ->
    Col = [
      {equ:'ok', same: 'ok'},
      {equ:'ok', same: 'ok'}
    ]
    result = _query Col,
      $and: [ # Matches both items
        {equ: 'ok'}, # Matches both items
        $or:
          same: 'ok'
      ]
    assert.equal result.length, 2
  # Test from RobW - https://github.com/Rob--W
  it "Implicit $and consisting of non-matching subquery and $or must return empty list", ->
    Col = [
      {equ:'ok', same: 'ok'},
      {equ:'ok', same: 'ok'}
    ]
    result = _query Col,
      $and: [{equ: 'bogus'}] # Matches nothing
      $or: [
        same: 'ok' # Matches all items, but due to implicit $and, this subquery should not affect the result
      ]
    assert.equal result.length, 0
  # Known-unsupported nesting of compound operators inside $and (kept skipped).
  it.skip "Testing nested compound operators", ->
    a = create()
    result = _query a,
      $and: [
        {colors: $contains: "blue"} # Matches 1,3
        $or: [
          {featured:true} # Matches 1,2
          {likes:12} # Matches 1
        ]
      ]
      # And only matches 1
      $or:[
        {content:$like:"dummy"} # Matches 2
        {content:$like:"Dummy"} # Matches 1,3
      ]
      # Or matches 3
    assert.equal result.length, 1
    result = _query a,
      $and: [
        {colors: $contains: "blue"} # Matches 1,3
        $or: [
          {featured:true} # Matches 1,2
          {likes:20} # Matches 3
        ]
      ]
      # And only matches 2
      $or:[
        {content:$like:"dummy"} # Matches 2
        {content:$like:"Dummy"} # Matches 1,3
      ]
      # Or matches 3
    assert.equal result.length, 2
  it "works with queries supplied as arrays", ->
    a = create()
    result = _query a,
      $or: [
        {title:"Home"}
        {title:"About"}
      ]
    assert.equal result.length, 2
    assert.equal result[0].title, "Home"
    assert.equal result[1].title, "About"
  # The underscore mixin exposes the same engine via _.chain(...).query(...).
  it "works with underscore chain", ->
    a = create()
    q =
      $or: [
        {title:"Home"}
        {title:"About"}
      ]
    result = _.chain(a).query(q).pluck("title").value()
    assert.equal result.length, 2
    assert.equal result[0], "Home"
    assert.equal result[1], "About"
  # --- Backbone integration: the third argument ("get") makes _query read
  # attributes through each model's getter instead of plain properties ---
  it "works with a getter property", ->
    Backbone = require "backbone"
    a = new Backbone.Collection [
      {id:1, title:"test"}
      {id:2, title:"about"}
    ]
    result = _query a.models, {title:"about"}, "get"
    assert.equal result.length, 1
    assert.equal result[0].get("title"), "about"
  it "can be mixed into backbone collections", ->
    Backbone = require "backbone"
    class Collection extends Backbone.Collection
      query: (params) -> _query @models, params, "get"
      whereBy: (params) -> new @constructor @query(params)
      buildQuery: -> _query.build @models, "get"
    a = new Collection [
      {id:1, title:"test"}
      {id:2, title:"about"}
    ]
    result = a.query {title:"about"}
    assert.equal result.length, 1
    assert.equal result[0].get("title"), "about"
    result2 = a.whereBy {title:"about"}
    assert.equal result2.length, 1
    assert.equal result2.at(0).get("title"), "about"
    assert.equal result2.pluck("title")[0], "about"
    result3 = a.buildQuery().not(title:"test").run()
    assert.equal result3.length, 1
    assert.equal result3[0].get("title"), "about"
  # A "live" filtered collection: listens to add/remove/change on the parent
  # and re-tests each model with the predicate from _query.tester.
  it "can be used for live collections", ->
    Backbone = require "backbone"
    class Collection extends Backbone.Collection
      query: (params) ->
        if params
          _query @models, params, "get"
        else
          _query.build @models, "get"
      whereBy: (params) -> new @constructor @query(params)
      setFilter: (parent, query) ->
        check = _query.tester(query, "get")
        @listenTo parent,
          add: (model) -> if check(model) then @add(model)
          remove: @remove
          change: (model) ->
            if check(model) then @add(model) else @remove(model)
        @add _query(parent.models, query, "get")
    parent = new Collection [
      {title:"Home", colors:["red","yellow","blue"], likes:12, featured:true, content: "Dummy content about coffeescript"}
      {title:"About", colors:["red"], likes:2, featured:true, content: "dummy content about javascript"}
      {title:"Contact", colors:["red","blue"], likes:20, content: "Dummy content about PHP"}
    ]
    live = new Collection
    live.setFilter parent, {likes:$gt:15}
    assert.equal parent.length, 3
    assert.equal live.length, 1
    # Change Events
    parent.at(0).set("likes",16)
    assert.equal live.length, 2
    parent.at(2).set("likes",2)
    assert.equal live.length, 1
    # Add to Parent
    parent.add [{title:"New", likes:21}, {title:"New2", likes:3}]
    assert.equal live.length, 2
    assert.equal parent.length, 5
    # Remove from Parent
    parent.pop()
    parent.pop()
    assert.equal live.length, 1
  # --- Builder API: incremental and/or/not composition ---
  it "buildQuery works in oo fashion", ->
    a = create()
    query = _query.build(a)
      .and({likes: {$gt: 5}})
      .or({content: {$like: "PHP"}})
      .or({colors: {$contains: "yellow"}})
    result = query.run()
    assert.equal result.length, 2
    # build() without a collection: supply the data to run() instead.
    result = _query.build()
      .and(likes: $lt: 15)
      .or(content: $like: "Dummy")
      .or(featured: $exists: true)
      .not(colors: $contains: "yellow")
      .run(a)
    assert.equal result.length, 1
    assert.equal result[0].title, "About"
  # Dot notation digs through nested plain objects.
  it "works with dot notation", ->
    collection = [
      {title:"Home", stats:{likes:10, views:{a:{b:500}}}}
      {title:"About", stats:{likes:5, views:{a:{b:234}}}}
      {title:"Code", stats:{likes:25, views:{a:{b:796}}}}
    ]
    result = _query collection, {"stats.likes":5}
    assert.equal result.length, 1
    assert.equal result[0].title, "About"
    result = _query collection, {"stats.views.a.b":796}
    assert.equal result.length, 1
    assert.equal result[0].title, "Code"
  # Builder methods also accept (key, value) as separate arguments.
  it "works with seperate query args", ->
    collection = [
      {title:"Home", stats:{likes:10, views:{a:{b:500}}}}
      {title:"About", stats:{likes:5, views:{a:{b:234}}}}
      {title:"Code", stats:{likes:25, views:{a:{b:796}}}}
    ]
    query = _query.build(collection)
      .and("title", "Home")
    result = query.run()
    assert.equal result.length, 1
    assert.equal result[0].title, "Home"
  # $computed evaluates a model method (here full_name) before comparing;
  # it accepts either a literal value or a nested operator query.
  it "$computed", ->
    Backbone = require "backbone"
    class testModel extends Backbone.Model
      full_name: -> "#{@get 'first_name'} #{@get 'last_name'}"
    a = new testModel
      first_name: "Dave"
      last_name: "Tonge"
    b = new testModel
      first_name: "John"
      last_name: "Smith"
    c = [a,b]
    result = _query c,
      full_name: $computed: "Dave Tonge"
    assert.equal result.length, 1
    assert.equal result[0].get("first_name"), "Dave"
    result = _query c,
      full_name: $computed: $likeI: "n sm"
    assert.equal result.length, 1
    assert.equal result[0].get("first_name"), "John"
  # Several inequality operators on one key are ANDed together.
  it "Handles multiple inequalities", ->
    a = create()
    result = _query a, likes: { $gt: 2, $lt: 20 }
    assert.equal result.length, 1
    assert.equal result[0].title, "Home"
    result = _query a, likes: { $gte: 2, $lt: 20 }
    assert.equal result.length, 2
    assert.equal result[0].title, "Home"
    assert.equal result[1].title, "About"
    result = _query a, likes: { $gt: 2, $lte: 20 }
    assert.equal result.length, 2
    assert.equal result[0].title, "Home"
    assert.equal result[1].title, "Contact"
    result = _query a, likes: { $gte: 2, $lte: 20 }
    assert.equal result.length, 3
    assert.equal result[0].title, "Home"
    assert.equal result[1].title, "About"
    assert.equal result[2].title, "Contact"
    result = _query a, likes: { $gte: 2, $lte: 20, $ne: 12 }
    assert.equal result.length, 2
    assert.equal result[0].title, "About"
    assert.equal result[1].title, "Contact"
  it "Handles nested multiple inequalities", ->
    a = create()
    result = _query a, $and: [likes: { $gt: 2, $lt: 20 }]
    assert.equal result.length, 1
    assert.equal result[0].title, "Home"
  # --- _query.score: attaches _score to each record. From the assertions
  # below, _score equals the sum of matched-condition weights ($boost,
  # default 1) divided by the number of top-level conditions ---
  it "has a score method", ->
    collection = [
      { name:'dress', color:'red', price:100 }
      { name:'shoes', color:'black', price:120 }
      { name:'jacket', color:'blue', price:150 }
    ]
    results = _query.score( collection, { price: {$lt:140}, color: {$in:['red', 'blue'] }})
    assert.equal _.findWhere(results, {name:'dress'})._score, 1
    assert.equal _.findWhere(results, {name:'shoes'})._score, 0.5
  it "has a score method with a $boost operator", ->
    collection = [
      { name:'dress', color:'red', price:100 }
      { name:'shoes', color:'black', price:120 }
      { name:'jacket', color:'blue', price:150 }
    ]
    results = _query.score( collection, { price: 100, color: {$in:['black'], $boost:3 }})
    assert.equal _.findWhere(results, {name:'dress'})._score, 0.5
    assert.equal _.findWhere(results, {name:'shoes'})._score, 1.5
  it "has a score method with a $boost operator - 2", ->
    collection = [
      { name:'dress', color:'red', price:100 }
      { name:'shoes', color:'black', price:120 }
      { name:'jacket', color:'blue', price:150 }
    ]
    results = _query.score( collection, { name: {$like:'dre', $boost:5}, color: {$in:['black'], $boost:2 }})
    assert.equal _.findWhere(results, {name:'dress'})._score, 2.5
    assert.equal _.findWhere(results, {name:'shoes'})._score, 1
  # score only supports a flat implicit-$and query; compound operators throw.
  it "score method throws if compound query", ->
    collection = [
      { name:'dress', color:'red', price:100 }
      { name:'shoes', color:'black', price:120 }
      { name:'jacket', color:'blue', price:150 }
    ]
    assert.throws ->
      _query.score collection,
        $and: price: 100
        $or: [
          {color: 'red'}
          {color: 'blue'}
        ]
  it "score method throws if non $and query", ->
    collection = [
      { name:'dress', color:'red', price:100 }
      { name:'shoes', color:'black', price:120 }
      { name:'jacket', color:'blue', price:150 }
    ]
    assert.throws ->
      _query.score collection,
        $or: [
          {color: 'red'}
          {color: 'blue'}
        ]
  # --- $not semantics: current behavior contrasted with MongoDB-style forms ---
  # not parallel to MongoDB
  it "$not operator", ->
    a = create()
    result = _query a, {$not: {likes: {$lt: 12}}}
    assert.equal result.length, 2
  # This is parallel to MongoDB
  it "$not operator - mongo style", ->
    a = create()
    result = _query a, {likes: {$not: {$lt: 12}}}
    assert.equal result.length, 2
  # This is parallel to MongoDB
  it "$not operator - mongo style", ->
    a = create()
    result = _query a, {likes: {$not: 12}}
    assert.equal result.length, 2
  it "combination of $gt and $lt - mongo style", ->
    a = create()
    result = _query a, {likes: { $gt: 2, $lt: 20}}
    assert.equal result.length, 1
  it "$not combination of $gt and $lt - mongo style", ->
    a = create()
    result = _query a, {likes: {$not: { $gt: 2, $lt: 20}}}
    assert.equal result.length, 2
  it "$nor combination of $gt and $lt - expressions ", ->
    a = create()
    result = _query a, {$nor: [{likes: { $gt: 2}}, {likes: { $lt: 20}}]}
    assert.equal result.length, 0
  # This query is not a valid MongoDB query, but if it were one would expect it to yield an empty set
  # it "$nor combination of $gt and $lt - values", ->
  #   a = create()
  #   result = _query a, {likes: {$nor: [{ $gt: 2}, {$lt: 20}]}}
  #   assert.equal result.length, 0
  it "combination of $gt and $not", ->
    a = create()
    result = _query a, {likes: { $not: 2, $lt: 20}}
    assert.equal result.length, 1
  # Regression for #21: dot notation reaches inside arrays — any element with
  # a matching id_str makes the record match.
  it "equal within an array (#21)", ->
    tweets = [{
      "entities": {
        "user_mentions": [{
          "id_str": "10228271"
        }]
      }
    }, {
      "entities": {
        "user_mentions": [{
          "id_str": "10228272"
        }]
      }
    }]
    res = _query tweets, {"entities.user_mentions.id_str": "10228272"}
    assert.equal(res.length, 1)
    res = _query tweets, {"entities.user_mentions.id_str": "10228273"}
    assert.equal(res.length, 0)
  # Regression for #29: $and nested directly inside another $and.
  it "compound $ands (#29)", ->
    a = create()
    res = _query(a, {
      $and: [{
        $and: [ likes: {$gt: 5 } ]
      }]
    })
    assert.equal(res.length, 2)
# Shared fixture for the underscore-query test suite.
# FIX: the first line carried extraction residue ("| 22850 | ") fused onto
# the require statement, which is invalid CoffeeScript; the residue is removed.
require "coffee-script"
assert = require "assert"
_ = require "underscore"
# Canonical three-record collection exercised by most tests in this file.
_collection = [
  {title:"Home", colors:["red","yellow","blue"], likes:12, featured:true, content: "Dummy content about coffeescript", score: 0}
  {title:"About", colors:["red"], likes:2, featured:true, content: "dummy content about javascript", score: 5}
  {title:"Contact", colors:["red","blue"], likes:20, content: "Dummy content about PHP", score: -1, total:NaN}
]
# Fresh shallow copy per test so array-level mutations don't leak between cases.
create = -> _.clone(_collection)
module.exports = (_query) ->
it "Equals query", ->
a = create()
result = _query a, title:"Home"
assert.equal result.length, 1
assert.equal result[0].title, "Home"
result = _query a, colors: "blue"
assert.equal result.length, 2
result = _query a, colors: ["red", "blue"]
assert.equal result.length, 1
it "Simple equals query (no results)", ->
a = create()
result = _query a, title:"Homes"
assert.equal result.length, 0
it "equal null doesn't match 0", ->
a = create()
result = _query a, score:null
assert.equal result.length, 0
it "equal NaN matches NaNs", ->
a = create()
result = _query a, total:NaN
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "Simple equals query with explicit $equal", ->
a = create()
result = _query a, title: {$equal: "About"}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$contains operator", ->
a = create()
result = _query a, colors: {$contains: "blue"}
assert.equal result.length, 2
it "$ne operator", ->
a = create()
result = _query a, title: {$ne: "Home"}
assert.equal result.length, 2
it "$lt operator", ->
a = create()
result = _query a, likes: {$lt: 12}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$lt operator", ->
a = create()
result = _query a, score: {$lt: null}
assert.equal result.length, 0
it "$lte operator", ->
a = create()
result = _query a, likes: {$lte: 12}
assert.equal result.length, 2
it "$lte operator", ->
a = create()
result = _query a, score: {$lte: null}
assert.equal result.length, 0
it "$gt operator", ->
a = create()
result = _query a, likes: {$gt: 12}
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "$gt null", ->
a = create()
result = _query a, likes: {$gt: null}
assert.equal result.length, 0
it "$gte operator", ->
a = create()
result = _query a, likes: {$gte: 12}
assert.equal result.length, 2
it "$gte null", ->
a = create()
result = _query a, likes: {$gte: null}
assert.equal result.length, 0
it "$between operator", ->
a = create()
result = _query a, likes: {$between: [1,5]}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$between operator is exclusive", ->
a = create()
result = _query a, likes: {$between: [1,2]}
assert.equal result.length, 0
it "$between operator with null", ->
a = create()
result = _query a, likes: {$between: [null, 5]}
assert.equal result.length, 0
it "$between errors with not enough args", ->
a = create()
assert.throws ->
result = _query a, likes: {$between: []}
assert.throws ->
result = _query a, likes: {$between: [5]}
it "$betweene operator is inclusive", ->
a = create()
result = _query a, likes: {$betweene: [1,2]}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$betweene operator with null", ->
a = create()
result = _query a, likes: {$betweene: [null, 10]}
assert.equal result.length, 0
it "$mod operator", ->
a = create()
result = _query a, likes: {$mod: [3,0]}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$mod operator with null", ->
a = create()
result = _query a, likes: {$mod: [null, 5]}
assert.equal result.length, 0
result = _query a, likes: {$mod: [3, null]}
assert.equal result.length, 0
it "$mod errors with not enough args", ->
a = create()
assert.throws ->
result = _query a, likes: {$mod: []}
assert.throws ->
result = _query a, likes: {$mod: [5]}
it "$in operator", ->
a = create()
result = _query a, title: {$in: ["Home","About"]}
assert.equal result.length, 2
it "$in operator with wrong query value", ->
a = create()
assert.throws ->
_query a, title: {$in: "Home"}
it "$nin operator", ->
a = create()
result = _query a, title: {$nin: ["Home","About"]}
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "$all operator", ->
a = create()
result = _query a, colors: {$all: ["red","blue"]}
assert.equal result.length, 2
it "$all operator (wrong values)", ->
a = create()
result = _query a, title: {$all: ["red","blue"]}
assert.equal result.length, 0
assert.throws ->
_query a, colors: {$all: "red"}
it "$any operator", ->
a = create()
result = _query a, colors: {$any: ["red","blue"]}
assert.equal result.length, 3
result = _query a, colors: {$any: ["yellow","blue"]}
assert.equal result.length, 2
it "$none operator", ->
a = create()
result = _query a, colors: {$none: ["yellow","blue"]}
assert.deepEqual result, [a[1]]
it "$size operator", ->
a = create()
result = _query a, colors: {$size: 3}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$exists operator", ->
a = create()
result = _query a, featured: {$exists: true}
assert.equal result.length, 2
it "$has operator", ->
a = create()
result = _query a, featured: {$exists: false}
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "$like operator", ->
a = create()
result = _query a, content: {$like: "javascript"}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$like operator 2", ->
a = create()
result = _query a, content: {$like: "content"}
assert.equal result.length, 3
it "$likeI operator", ->
a = create()
result = _query a, content: {$likeI: "dummy"}
assert.equal result.length, 3
result = _query a, content: {$like: "dummy"}
assert.equal result.length, 1
it "$startsWith operator", ->
a = create()
result = _query a, title: {$startsWith: "Ho"}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$endsWith operator", ->
a = create()
result = _query a, title: {$endsWith: "me"}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$regex", ->
a = create()
result = _query a, content: {$regex: /javascript/gi}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$regex2", ->
a = create()
result = _query a, content: {$regex: /dummy/}
assert.equal result.length, 1
it "$regex3", ->
a = create()
result = _query a, content: {$regex: /dummy/i}
assert.equal result.length, 3
it "$regex4", ->
a = create()
result = _query a, content: /javascript/i
assert.equal result.length, 1
it "$regex with object", ->
a = create()
result = _query a, content: {$regex: 'dummy'}
assert.equal result.length, 1
it "$regex with object+options", ->
a = create()
result = _query a, content: {$regex: 'dummy', $options: 'i'}
assert.equal result.length, 3
it "$options errors without regexp", ->
a = create()
assert.throws(->
result = _query a, content: {$options: 'i'}
)
it "$cb - callback", ->
a = create()
fn = (attr) ->
attr.charAt(0).toLowerCase() is "c"
result = _query a,
title: $cb: fn
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "$cb - callback - checking 'this' is the model", ->
a = create()
result = _query a, title:
$cb: (attr) -> @title is "Home"
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "Dynamic equals query", ->
a = create()
result = _query a, title:()->"Homes"
assert.equal result.length, 0
result = _query a, title:()->"Home"
assert.equal result.length, 1
it "ensure dynamic query not cached", ->
a = create()
count = 12 - a.length
query = _query.testWith(likes: $lt: -> count += 1)
result = _.filter(a, query)
assert.equal (result).length, 1
result = _.filter(a, query)
assert.equal (result).length, 2
it "$and operator", ->
a = create()
result = _query a, likes: {$gt: 5}, colors: {$contains: "yellow"}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$and operator (explicit)", ->
a = create()
result = _query a, $and: [{likes: {$gt: 5}, colors: {$contains: "yellow"}}]
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$or operator", ->
a = create()
result = _query a, $or: [{likes: {$gt: 5}}, {colors: {$contains: "yellow"}}]
assert.equal result.length, 2
it "$or2 operator", ->
a = create()
result = _query a, $or: [{likes: {$gt: 5}}, {featured: true}]
assert.equal result.length, 3
it "$or with multiple params in a condition", ->
dataset = [{x: 1, y: 2}, {x: 1.25, y: 3}, {x: 1.5, y: 3}, {x: 2, y: 4}]
result = _query(dataset, {
$or: [{
x: {
$gt: 1
},
y: {
$lt: 4
}
}, {
foo: 1
}]
})
assert.equal result.length, 2
it "$nor operator", ->
a = create()
result = _query a, $nor: [{likes: {$gt: 5}}, {colors: {$contains: "yellow"}}]
assert.equal result.length, 1
assert.equal result[0].title, "About"
for type in ["$and", "$or", "$nor"]
it type + " throws error when not an array", ->
a = create()
query = {}
query[type] = {
a: 1
}
assert.throws((-> _query(a, query)), Error);
it "Compound Queries", ->
a = create()
result = _query a, $and: [{likes: {$gt: 5}}], $or: [{content: {$like: "PHP"}}, {colors: {$contains: "yellow"}}]
assert.equal result.length, 2
result = _query a,
$and: [
likes: $lt: 15
]
$or: [
{
content:
$like: "<NAME>"
},
{
featured:
$exists:true
}
]
$not:
colors: $contains: "yellow"
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$not operator", ->
a = create()
result = _query a, {$not: {likes: {$lt: 12}}}
assert.equal result.length, 2
#These tests fail, but would pass if it $not worked parallel to MongoDB
it "$not operator", ->
a = create()
result = _query a, {likes: {$not: {$lt: 12}}}
assert.equal result.length, 2
it "$not operator", ->
a = create()
result = _query a, likes: {$not: 12}
assert.equal result.length, 2
it "$not $equal operator", ->
a = create()
result = _query a, likes: {$not: {$equal: 12}}
assert.equal result.length, 2
it "$not $equal operator", ->
a = create()
result = _query a, likes: {$not: {$ne: 12}}
assert.equal result.length, 1
it "$elemMatch", ->
a = [
{title: "Home", comments:[
{text:"I like this post"}
{text:"I love this post"}
{text:"I hate this post"}
]}
{title: "About", comments:[
{text:"I like this page"}
{text:"I love this page"}
{text:"I really like this page"}
]}
]
b = [
{foo: [
{shape: "square", color: "purple", thick: false}
{shape: "circle", color: "red", thick: true}
]}
{foo: [
{shape: "square", color: "red", thick: true}
{shape: "circle", color: "purple", thick: false}
]}
]
text_search = {$likeI: "love"}
result = _query a, $or: [
{
comments:
$elemMatch:
text: text_search
},
{title: text_search}
]
assert.equal result.length, 2
result = _query a, $or: [
comments:
$elemMatch:
text: /post/
]
assert.equal result.length, 1
result = _query a, $or: [
{
comments:
$elemMatch:
text: /post/
},
{title: /about/i}
]
assert.equal result.length, 2
result = _query a, $or: [
comments:
$elemMatch:
text: /really/
]
assert.equal result.length, 1
result = _query b,
foo:
$elemMatch:
shape:"square"
color:"purple"
assert.equal result.length, 1
assert.equal result[0].foo[0].shape, "square"
assert.equal result[0].foo[0].color, "purple"
assert.equal result[0].foo[0].thick, false
it "$any and $all", ->
a = name: "<NAME>", tags1: ["red","yellow"], tags2: ["orange", "green", "red", "blue"]
b = name: "<NAME>", tags1: ["purple","blue"], tags2: ["orange", "red", "blue"]
c = name: "<NAME>", tags1: ["black","yellow"], tags2: ["green", "orange", "blue"]
d = name: "test<NAME>", tags1: ["red","yellow","blue"], tags2: ["green"]
e = [a,b,c,d]
result = _query e,
tags1: $any: ["red","purple"] # should match a, b, d
tags2: $all: ["orange","green"] # should match a, c
assert.equal result.length, 1
assert.equal result[0].name, "test"
it "$elemMatch - compound queries", ->
a = [
{title: "Home", comments:[
{text:"I like this post"}
{text:"I love this post"}
{text:"I hate this post"}
]}
{title: "About", comments:[
{text:"I like this page"}
{text:"I love this page"}
{text:"I really like this page"}
]}
]
result = _query a,
comments:
$elemMatch:
$not:
text:/page/
assert.equal result.length, 1
# Test from RobW - https://github.com/Rob--W
it "Explicit $and combined with matching $or must return the correct number of items", ->
Col = [
{equ:'ok', same: 'ok'},
{equ:'ok', same: 'ok'}
]
result = _query Col,
$and: [ # Matches both items
{equ: 'ok'}, # Matches both items
$or:
same: 'ok'
]
assert.equal result.length, 2
# Test from RobW - https://github.com/Rob--W
it "Implicit $and consisting of non-matching subquery and $or must return empty list", ->
Col = [
{equ:'ok', same: 'ok'},
{equ:'ok', same: 'ok'}
]
result = _query Col,
$and: [{equ: 'bogus'}] # Matches nothing
$or: [
same: 'ok' # Matches all items, but due to implicit $and, this subquery should not affect the result
]
assert.equal result.length, 0
it.skip "Testing nested compound operators", ->
a = create()
result = _query a,
$and: [
{colors: $contains: "blue"} # Matches 1,3
$or: [
{featured:true} # Matches 1,2
{likes:12} # Matches 1
]
]
# And only matches 1
$or:[
{content:$like:"dummy"} # Matches 2
{content:$like:"Dummy"} # Matches 1,3
]
# Or matches 3
assert.equal result.length, 1
result = _query a,
$and: [
{colors: $contains: "blue"} # Matches 1,3
$or: [
{featured:true} # Matches 1,2
{likes:20} # Matches 3
]
]
# And only matches 2
$or:[
{content:$like:"dummy"} # Matches 2
{content:$like:"<NAME>"} # Matches 1,3
]
# Or matches 3
assert.equal result.length, 2
it "works with queries supplied as arrays", ->
a = create()
result = _query a,
$or: [
{title:"Home"}
{title:"About"}
]
assert.equal result.length, 2
assert.equal result[0].title, "Home"
assert.equal result[1].title, "About"
it "works with underscore chain", ->
a = create()
q =
$or: [
{title:"Home"}
{title:"About"}
]
result = _.chain(a).query(q).pluck("title").value()
assert.equal result.length, 2
assert.equal result[0], "Home"
assert.equal result[1], "About"
it "works with a getter property", ->
Backbone = require "backbone"
a = new Backbone.Collection [
{id:1, title:"test"}
{id:2, title:"about"}
]
result = _query a.models, {title:"about"}, "get"
assert.equal result.length, 1
assert.equal result[0].get("title"), "about"
it "can be mixed into backbone collections", ->
Backbone = require "backbone"
class Collection extends Backbone.Collection
query: (params) -> _query @models, params, "get"
whereBy: (params) -> new @constructor @query(params)
buildQuery: -> _query.build @models, "get"
a = new Collection [
{id:1, title:"test"}
{id:2, title:"about"}
]
result = a.query {title:"about"}
assert.equal result.length, 1
assert.equal result[0].get("title"), "about"
result2 = a.whereBy {title:"about"}
assert.equal result2.length, 1
assert.equal result2.at(0).get("title"), "about"
assert.equal result2.pluck("title")[0], "about"
result3 = a.buildQuery().not(title:"test").run()
assert.equal result3.length, 1
assert.equal result3[0].get("title"), "about"
it "can be used for live collections", ->
Backbone = require "backbone"
class Collection extends Backbone.Collection
query: (params) ->
if params
_query @models, params, "get"
else
_query.build @models, "get"
whereBy: (params) -> new @constructor @query(params)
setFilter: (parent, query) ->
check = _query.tester(query, "get")
@listenTo parent,
add: (model) -> if check(model) then @add(model)
remove: @remove
change: (model) ->
if check(model) then @add(model) else @remove(model)
@add _query(parent.models, query, "get")
parent = new Collection [
{title:"Home", colors:["red","yellow","blue"], likes:12, featured:true, content: "Dummy content about coffeescript"}
{title:"About", colors:["red"], likes:2, featured:true, content: "dummy content about javascript"}
{title:"Contact", colors:["red","blue"], likes:20, content: "Dummy content about PHP"}
]
live = new Collection
live.setFilter parent, {likes:$gt:15}
assert.equal parent.length, 3
assert.equal live.length, 1
# Change Events
parent.at(0).set("likes",16)
assert.equal live.length, 2
parent.at(2).set("likes",2)
assert.equal live.length, 1
# Add to Parent
parent.add [{title:"New", likes:21}, {title:"New2", likes:3}]
assert.equal live.length, 2
assert.equal parent.length, 5
# Remove from Parent
parent.pop()
parent.pop()
assert.equal live.length, 1
it "buildQuery works in oo fashion", ->
a = create()
query = _query.build(a)
.and({likes: {$gt: 5}})
.or({content: {$like: "PHP"}})
.or({colors: {$contains: "yellow"}})
result = query.run()
assert.equal result.length, 2
result = _query.build()
.and(likes: $lt: 15)
.or(content: $like: "Dummy")
.or(featured: $exists: true)
.not(colors: $contains: "yellow")
.run(a)
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "works with dot notation", ->
collection = [
{title:"Home", stats:{likes:10, views:{a:{b:500}}}}
{title:"About", stats:{likes:5, views:{a:{b:234}}}}
{title:"Code", stats:{likes:25, views:{a:{b:796}}}}
]
result = _query collection, {"stats.likes":5}
assert.equal result.length, 1
assert.equal result[0].title, "About"
result = _query collection, {"stats.views.a.b":796}
assert.equal result.length, 1
assert.equal result[0].title, "Code"
it "works with seperate query args", ->
collection = [
{title:"Home", stats:{likes:10, views:{a:{b:500}}}}
{title:"About", stats:{likes:5, views:{a:{b:234}}}}
{title:"Code", stats:{likes:25, views:{a:{b:796}}}}
]
query = _query.build(collection)
.and("title", "Home")
result = query.run()
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$computed", ->
Backbone = require "backbone"
class testModel extends Backbone.Model
full_name: -> "#{@get 'first_name'} #{@get 'last_name'}"
a = new testModel
first_name: "<NAME>"
last_name: "<NAME>"
b = new testModel
first_name: "<NAME>"
last_name: "<NAME>"
c = [a,b]
result = _query c,
full_name: $computed: "<NAME>"
assert.equal result.length, 1
assert.equal result[0].get("first_name"), "<NAME>"
result = _query c,
full_name: $computed: $likeI: "n sm"
assert.equal result.length, 1
assert.equal result[0].get("first_name"), "<NAME>"
it "Handles multiple inequalities", ->
a = create()
result = _query a, likes: { $gt: 2, $lt: 20 }
assert.equal result.length, 1
assert.equal result[0].title, "Home"
result = _query a, likes: { $gte: 2, $lt: 20 }
assert.equal result.length, 2
assert.equal result[0].title, "Home"
assert.equal result[1].title, "About"
result = _query a, likes: { $gt: 2, $lte: 20 }
assert.equal result.length, 2
assert.equal result[0].title, "Home"
assert.equal result[1].title, "Contact"
result = _query a, likes: { $gte: 2, $lte: 20 }
assert.equal result.length, 3
assert.equal result[0].title, "Home"
assert.equal result[1].title, "About"
assert.equal result[2].title, "Contact"
result = _query a, likes: { $gte: 2, $lte: 20, $ne: 12 }
assert.equal result.length, 2
assert.equal result[0].title, "About"
assert.equal result[1].title, "Contact"
it "Handles nested multiple inequalities", ->
a = create()
result = _query a, $and: [likes: { $gt: 2, $lt: 20 }]
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "has a score method", ->
collection = [
{ name:'<NAME>', color:'red', price:100 }
{ name:'<NAME>', color:'black', price:120 }
{ name:'<NAME>', color:'blue', price:150 }
]
results = _query.score( collection, { price: {$lt:140}, color: {$in:['red', 'blue'] }})
assert.equal _.findWhere(results, {name:'dress'})._score, 1
assert.equal _.findWhere(results, {name:'shoes'})._score, 0.5
it "has a score method with a $boost operator", ->
collection = [
{ name:'<NAME>', color:'red', price:100 }
{ name:'<NAME>', color:'black', price:120 }
{ name:'<NAME>', color:'blue', price:150 }
]
results = _query.score( collection, { price: 100, color: {$in:['black'], $boost:3 }})
assert.equal _.findWhere(results, {name:'dress'})._score, 0.5
assert.equal _.findWhere(results, {name:'shoes'})._score, 1.5
it "has a score method with a $boost operator - 2", ->
collection = [
{ name:'<NAME>', color:'red', price:100 }
{ name:'<NAME>', color:'black', price:120 }
{ name:'<NAME>', color:'blue', price:150 }
]
results = _query.score( collection, { name: {$like:'<NAME>', $boost:5}, color: {$in:['black'], $boost:2 }})
assert.equal _.findWhere(results, {name:'dress'})._score, 2.5
assert.equal _.findWhere(results, {name:'shoes'})._score, 1
it "score method throws if compound query", ->
collection = [
{ name:'<NAME>', color:'red', price:100 }
{ name:'<NAME>', color:'black', price:120 }
{ name:'<NAME>', color:'blue', price:150 }
]
assert.throws ->
_query.score collection,
$and: price: 100
$or: [
{color: 'red'}
{color: 'blue'}
]
it "score method throws if non $and query", ->
collection = [
{ name:'<NAME>', color:'red', price:100 }
{ name:'<NAME>', color:'black', price:120 }
{ name:'<NAME>', color:'blue', price:150 }
]
assert.throws ->
_query.score collection,
$or: [
{color: 'red'}
{color: 'blue'}
]
# not parallel to MongoDB
it "$not operator", ->
a = create()
result = _query a, {$not: {likes: {$lt: 12}}}
assert.equal result.length, 2
# This is parallel to MongoDB
it "$not operator - mongo style", ->
a = create()
result = _query a, {likes: {$not: {$lt: 12}}}
assert.equal result.length, 2
# This is parallel to MongoDB
it "$not operator - mongo style", ->
a = create()
result = _query a, {likes: {$not: 12}}
assert.equal result.length, 2
it "combination of $gt and $lt - mongo style", ->
a = create()
result = _query a, {likes: { $gt: 2, $lt: 20}}
assert.equal result.length, 1
it "$not combination of $gt and $lt - mongo style", ->
a = create()
result = _query a, {likes: {$not: { $gt: 2, $lt: 20}}}
assert.equal result.length, 2
it "$nor combination of $gt and $lt - expressions ", ->
a = create()
result = _query a, {$nor: [{likes: { $gt: 2}}, {likes: { $lt: 20}}]}
assert.equal result.length, 0
# This query is not a valid MongoDB query, but if it were one would expect it to yield an empty set
# it "$nor combination of $gt and $lt - values", ->
# a = create()
# result = _query a, {likes: {$nor: [{ $gt: 2}, {$lt: 20}]}}
# assert.equal result.length, 0
it "combination of $gt and $not", ->
a = create()
result = _query a, {likes: { $not: 2, $lt: 20}}
assert.equal result.length, 1
it "equal within an array (#21)", ->
tweets = [{
"entities": {
"user_mentions": [{
"id_str": "10228271"
}]
}
}, {
"entities": {
"user_mentions": [{
"id_str": "10228272"
}]
}
}]
res = _query tweets, {"entities.user_mentions.id_str": "10228272"}
assert.equal(res.length, 1)
res = _query tweets, {"entities.user_mentions.id_str": "10228273"}
assert.equal(res.length, 0)
it "compound $ands (#29)", ->
a = create()
res = _query(a, {
$and: [{
$and: [ likes: {$gt: 5 } ]
}]
})
assert.equal(res.length, 2)
| true | require "coffee-script"
assert = require "assert"
_ = require "underscore"
_collection = [
{title:"Home", colors:["red","yellow","blue"], likes:12, featured:true, content: "Dummy content about coffeescript", score: 0}
{title:"About", colors:["red"], likes:2, featured:true, content: "dummy content about javascript", score: 5}
{title:"Contact", colors:["red","blue"], likes:20, content: "Dummy content about PHP", score: -1, total:NaN}
]
create = -> _.clone(_collection)
module.exports = (_query) ->
it "Equals query", ->
a = create()
result = _query a, title:"Home"
assert.equal result.length, 1
assert.equal result[0].title, "Home"
result = _query a, colors: "blue"
assert.equal result.length, 2
result = _query a, colors: ["red", "blue"]
assert.equal result.length, 1
it "Simple equals query (no results)", ->
a = create()
result = _query a, title:"Homes"
assert.equal result.length, 0
it "equal null doesn't match 0", ->
a = create()
result = _query a, score:null
assert.equal result.length, 0
it "equal NaN matches NaNs", ->
a = create()
result = _query a, total:NaN
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "Simple equals query with explicit $equal", ->
a = create()
result = _query a, title: {$equal: "About"}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$contains operator", ->
a = create()
result = _query a, colors: {$contains: "blue"}
assert.equal result.length, 2
it "$ne operator", ->
a = create()
result = _query a, title: {$ne: "Home"}
assert.equal result.length, 2
it "$lt operator", ->
a = create()
result = _query a, likes: {$lt: 12}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$lt operator", ->
a = create()
result = _query a, score: {$lt: null}
assert.equal result.length, 0
it "$lte operator", ->
a = create()
result = _query a, likes: {$lte: 12}
assert.equal result.length, 2
it "$lte operator", ->
a = create()
result = _query a, score: {$lte: null}
assert.equal result.length, 0
it "$gt operator", ->
a = create()
result = _query a, likes: {$gt: 12}
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "$gt null", ->
a = create()
result = _query a, likes: {$gt: null}
assert.equal result.length, 0
it "$gte operator", ->
a = create()
result = _query a, likes: {$gte: 12}
assert.equal result.length, 2
it "$gte null", ->
a = create()
result = _query a, likes: {$gte: null}
assert.equal result.length, 0
it "$between operator", ->
a = create()
result = _query a, likes: {$between: [1,5]}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$between operator is exclusive", ->
a = create()
result = _query a, likes: {$between: [1,2]}
assert.equal result.length, 0
it "$between operator with null", ->
a = create()
result = _query a, likes: {$between: [null, 5]}
assert.equal result.length, 0
it "$between errors with not enough args", ->
a = create()
assert.throws ->
result = _query a, likes: {$between: []}
assert.throws ->
result = _query a, likes: {$between: [5]}
it "$betweene operator is inclusive", ->
a = create()
result = _query a, likes: {$betweene: [1,2]}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$betweene operator with null", ->
a = create()
result = _query a, likes: {$betweene: [null, 10]}
assert.equal result.length, 0
it "$mod operator", ->
a = create()
result = _query a, likes: {$mod: [3,0]}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$mod operator with null", ->
a = create()
result = _query a, likes: {$mod: [null, 5]}
assert.equal result.length, 0
result = _query a, likes: {$mod: [3, null]}
assert.equal result.length, 0
it "$mod errors with not enough args", ->
a = create()
assert.throws ->
result = _query a, likes: {$mod: []}
assert.throws ->
result = _query a, likes: {$mod: [5]}
it "$in operator", ->
a = create()
result = _query a, title: {$in: ["Home","About"]}
assert.equal result.length, 2
it "$in operator with wrong query value", ->
a = create()
assert.throws ->
_query a, title: {$in: "Home"}
it "$nin operator", ->
a = create()
result = _query a, title: {$nin: ["Home","About"]}
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "$all operator", ->
a = create()
result = _query a, colors: {$all: ["red","blue"]}
assert.equal result.length, 2
it "$all operator (wrong values)", ->
a = create()
result = _query a, title: {$all: ["red","blue"]}
assert.equal result.length, 0
assert.throws ->
_query a, colors: {$all: "red"}
it "$any operator", ->
a = create()
result = _query a, colors: {$any: ["red","blue"]}
assert.equal result.length, 3
result = _query a, colors: {$any: ["yellow","blue"]}
assert.equal result.length, 2
it "$none operator", ->
a = create()
result = _query a, colors: {$none: ["yellow","blue"]}
assert.deepEqual result, [a[1]]
it "$size operator", ->
a = create()
result = _query a, colors: {$size: 3}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$exists operator", ->
a = create()
result = _query a, featured: {$exists: true}
assert.equal result.length, 2
it "$has operator", ->
a = create()
result = _query a, featured: {$exists: false}
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "$like operator", ->
a = create()
result = _query a, content: {$like: "javascript"}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$like operator 2", ->
a = create()
result = _query a, content: {$like: "content"}
assert.equal result.length, 3
it "$likeI operator", ->
a = create()
result = _query a, content: {$likeI: "dummy"}
assert.equal result.length, 3
result = _query a, content: {$like: "dummy"}
assert.equal result.length, 1
it "$startsWith operator", ->
a = create()
result = _query a, title: {$startsWith: "Ho"}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$endsWith operator", ->
a = create()
result = _query a, title: {$endsWith: "me"}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$regex", ->
a = create()
result = _query a, content: {$regex: /javascript/gi}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$regex2", ->
a = create()
result = _query a, content: {$regex: /dummy/}
assert.equal result.length, 1
it "$regex3", ->
a = create()
result = _query a, content: {$regex: /dummy/i}
assert.equal result.length, 3
it "$regex4", ->
a = create()
result = _query a, content: /javascript/i
assert.equal result.length, 1
it "$regex with object", ->
a = create()
result = _query a, content: {$regex: 'dummy'}
assert.equal result.length, 1
it "$regex with object+options", ->
a = create()
result = _query a, content: {$regex: 'dummy', $options: 'i'}
assert.equal result.length, 3
it "$options errors without regexp", ->
a = create()
assert.throws(->
result = _query a, content: {$options: 'i'}
)
it "$cb - callback", ->
a = create()
fn = (attr) ->
attr.charAt(0).toLowerCase() is "c"
result = _query a,
title: $cb: fn
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "$cb - callback - checking 'this' is the model", ->
a = create()
result = _query a, title:
$cb: (attr) -> @title is "Home"
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "Dynamic equals query", ->
a = create()
result = _query a, title:()->"Homes"
assert.equal result.length, 0
result = _query a, title:()->"Home"
assert.equal result.length, 1
it "ensure dynamic query not cached", ->
a = create()
count = 12 - a.length
query = _query.testWith(likes: $lt: -> count += 1)
result = _.filter(a, query)
assert.equal (result).length, 1
result = _.filter(a, query)
assert.equal (result).length, 2
it "$and operator", ->
a = create()
result = _query a, likes: {$gt: 5}, colors: {$contains: "yellow"}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$and operator (explicit)", ->
a = create()
result = _query a, $and: [{likes: {$gt: 5}, colors: {$contains: "yellow"}}]
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$or operator", ->
a = create()
result = _query a, $or: [{likes: {$gt: 5}}, {colors: {$contains: "yellow"}}]
assert.equal result.length, 2
it "$or2 operator", ->
a = create()
result = _query a, $or: [{likes: {$gt: 5}}, {featured: true}]
assert.equal result.length, 3
it "$or with multiple params in a condition", ->
dataset = [{x: 1, y: 2}, {x: 1.25, y: 3}, {x: 1.5, y: 3}, {x: 2, y: 4}]
result = _query(dataset, {
$or: [{
x: {
$gt: 1
},
y: {
$lt: 4
}
}, {
foo: 1
}]
})
assert.equal result.length, 2
it "$nor operator", ->
a = create()
result = _query a, $nor: [{likes: {$gt: 5}}, {colors: {$contains: "yellow"}}]
assert.equal result.length, 1
assert.equal result[0].title, "About"
for type in ["$and", "$or", "$nor"]
it type + " throws error when not an array", ->
a = create()
query = {}
query[type] = {
a: 1
}
assert.throws((-> _query(a, query)), Error);
it "Compound Queries", ->
a = create()
result = _query a, $and: [{likes: {$gt: 5}}], $or: [{content: {$like: "PHP"}}, {colors: {$contains: "yellow"}}]
assert.equal result.length, 2
result = _query a,
$and: [
likes: $lt: 15
]
$or: [
{
content:
$like: "PI:NAME:<NAME>END_PI"
},
{
featured:
$exists:true
}
]
$not:
colors: $contains: "yellow"
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$not operator", ->
a = create()
result = _query a, {$not: {likes: {$lt: 12}}}
assert.equal result.length, 2
#These tests fail, but would pass if it $not worked parallel to MongoDB
it "$not operator", ->
a = create()
result = _query a, {likes: {$not: {$lt: 12}}}
assert.equal result.length, 2
it "$not operator", ->
a = create()
result = _query a, likes: {$not: 12}
assert.equal result.length, 2
it "$not $equal operator", ->
a = create()
result = _query a, likes: {$not: {$equal: 12}}
assert.equal result.length, 2
it "$not $equal operator", ->
a = create()
result = _query a, likes: {$not: {$ne: 12}}
assert.equal result.length, 1
it "$elemMatch", ->
a = [
{title: "Home", comments:[
{text:"I like this post"}
{text:"I love this post"}
{text:"I hate this post"}
]}
{title: "About", comments:[
{text:"I like this page"}
{text:"I love this page"}
{text:"I really like this page"}
]}
]
b = [
{foo: [
{shape: "square", color: "purple", thick: false}
{shape: "circle", color: "red", thick: true}
]}
{foo: [
{shape: "square", color: "red", thick: true}
{shape: "circle", color: "purple", thick: false}
]}
]
text_search = {$likeI: "love"}
result = _query a, $or: [
{
comments:
$elemMatch:
text: text_search
},
{title: text_search}
]
assert.equal result.length, 2
result = _query a, $or: [
comments:
$elemMatch:
text: /post/
]
assert.equal result.length, 1
result = _query a, $or: [
{
comments:
$elemMatch:
text: /post/
},
{title: /about/i}
]
assert.equal result.length, 2
result = _query a, $or: [
comments:
$elemMatch:
text: /really/
]
assert.equal result.length, 1
result = _query b,
foo:
$elemMatch:
shape:"square"
color:"purple"
assert.equal result.length, 1
assert.equal result[0].foo[0].shape, "square"
assert.equal result[0].foo[0].color, "purple"
assert.equal result[0].foo[0].thick, false
it "$any and $all", ->
a = name: "PI:NAME:<NAME>END_PI", tags1: ["red","yellow"], tags2: ["orange", "green", "red", "blue"]
b = name: "PI:NAME:<NAME>END_PI", tags1: ["purple","blue"], tags2: ["orange", "red", "blue"]
c = name: "PI:NAME:<NAME>END_PI", tags1: ["black","yellow"], tags2: ["green", "orange", "blue"]
d = name: "testPI:NAME:<NAME>END_PI", tags1: ["red","yellow","blue"], tags2: ["green"]
e = [a,b,c,d]
result = _query e,
tags1: $any: ["red","purple"] # should match a, b, d
tags2: $all: ["orange","green"] # should match a, c
assert.equal result.length, 1
assert.equal result[0].name, "test"
it "$elemMatch - compound queries", ->
a = [
{title: "Home", comments:[
{text:"I like this post"}
{text:"I love this post"}
{text:"I hate this post"}
]}
{title: "About", comments:[
{text:"I like this page"}
{text:"I love this page"}
{text:"I really like this page"}
]}
]
result = _query a,
comments:
$elemMatch:
$not:
text:/page/
assert.equal result.length, 1
# Test from RobW - https://github.com/Rob--W
it "Explicit $and combined with matching $or must return the correct number of items", ->
Col = [
{equ:'ok', same: 'ok'},
{equ:'ok', same: 'ok'}
]
result = _query Col,
$and: [ # Matches both items
{equ: 'ok'}, # Matches both items
$or:
same: 'ok'
]
assert.equal result.length, 2
# Test from RobW - https://github.com/Rob--W
it "Implicit $and consisting of non-matching subquery and $or must return empty list", ->
Col = [
{equ:'ok', same: 'ok'},
{equ:'ok', same: 'ok'}
]
result = _query Col,
$and: [{equ: 'bogus'}] # Matches nothing
$or: [
same: 'ok' # Matches all items, but due to implicit $and, this subquery should not affect the result
]
assert.equal result.length, 0
it.skip "Testing nested compound operators", ->
a = create()
result = _query a,
$and: [
{colors: $contains: "blue"} # Matches 1,3
$or: [
{featured:true} # Matches 1,2
{likes:12} # Matches 1
]
]
# And only matches 1
$or:[
{content:$like:"dummy"} # Matches 2
{content:$like:"Dummy"} # Matches 1,3
]
# Or matches 3
assert.equal result.length, 1
result = _query a,
$and: [
{colors: $contains: "blue"} # Matches 1,3
$or: [
{featured:true} # Matches 1,2
{likes:20} # Matches 3
]
]
# And only matches 2
$or:[
{content:$like:"dummy"} # Matches 2
{content:$like:"PI:NAME:<NAME>END_PI"} # Matches 1,3
]
# Or matches 3
assert.equal result.length, 2
it "works with queries supplied as arrays", ->
a = create()
result = _query a,
$or: [
{title:"Home"}
{title:"About"}
]
assert.equal result.length, 2
assert.equal result[0].title, "Home"
assert.equal result[1].title, "About"
it "works with underscore chain", ->
a = create()
q =
$or: [
{title:"Home"}
{title:"About"}
]
result = _.chain(a).query(q).pluck("title").value()
assert.equal result.length, 2
assert.equal result[0], "Home"
assert.equal result[1], "About"
it "works with a getter property", ->
Backbone = require "backbone"
a = new Backbone.Collection [
{id:1, title:"test"}
{id:2, title:"about"}
]
result = _query a.models, {title:"about"}, "get"
assert.equal result.length, 1
assert.equal result[0].get("title"), "about"
it "can be mixed into backbone collections", ->
Backbone = require "backbone"
class Collection extends Backbone.Collection
query: (params) -> _query @models, params, "get"
whereBy: (params) -> new @constructor @query(params)
buildQuery: -> _query.build @models, "get"
a = new Collection [
{id:1, title:"test"}
{id:2, title:"about"}
]
result = a.query {title:"about"}
assert.equal result.length, 1
assert.equal result[0].get("title"), "about"
result2 = a.whereBy {title:"about"}
assert.equal result2.length, 1
assert.equal result2.at(0).get("title"), "about"
assert.equal result2.pluck("title")[0], "about"
result3 = a.buildQuery().not(title:"test").run()
assert.equal result3.length, 1
assert.equal result3[0].get("title"), "about"
it "can be used for live collections", ->
Backbone = require "backbone"
class Collection extends Backbone.Collection
query: (params) ->
if params
_query @models, params, "get"
else
_query.build @models, "get"
whereBy: (params) -> new @constructor @query(params)
setFilter: (parent, query) ->
check = _query.tester(query, "get")
@listenTo parent,
add: (model) -> if check(model) then @add(model)
remove: @remove
change: (model) ->
if check(model) then @add(model) else @remove(model)
@add _query(parent.models, query, "get")
parent = new Collection [
{title:"Home", colors:["red","yellow","blue"], likes:12, featured:true, content: "Dummy content about coffeescript"}
{title:"About", colors:["red"], likes:2, featured:true, content: "dummy content about javascript"}
{title:"Contact", colors:["red","blue"], likes:20, content: "Dummy content about PHP"}
]
live = new Collection
live.setFilter parent, {likes:$gt:15}
assert.equal parent.length, 3
assert.equal live.length, 1
# Change Events
parent.at(0).set("likes",16)
assert.equal live.length, 2
parent.at(2).set("likes",2)
assert.equal live.length, 1
# Add to Parent
parent.add [{title:"New", likes:21}, {title:"New2", likes:3}]
assert.equal live.length, 2
assert.equal parent.length, 5
# Remove from Parent
parent.pop()
parent.pop()
assert.equal live.length, 1
it "buildQuery works in oo fashion", ->
a = create()
query = _query.build(a)
.and({likes: {$gt: 5}})
.or({content: {$like: "PHP"}})
.or({colors: {$contains: "yellow"}})
result = query.run()
assert.equal result.length, 2
result = _query.build()
.and(likes: $lt: 15)
.or(content: $like: "Dummy")
.or(featured: $exists: true)
.not(colors: $contains: "yellow")
.run(a)
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "works with dot notation", ->
collection = [
{title:"Home", stats:{likes:10, views:{a:{b:500}}}}
{title:"About", stats:{likes:5, views:{a:{b:234}}}}
{title:"Code", stats:{likes:25, views:{a:{b:796}}}}
]
result = _query collection, {"stats.likes":5}
assert.equal result.length, 1
assert.equal result[0].title, "About"
result = _query collection, {"stats.views.a.b":796}
assert.equal result.length, 1
assert.equal result[0].title, "Code"
it "works with seperate query args", ->
collection = [
{title:"Home", stats:{likes:10, views:{a:{b:500}}}}
{title:"About", stats:{likes:5, views:{a:{b:234}}}}
{title:"Code", stats:{likes:25, views:{a:{b:796}}}}
]
query = _query.build(collection)
.and("title", "Home")
result = query.run()
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$computed", ->
Backbone = require "backbone"
class testModel extends Backbone.Model
full_name: -> "#{@get 'first_name'} #{@get 'last_name'}"
a = new testModel
first_name: "PI:NAME:<NAME>END_PI"
last_name: "PI:NAME:<NAME>END_PI"
b = new testModel
first_name: "PI:NAME:<NAME>END_PI"
last_name: "PI:NAME:<NAME>END_PI"
c = [a,b]
result = _query c,
full_name: $computed: "PI:NAME:<NAME>END_PI"
assert.equal result.length, 1
assert.equal result[0].get("first_name"), "PI:NAME:<NAME>END_PI"
result = _query c,
full_name: $computed: $likeI: "n sm"
assert.equal result.length, 1
assert.equal result[0].get("first_name"), "PI:NAME:<NAME>END_PI"
it "Handles multiple inequalities", ->
a = create()
result = _query a, likes: { $gt: 2, $lt: 20 }
assert.equal result.length, 1
assert.equal result[0].title, "Home"
result = _query a, likes: { $gte: 2, $lt: 20 }
assert.equal result.length, 2
assert.equal result[0].title, "Home"
assert.equal result[1].title, "About"
result = _query a, likes: { $gt: 2, $lte: 20 }
assert.equal result.length, 2
assert.equal result[0].title, "Home"
assert.equal result[1].title, "Contact"
result = _query a, likes: { $gte: 2, $lte: 20 }
assert.equal result.length, 3
assert.equal result[0].title, "Home"
assert.equal result[1].title, "About"
assert.equal result[2].title, "Contact"
result = _query a, likes: { $gte: 2, $lte: 20, $ne: 12 }
assert.equal result.length, 2
assert.equal result[0].title, "About"
assert.equal result[1].title, "Contact"
it "Handles nested multiple inequalities", ->
a = create()
result = _query a, $and: [likes: { $gt: 2, $lt: 20 }]
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "has a score method", ->
collection = [
{ name:'PI:NAME:<NAME>END_PI', color:'red', price:100 }
{ name:'PI:NAME:<NAME>END_PI', color:'black', price:120 }
{ name:'PI:NAME:<NAME>END_PI', color:'blue', price:150 }
]
results = _query.score( collection, { price: {$lt:140}, color: {$in:['red', 'blue'] }})
assert.equal _.findWhere(results, {name:'dress'})._score, 1
assert.equal _.findWhere(results, {name:'shoes'})._score, 0.5
it "has a score method with a $boost operator", ->
collection = [
{ name:'PI:NAME:<NAME>END_PI', color:'red', price:100 }
{ name:'PI:NAME:<NAME>END_PI', color:'black', price:120 }
{ name:'PI:NAME:<NAME>END_PI', color:'blue', price:150 }
]
results = _query.score( collection, { price: 100, color: {$in:['black'], $boost:3 }})
assert.equal _.findWhere(results, {name:'dress'})._score, 0.5
assert.equal _.findWhere(results, {name:'shoes'})._score, 1.5
it "has a score method with a $boost operator - 2", ->
collection = [
{ name:'PI:NAME:<NAME>END_PI', color:'red', price:100 }
{ name:'PI:NAME:<NAME>END_PI', color:'black', price:120 }
{ name:'PI:NAME:<NAME>END_PI', color:'blue', price:150 }
]
results = _query.score( collection, { name: {$like:'PI:NAME:<NAME>END_PI', $boost:5}, color: {$in:['black'], $boost:2 }})
assert.equal _.findWhere(results, {name:'dress'})._score, 2.5
assert.equal _.findWhere(results, {name:'shoes'})._score, 1
it "score method throws if compound query", ->
collection = [
{ name:'PI:NAME:<NAME>END_PI', color:'red', price:100 }
{ name:'PI:NAME:<NAME>END_PI', color:'black', price:120 }
{ name:'PI:NAME:<NAME>END_PI', color:'blue', price:150 }
]
assert.throws ->
_query.score collection,
$and: price: 100
$or: [
{color: 'red'}
{color: 'blue'}
]
it "score method throws if non $and query", ->
collection = [
{ name:'PI:NAME:<NAME>END_PI', color:'red', price:100 }
{ name:'PI:NAME:<NAME>END_PI', color:'black', price:120 }
{ name:'PI:NAME:<NAME>END_PI', color:'blue', price:150 }
]
assert.throws ->
_query.score collection,
$or: [
{color: 'red'}
{color: 'blue'}
]
# not parallel to MongoDB
it "$not operator", ->
a = create()
result = _query a, {$not: {likes: {$lt: 12}}}
assert.equal result.length, 2
# This is parallel to MongoDB
it "$not operator - mongo style", ->
a = create()
result = _query a, {likes: {$not: {$lt: 12}}}
assert.equal result.length, 2
# This is parallel to MongoDB
it "$not operator - mongo style", ->
a = create()
result = _query a, {likes: {$not: 12}}
assert.equal result.length, 2
it "combination of $gt and $lt - mongo style", ->
a = create()
result = _query a, {likes: { $gt: 2, $lt: 20}}
assert.equal result.length, 1
it "$not combination of $gt and $lt - mongo style", ->
a = create()
result = _query a, {likes: {$not: { $gt: 2, $lt: 20}}}
assert.equal result.length, 2
it "$nor combination of $gt and $lt - expressions ", ->
a = create()
result = _query a, {$nor: [{likes: { $gt: 2}}, {likes: { $lt: 20}}]}
assert.equal result.length, 0
# This query is not a valid MongoDB query, but if it were one would expect it to yield an empty set
# it "$nor combination of $gt and $lt - values", ->
# a = create()
# result = _query a, {likes: {$nor: [{ $gt: 2}, {$lt: 20}]}}
# assert.equal result.length, 0
it "combination of $gt and $not", ->
a = create()
result = _query a, {likes: { $not: 2, $lt: 20}}
assert.equal result.length, 1
it "equal within an array (#21)", ->
tweets = [{
"entities": {
"user_mentions": [{
"id_str": "10228271"
}]
}
}, {
"entities": {
"user_mentions": [{
"id_str": "10228272"
}]
}
}]
res = _query tweets, {"entities.user_mentions.id_str": "10228272"}
assert.equal(res.length, 1)
res = _query tweets, {"entities.user_mentions.id_str": "10228273"}
assert.equal(res.length, 0)
it "compound $ands (#29)", ->
a = create()
res = _query(a, {
$and: [{
$and: [ likes: {$gt: 5 } ]
}]
})
assert.equal(res.length, 2)
|
[
{
"context": " chatid / 256 + '.' + chatid % 256\n pass: chatid + 20000\n button: '#FF0000'\n attached: '",
"end": 1155,
"score": 0.9989660978317261,
"start": 1141,
"tag": "PASSWORD",
"value": "chatid + 20000"
},
{
"context": "SET ?',\n id: userid\... | test/lib/db-init.coffee | underc0de/korex-server | 0 | database = require '../../src/services/database'
config = require '../../config/tests'
idRange = [config.idRange.min .. config.idRange.max]
chatRange = [config.chatRange.min .. config.chatRange.max]
#`chats`(`id`, `name`, `bg`, `language`, `desc`, `sc`, `ch`, `email`, `radio`, `pass`, `button`, `attached`, `pool`, `pools`)
Promise.all(
for chatid in chatRange
database.exec('DELETE FROM `messages` WHERE id = ?', chatid)
).then( ->
return Promise.all(
for chatid in chatRange
database.exec('DELETE FROM `chats` WHERE id = ?', chatid)
)
).then( ->
for userid in idRange
database.exec('DELETE FROM `users` WHERE id = ?', userid)
).then( ->
return Promise.all(
(
for chatid in chatRange
database.exec('INSERT INTO `chats` SET ?',
id: chatid
name: 'test ' + chatid
bg: 'http://xat.com/web_gear/background/xat_splash.jpg'
language: 'en'
desc: 'Tupucal description ' + chatid
sc: 'Welcome to chat ' + chatid
email: 'admin' + chatid + '@example.com'
radio: '127.0.' + chatid / 256 + '.' + chatid % 256
pass: chatid + 20000
button: '#FF0000'
attached: ''
)
)
)
).then( ->
return Promise.all(
(
for userid in idRange
database.exec('INSERT INTO `users` SET ?',
id: userid
username: 'unregistered'#'username' + userid
nickname: 'nickname' + userid
password: '123' + userid
avatar: userid
url: 'http://example.com/id?' + userid
email: 'mail' + userid + '@mail.example.com'
k: 'k_' + userid
k2: 'k2_' + userid
k3: 'k3_' + userid
bride: ''
xats: 0
days: 0
enabled: 'enabled'
dO: ''
loginKey: ''
)
)
)
).catch((err) ->
console.error '[ERROR] Error while initializing database ' + JSON.stringify(err)
process.exit(1)
).then ->
process.exit()
| 155361 | database = require '../../src/services/database'
config = require '../../config/tests'
idRange = [config.idRange.min .. config.idRange.max]
chatRange = [config.chatRange.min .. config.chatRange.max]
#`chats`(`id`, `name`, `bg`, `language`, `desc`, `sc`, `ch`, `email`, `radio`, `pass`, `button`, `attached`, `pool`, `pools`)
Promise.all(
for chatid in chatRange
database.exec('DELETE FROM `messages` WHERE id = ?', chatid)
).then( ->
return Promise.all(
for chatid in chatRange
database.exec('DELETE FROM `chats` WHERE id = ?', chatid)
)
).then( ->
for userid in idRange
database.exec('DELETE FROM `users` WHERE id = ?', userid)
).then( ->
return Promise.all(
(
for chatid in chatRange
database.exec('INSERT INTO `chats` SET ?',
id: chatid
name: 'test ' + chatid
bg: 'http://xat.com/web_gear/background/xat_splash.jpg'
language: 'en'
desc: 'Tupucal description ' + chatid
sc: 'Welcome to chat ' + chatid
email: 'admin' + chatid + '@example.com'
radio: '127.0.' + chatid / 256 + '.' + chatid % 256
pass: <PASSWORD>
button: '#FF0000'
attached: ''
)
)
)
).then( ->
return Promise.all(
(
for userid in idRange
database.exec('INSERT INTO `users` SET ?',
id: userid
username: 'unregistered'#'username' + userid
nickname: 'nickname' + userid
password: '<PASSWORD>' + userid
avatar: userid
url: 'http://example.com/id?' + userid
email: 'mail' + userid + '@<EMAIL>'
k: 'k_' + userid
k2: 'k2_' + userid
k3: 'k3_' + userid
bride: ''
xats: 0
days: 0
enabled: 'enabled'
dO: ''
loginKey: ''
)
)
)
).catch((err) ->
console.error '[ERROR] Error while initializing database ' + JSON.stringify(err)
process.exit(1)
).then ->
process.exit()
| true | database = require '../../src/services/database'
config = require '../../config/tests'
idRange = [config.idRange.min .. config.idRange.max]
chatRange = [config.chatRange.min .. config.chatRange.max]
#`chats`(`id`, `name`, `bg`, `language`, `desc`, `sc`, `ch`, `email`, `radio`, `pass`, `button`, `attached`, `pool`, `pools`)
Promise.all(
for chatid in chatRange
database.exec('DELETE FROM `messages` WHERE id = ?', chatid)
).then( ->
return Promise.all(
for chatid in chatRange
database.exec('DELETE FROM `chats` WHERE id = ?', chatid)
)
).then( ->
for userid in idRange
database.exec('DELETE FROM `users` WHERE id = ?', userid)
).then( ->
return Promise.all(
(
for chatid in chatRange
database.exec('INSERT INTO `chats` SET ?',
id: chatid
name: 'test ' + chatid
bg: 'http://xat.com/web_gear/background/xat_splash.jpg'
language: 'en'
desc: 'Tupucal description ' + chatid
sc: 'Welcome to chat ' + chatid
email: 'admin' + chatid + '@example.com'
radio: '127.0.' + chatid / 256 + '.' + chatid % 256
pass: PI:PASSWORD:<PASSWORD>END_PI
button: '#FF0000'
attached: ''
)
)
)
).then( ->
return Promise.all(
(
for userid in idRange
database.exec('INSERT INTO `users` SET ?',
id: userid
username: 'unregistered'#'username' + userid
nickname: 'nickname' + userid
password: 'PI:PASSWORD:<PASSWORD>END_PI' + userid
avatar: userid
url: 'http://example.com/id?' + userid
email: 'mail' + userid + '@PI:EMAIL:<EMAIL>END_PI'
k: 'k_' + userid
k2: 'k2_' + userid
k3: 'k3_' + userid
bride: ''
xats: 0
days: 0
enabled: 'enabled'
dO: ''
loginKey: ''
)
)
)
).catch((err) ->
console.error '[ERROR] Error while initializing database ' + JSON.stringify(err)
process.exit(1)
).then ->
process.exit()
|
[
{
"context": "=================================\n# Copyright 2014 Hatio, Lab.\n# Licensed under The MIT License\n# http",
"end": 63,
"score": 0.5374237895011902,
"start": 62,
"tag": "NAME",
"value": "H"
}
] | src/backup/registry.coffee | heartyoh/infopik | 0 | # ==========================================
# Copyright 2014 Hatio, Lab.
# Licensed under The MIT License
# http://opensource.org/licenses/MIT
# ==========================================
define [
'./compose'
'./advice'
], (compose, advice)->
"use strict"
parseEventArgs = (instance, args) ->
end = args.length;
callback = args[--end] if typeof(args[end - 1]) is 'function'
--end if typeof(args[end - 1]) is 'object'
if end is 2
element = args[0]
type = args[1]
else
element = instance.node;
type = args[0]
{
element: element
type: type
callback: callback
}
matchEvent = (a, b) -> (a.element is b.element) and (a.type is b.type) and (!b.callback || a.callback is b.callback)
class Registry
constructor: ->
registry = this;
(this.reset = ->
this.components = []
this.allInstances = {}
this.events = []
).call(this);
class ComponentInfo
constructor: (component) ->
this.component = component
this.attachedTo = []
this.instances = {}
this.addInstance = (instance) ->
instanceInfo = new InstanceInfo(instance)
this.instances[instance.identity] = instanceInfo
this.attachedTo.push(instance.node)
instanceInfo
this.removeInstance = (instance) ->
delete this.instances[instance.identity]
indexOfNode = this.attachedTo.indexOf(instance.node)
(indexOfNode > -1) && this.attachedTo.splice(indexOfNode, 1)
if !Object.keys(this.instances).length
registry.removeComponentInfo(this)
this.isAttachedTo = (node) ->
this.attachedTo.indexOf(node) > -1;
class InstanceInfo
constructor: (instance) ->
this.instance = instance
this.events = []
this.addBind = (event) ->
this.events.push(event)
registry.events.push(event)
this.removeBind = (event) ->
for e, i in this.events
this.events.splice(i, 1) if matchEvent(e, event)
this.addInstance = (instance) ->
component = this.findComponentInfo(instance)
if !component
component = new ComponentInfo(instance.constructor)
this.components.push(component)
inst = component.addInstance(instance)
this.allInstances[instance.identity] = inst
component
this.removeInstance = (instance) ->
instInfo = this.findInstanceInfo(instance)
componentInfo = this.findComponentInfo(instance)
componentInfo && componentInfo.removeInstance(instance)
delete this.allInstances[instance.identity]
this.removeComponentInfo = (componentInfo) ->
index = this.components.indexOf(componentInfo)
(index > -1) && this.components.splice(index, 1)
this.findComponentInfo = (which) ->
component = if which.attachTo then which else which.constructor
for c in this.components
return c if c.component is component
null
this.findInstanceInfo = (instance) ->
this.allInstances[instance.identity] || null
this.getBoundEventNames = (instance) ->
return this.findInstanceInfo(instance).events.map((ev) -> return ev.type)
this.findInstanceInfoByNode = (node) ->
result = []
for own k, thisInstanceInfo of this.allInstances
result.push(thisInstanceInfo) if thisInstanceInfo.instance.node is node
result
this.on = (componentOn) ->
instance = registry.findInstanceInfo(this)
l = arguments.length
otherArgs = new Array(l - 1)
otherArgs[i - 1] = arguments[i] for i in [1..l - 1]
if instance
boundCallback = componentOn.apply(null, otherArgs)
if (boundCallback)
otherArgs[otherArgs.length - 1] = boundCallback
event = parseEventArgs(this, otherArgs)
instance.addBind(event)
this.off = ->
event = parseEventArgs(this, arguments)
instance = registry.findInstanceInfo(this)
instance.removeBind(event) if instance
registry.events.splice(i, 1) for e, i in registry.events when matchEvent(e, event)
this.trigger = ->
this.teardown = -> registry.removeInstance(this)
this.withRegistration = ->
compose.mixin this, [advice.withAdvice]
this.after 'initialize', -> registry.addInstance(this)
this.around 'on', registry.on
this.after 'off', registry.off
window.DEBUG && DEBUG.enabled && this.after('trigger', registry.trigger)
this.after 'teardown',
obj: registry
fnName: 'teardown'
new Registry
| 145409 | # ==========================================
# Copyright 2014 <NAME>atio, Lab.
# Licensed under The MIT License
# http://opensource.org/licenses/MIT
# ==========================================
define [
'./compose'
'./advice'
], (compose, advice)->
"use strict"
parseEventArgs = (instance, args) ->
end = args.length;
callback = args[--end] if typeof(args[end - 1]) is 'function'
--end if typeof(args[end - 1]) is 'object'
if end is 2
element = args[0]
type = args[1]
else
element = instance.node;
type = args[0]
{
element: element
type: type
callback: callback
}
matchEvent = (a, b) -> (a.element is b.element) and (a.type is b.type) and (!b.callback || a.callback is b.callback)
class Registry
constructor: ->
registry = this;
(this.reset = ->
this.components = []
this.allInstances = {}
this.events = []
).call(this);
class ComponentInfo
constructor: (component) ->
this.component = component
this.attachedTo = []
this.instances = {}
this.addInstance = (instance) ->
instanceInfo = new InstanceInfo(instance)
this.instances[instance.identity] = instanceInfo
this.attachedTo.push(instance.node)
instanceInfo
this.removeInstance = (instance) ->
delete this.instances[instance.identity]
indexOfNode = this.attachedTo.indexOf(instance.node)
(indexOfNode > -1) && this.attachedTo.splice(indexOfNode, 1)
if !Object.keys(this.instances).length
registry.removeComponentInfo(this)
this.isAttachedTo = (node) ->
this.attachedTo.indexOf(node) > -1;
class InstanceInfo
constructor: (instance) ->
this.instance = instance
this.events = []
this.addBind = (event) ->
this.events.push(event)
registry.events.push(event)
this.removeBind = (event) ->
for e, i in this.events
this.events.splice(i, 1) if matchEvent(e, event)
this.addInstance = (instance) ->
component = this.findComponentInfo(instance)
if !component
component = new ComponentInfo(instance.constructor)
this.components.push(component)
inst = component.addInstance(instance)
this.allInstances[instance.identity] = inst
component
this.removeInstance = (instance) ->
instInfo = this.findInstanceInfo(instance)
componentInfo = this.findComponentInfo(instance)
componentInfo && componentInfo.removeInstance(instance)
delete this.allInstances[instance.identity]
this.removeComponentInfo = (componentInfo) ->
index = this.components.indexOf(componentInfo)
(index > -1) && this.components.splice(index, 1)
this.findComponentInfo = (which) ->
component = if which.attachTo then which else which.constructor
for c in this.components
return c if c.component is component
null
this.findInstanceInfo = (instance) ->
this.allInstances[instance.identity] || null
this.getBoundEventNames = (instance) ->
return this.findInstanceInfo(instance).events.map((ev) -> return ev.type)
this.findInstanceInfoByNode = (node) ->
result = []
for own k, thisInstanceInfo of this.allInstances
result.push(thisInstanceInfo) if thisInstanceInfo.instance.node is node
result
this.on = (componentOn) ->
instance = registry.findInstanceInfo(this)
l = arguments.length
otherArgs = new Array(l - 1)
otherArgs[i - 1] = arguments[i] for i in [1..l - 1]
if instance
boundCallback = componentOn.apply(null, otherArgs)
if (boundCallback)
otherArgs[otherArgs.length - 1] = boundCallback
event = parseEventArgs(this, otherArgs)
instance.addBind(event)
this.off = ->
event = parseEventArgs(this, arguments)
instance = registry.findInstanceInfo(this)
instance.removeBind(event) if instance
registry.events.splice(i, 1) for e, i in registry.events when matchEvent(e, event)
this.trigger = ->
this.teardown = -> registry.removeInstance(this)
this.withRegistration = ->
compose.mixin this, [advice.withAdvice]
this.after 'initialize', -> registry.addInstance(this)
this.around 'on', registry.on
this.after 'off', registry.off
window.DEBUG && DEBUG.enabled && this.after('trigger', registry.trigger)
this.after 'teardown',
obj: registry
fnName: 'teardown'
new Registry
| true | # ==========================================
# Copyright 2014 PI:NAME:<NAME>END_PIatio, Lab.
# Licensed under The MIT License
# http://opensource.org/licenses/MIT
# ==========================================
define [
'./compose'
'./advice'
], (compose, advice)->
"use strict"
parseEventArgs = (instance, args) ->
end = args.length;
callback = args[--end] if typeof(args[end - 1]) is 'function'
--end if typeof(args[end - 1]) is 'object'
if end is 2
element = args[0]
type = args[1]
else
element = instance.node;
type = args[0]
{
element: element
type: type
callback: callback
}
matchEvent = (a, b) -> (a.element is b.element) and (a.type is b.type) and (!b.callback || a.callback is b.callback)
class Registry
constructor: ->
registry = this;
(this.reset = ->
this.components = []
this.allInstances = {}
this.events = []
).call(this);
class ComponentInfo
constructor: (component) ->
this.component = component
this.attachedTo = []
this.instances = {}
this.addInstance = (instance) ->
instanceInfo = new InstanceInfo(instance)
this.instances[instance.identity] = instanceInfo
this.attachedTo.push(instance.node)
instanceInfo
this.removeInstance = (instance) ->
delete this.instances[instance.identity]
indexOfNode = this.attachedTo.indexOf(instance.node)
(indexOfNode > -1) && this.attachedTo.splice(indexOfNode, 1)
if !Object.keys(this.instances).length
registry.removeComponentInfo(this)
this.isAttachedTo = (node) ->
this.attachedTo.indexOf(node) > -1;
class InstanceInfo
constructor: (instance) ->
this.instance = instance
this.events = []
this.addBind = (event) ->
this.events.push(event)
registry.events.push(event)
this.removeBind = (event) ->
for e, i in this.events
this.events.splice(i, 1) if matchEvent(e, event)
this.addInstance = (instance) ->
component = this.findComponentInfo(instance)
if !component
component = new ComponentInfo(instance.constructor)
this.components.push(component)
inst = component.addInstance(instance)
this.allInstances[instance.identity] = inst
component
this.removeInstance = (instance) ->
instInfo = this.findInstanceInfo(instance)
componentInfo = this.findComponentInfo(instance)
componentInfo && componentInfo.removeInstance(instance)
delete this.allInstances[instance.identity]
this.removeComponentInfo = (componentInfo) ->
index = this.components.indexOf(componentInfo)
(index > -1) && this.components.splice(index, 1)
this.findComponentInfo = (which) ->
component = if which.attachTo then which else which.constructor
for c in this.components
return c if c.component is component
null
this.findInstanceInfo = (instance) ->
this.allInstances[instance.identity] || null
this.getBoundEventNames = (instance) ->
return this.findInstanceInfo(instance).events.map((ev) -> return ev.type)
this.findInstanceInfoByNode = (node) ->
result = []
for own k, thisInstanceInfo of this.allInstances
result.push(thisInstanceInfo) if thisInstanceInfo.instance.node is node
result
this.on = (componentOn) ->
instance = registry.findInstanceInfo(this)
l = arguments.length
otherArgs = new Array(l - 1)
otherArgs[i - 1] = arguments[i] for i in [1..l - 1]
if instance
boundCallback = componentOn.apply(null, otherArgs)
if (boundCallback)
otherArgs[otherArgs.length - 1] = boundCallback
event = parseEventArgs(this, otherArgs)
instance.addBind(event)
this.off = ->
event = parseEventArgs(this, arguments)
instance = registry.findInstanceInfo(this)
instance.removeBind(event) if instance
registry.events.splice(i, 1) for e, i in registry.events when matchEvent(e, event)
this.trigger = ->
this.teardown = -> registry.removeInstance(this)
this.withRegistration = ->
compose.mixin this, [advice.withAdvice]
this.after 'initialize', -> registry.addInstance(this)
this.around 'on', registry.on
this.after 'off', registry.off
window.DEBUG && DEBUG.enabled && this.after('trigger', registry.trigger)
this.after 'teardown',
obj: registry
fnName: 'teardown'
new Registry
|
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.9999107122421265,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/coffee/react/admin/contest/user-entry.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import { UserEntryDeleteButton } from './user-entry-delete-button'
import * as React from 'react'
import { tr, td, a, div } from 'react-dom-factories'
import { UserAvatar } from 'user-avatar'
el = React.createElement
export class UserEntry extends React.Component
render: =>
className = 'osu-table__body-row osu-table__body-row--highlightable admin-contest-entry'
className += ' admin-contest-entry__deleted' if @props.entry.deleted
tr
className: className
key: @props.entry.id,
td className: 'osu-table__cell admin-contest-entry__column',
a
className: 'admin-contest-entry__user-link'
href: laroute.route('users.show', user: @props.entry.user.id),
div className: 'admin-contest-entry__avatar',
el UserAvatar, user: @props.entry.user, modifiers: ['full-rounded']
@props.entry.user.username
td className: 'osu-table__cell admin-contest-entry__column',
a download: @props.entry.original_filename, href: @props.entry.url, @props.entry.filename
td className: 'osu-table__cell admin-contest-entry__column',
osu.formatBytes(@props.entry.filesize)
td className: 'admin-contest-entry__column admin-contest-entry__column--button',
el UserEntryDeleteButton,
entry: @props.entry
| 141590 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import { UserEntryDeleteButton } from './user-entry-delete-button'
import * as React from 'react'
import { tr, td, a, div } from 'react-dom-factories'
import { UserAvatar } from 'user-avatar'
el = React.createElement
export class UserEntry extends React.Component
render: =>
className = 'osu-table__body-row osu-table__body-row--highlightable admin-contest-entry'
className += ' admin-contest-entry__deleted' if @props.entry.deleted
tr
className: className
key: @props.entry.id,
td className: 'osu-table__cell admin-contest-entry__column',
a
className: 'admin-contest-entry__user-link'
href: laroute.route('users.show', user: @props.entry.user.id),
div className: 'admin-contest-entry__avatar',
el UserAvatar, user: @props.entry.user, modifiers: ['full-rounded']
@props.entry.user.username
td className: 'osu-table__cell admin-contest-entry__column',
a download: @props.entry.original_filename, href: @props.entry.url, @props.entry.filename
td className: 'osu-table__cell admin-contest-entry__column',
osu.formatBytes(@props.entry.filesize)
td className: 'admin-contest-entry__column admin-contest-entry__column--button',
el UserEntryDeleteButton,
entry: @props.entry
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import { UserEntryDeleteButton } from './user-entry-delete-button'
import * as React from 'react'
import { tr, td, a, div } from 'react-dom-factories'
import { UserAvatar } from 'user-avatar'
el = React.createElement
export class UserEntry extends React.Component
render: =>
className = 'osu-table__body-row osu-table__body-row--highlightable admin-contest-entry'
className += ' admin-contest-entry__deleted' if @props.entry.deleted
tr
className: className
key: @props.entry.id,
td className: 'osu-table__cell admin-contest-entry__column',
a
className: 'admin-contest-entry__user-link'
href: laroute.route('users.show', user: @props.entry.user.id),
div className: 'admin-contest-entry__avatar',
el UserAvatar, user: @props.entry.user, modifiers: ['full-rounded']
@props.entry.user.username
td className: 'osu-table__cell admin-contest-entry__column',
a download: @props.entry.original_filename, href: @props.entry.url, @props.entry.filename
td className: 'osu-table__cell admin-contest-entry__column',
osu.formatBytes(@props.entry.filesize)
td className: 'admin-contest-entry__column admin-contest-entry__column--button',
el UserEntryDeleteButton,
entry: @props.entry
|
[
{
"context": "k.BannerPlugin \"vk-api v#{packageInfo.version} (c) Nikita Litvin, git.io/vwqn6\"\n ]\n",
"end": 699,
"score": 0.9998891949653625,
"start": 686,
"tag": "NAME",
"value": "Nikita Litvin"
},
{
"context": "v#{packageInfo.version} (c) Nikita Litvin, git.io/vwqn6\"\n ]\n",
... | webpack.config.coffee | vk-x/vk-api | 4 | webpack = require "webpack"
packageInfo = require "./package.json"
module.exports =
entry:
"vk-api": "./src/index.coffee"
"vk-api.min": "./src/index.coffee"
"vk-api-no-shortcuts": "./src/index-no-shortcuts.coffee"
"vk-api-no-shortcuts.min": "./src/index-no-shortcuts.coffee"
output:
path: "./dist"
filename: "[name].js"
library: "vk"
libraryTarget: "umd"
module:
loaders: [
test: /\.coffee$/
loader: "coffee"
]
resolve:
extensions: [
""
".coffee"
]
plugins: [
new webpack.optimize.UglifyJsPlugin
test: /\.min\.js($|\?)/i
new webpack.BannerPlugin "vk-api v#{packageInfo.version} (c) Nikita Litvin, git.io/vwqn6"
]
| 110827 | webpack = require "webpack"
packageInfo = require "./package.json"
module.exports =
entry:
"vk-api": "./src/index.coffee"
"vk-api.min": "./src/index.coffee"
"vk-api-no-shortcuts": "./src/index-no-shortcuts.coffee"
"vk-api-no-shortcuts.min": "./src/index-no-shortcuts.coffee"
output:
path: "./dist"
filename: "[name].js"
library: "vk"
libraryTarget: "umd"
module:
loaders: [
test: /\.coffee$/
loader: "coffee"
]
resolve:
extensions: [
""
".coffee"
]
plugins: [
new webpack.optimize.UglifyJsPlugin
test: /\.min\.js($|\?)/i
new webpack.BannerPlugin "vk-api v#{packageInfo.version} (c) <NAME>, git.io/vwqn6"
]
| true | webpack = require "webpack"
packageInfo = require "./package.json"
module.exports =
entry:
"vk-api": "./src/index.coffee"
"vk-api.min": "./src/index.coffee"
"vk-api-no-shortcuts": "./src/index-no-shortcuts.coffee"
"vk-api-no-shortcuts.min": "./src/index-no-shortcuts.coffee"
output:
path: "./dist"
filename: "[name].js"
library: "vk"
libraryTarget: "umd"
module:
loaders: [
test: /\.coffee$/
loader: "coffee"
]
resolve:
extensions: [
""
".coffee"
]
plugins: [
new webpack.optimize.UglifyJsPlugin
test: /\.min\.js($|\?)/i
new webpack.BannerPlugin "vk-api v#{packageInfo.version} (c) PI:NAME:<NAME>END_PI, git.io/vwqn6"
]
|
[
{
"context": "ame: (username, options) ->\n\t\tquery =\n\t\t\tusername: username\n\n\t\treturn @findOne query, options\n\n\tfindOneByEmai",
"end": 553,
"score": 0.8897709250450134,
"start": 545,
"tag": "USERNAME",
"value": "username"
},
{
"context": "\t\t$or: [\n\t\t\t\t{name: nameOr... | packages/rocketchat-lib/server/models/Users.coffee | org100h1/Rocket.Panda | 0 | RocketChat.models.Users = new class extends RocketChat.models._Base
constructor: ->
@model = Meteor.users
@tryEnsureIndex { 'roles': 1 }, { sparse: 1 }
@tryEnsureIndex { 'name': 1 }
@tryEnsureIndex { 'lastLogin': 1 }
@tryEnsureIndex { 'status': 1 }
@tryEnsureIndex { 'active': 1 }, { sparse: 1 }
@tryEnsureIndex { 'statusConnection': 1 }, { sparse: 1 }
@tryEnsureIndex { 'type': 1 }
# FIND ONE
findOneById: (_id, options) ->
return @findOne _id, options
findOneByUsername: (username, options) ->
query =
username: username
return @findOne query, options
findOneByEmailAddress: (emailAddress, options) ->
query =
'emails.address': emailAddress
return @findOne query, options
findOneVerifiedFromSameDomain: (email, options) ->
domain = s.strRight(email, '@')
query =
emails:
$elemMatch:
address:
$regex: new RegExp "@" + domain + "$", "i"
$ne: email
verified: true
return @findOne query, options
findOneAdmin: (admin, options) ->
query =
admin: admin
return @findOne query, options
findOneByIdAndLoginToken: (_id, token, options) ->
query =
_id: _id
'services.resume.loginTokens.hashedToken' : Accounts._hashLoginToken(token)
return @findOne query, options
# FIND
findUsersNotOffline: (options) ->
query =
username:
$exists: 1
status:
$in: ['online', 'away', 'busy']
return @find query, options
findByUsername: (username, options) ->
query =
username: username
return @find query, options
findUsersByUsernamesWithHighlights: (usernames, options) ->
query =
username: { $in: usernames }
'settings.preferences.highlights':
$exists: true
return @find query, options
findActiveByUsernameRegexWithExceptions: (username, exceptions = [], options = {}) ->
if not _.isArray exceptions
exceptions = [ exceptions ]
usernameRegex = new RegExp username, "i"
query =
$and: [
{ active: true }
{ username: { $nin: exceptions } }
{ username: usernameRegex }
]
type:
$in: ['user', 'bot']
return @find query, options
findByActiveUsersUsernameExcept: (username, except, options) ->
query =
active: true
$and: [
{username: {$nin: except}}
{username: username}
]
return @find query, options
findUsersByNameOrUsername: (nameOrUsername, options) ->
query =
username:
$exists: 1
$or: [
{name: nameOrUsername}
{username: nameOrUsername}
]
type:
$in: ['user']
return @find query, options
findByUsernameNameOrEmailAddress: (usernameNameOrEmailAddress, options) ->
query =
$or: [
{name: usernameNameOrEmailAddress}
{username: usernameNameOrEmailAddress}
{'emails.address': usernameNameOrEmailAddress}
]
type:
$in: ['user', 'bot']
return @find query, options
findLDAPUsers: (options) ->
query =
ldap: true
return @find query, options
getLastLogin: (options = {}) ->
query = { lastLogin: { $exists: 1 } }
options.sort = { lastLogin: -1 }
options.limit = 1
return @find(query, options)?.fetch?()?[0]?.lastLogin
findUsersByUsernames: (usernames, options) ->
query =
username:
$in: usernames
return @find query, options
# UPDATE
updateLastLoginById: (_id) ->
update =
$set:
lastLogin: new Date
return @update _id, update
setServiceId: (_id, serviceName, serviceId) ->
update =
$set: {}
serviceIdKey = "services.#{serviceName}.id"
update.$set[serviceIdKey] = serviceId
return @update _id, update
setUsername: (_id, username) ->
update =
$set: username: username
return @update _id, update
setEmail: (_id, email) ->
update =
$set:
emails: [
address: email
verified: false
]
return @update _id, update
setEmailVerified: (_id, email) ->
query =
_id: _id
emails:
$elemMatch:
address: email
verified: false
update =
$set:
'emails.$.verified': true
return @update query, update
setName: (_id, name) ->
update =
$set:
name: name
return @update _id, update
setAvatarOrigin: (_id, origin) ->
update =
$set:
avatarOrigin: origin
return @update _id, update
unsetAvatarOrigin: (_id) ->
update =
$unset:
avatarOrigin: 1
return @update _id, update
setUserActive: (_id, active=true) ->
update =
$set:
active: active
return @update _id, update
setAllUsersActive: (active) ->
update =
$set:
active: active
return @update {}, update, { multi: true }
unsetLoginTokens: (_id) ->
update =
$set:
"services.resume.loginTokens" : []
return @update _id, update
unsetRequirePasswordChange: (_id) ->
update =
$unset:
"requirePasswordChange" : true
"requirePasswordChangeReason" : true
return @update _id, update
resetPasswordAndSetRequirePasswordChange: (_id, requirePasswordChange, requirePasswordChangeReason) ->
update =
$unset:
"services.password": 1
$set:
"requirePasswordChange" : requirePasswordChange,
"requirePasswordChangeReason": requirePasswordChangeReason
return @update _id, update
setLanguage: (_id, language) ->
update =
$set:
language: language
return @update _id, update
setProfile: (_id, profile) ->
update =
$set:
"settings.profile": profile
return @update _id, update
setPreferences: (_id, preferences) ->
update =
$set:
"settings.preferences": preferences
return @update _id, update
setUtcOffset: (_id, utcOffset) ->
query =
_id: _id
utcOffset:
$ne: utcOffset
update =
$set:
utcOffset: utcOffset
return @update query, update
saveUserById: (_id, data) ->
setData = {}
unsetData = {}
if data.name?
if not _.isEmpty(s.trim(data.name))
setData.name = s.trim(data.name)
else
unsetData.name = 1
if data.email?
if not _.isEmpty(s.trim(data.email))
setData.emails = [
address: s.trim(data.email)
]
else
unsetData.name = 1
if data.phone?
if not _.isEmpty(s.trim(data.phone))
setData.phone = [
phoneNumber: s.trim(data.phone)
]
else
unsetData.phone = 1
update = {}
if not _.isEmpty setData
update.$set = setData
if not _.isEmpty unsetData
update.$unset = unsetData
return @update { _id: _id }, update
# INSERT
create: (data) ->
user =
createdAt: new Date
avatarOrigin: 'none'
_.extend user, data
return @insert user
# REMOVE
removeById: (_id) ->
return @remove _id
###
Find users to send a message by email if:
- he is not online
- has a verified email
- has not disabled email notifications
###
getUsersToSendOfflineEmail: (usersIds) ->
query =
_id:
$in: usersIds
status: 'offline'
statusConnection:
$ne: 'online'
'emails.verified': true
return @find query, { fields: { name: 1, username: 1, emails: 1, 'settings.preferences.emailNotificationMode': 1 } }
| 183382 | RocketChat.models.Users = new class extends RocketChat.models._Base
constructor: ->
@model = Meteor.users
@tryEnsureIndex { 'roles': 1 }, { sparse: 1 }
@tryEnsureIndex { 'name': 1 }
@tryEnsureIndex { 'lastLogin': 1 }
@tryEnsureIndex { 'status': 1 }
@tryEnsureIndex { 'active': 1 }, { sparse: 1 }
@tryEnsureIndex { 'statusConnection': 1 }, { sparse: 1 }
@tryEnsureIndex { 'type': 1 }
# FIND ONE
findOneById: (_id, options) ->
return @findOne _id, options
findOneByUsername: (username, options) ->
query =
username: username
return @findOne query, options
findOneByEmailAddress: (emailAddress, options) ->
query =
'emails.address': emailAddress
return @findOne query, options
findOneVerifiedFromSameDomain: (email, options) ->
domain = s.strRight(email, '@')
query =
emails:
$elemMatch:
address:
$regex: new RegExp "@" + domain + "$", "i"
$ne: email
verified: true
return @findOne query, options
findOneAdmin: (admin, options) ->
query =
admin: admin
return @findOne query, options
findOneByIdAndLoginToken: (_id, token, options) ->
query =
_id: _id
'services.resume.loginTokens.hashedToken' : Accounts._hashLoginToken(token)
return @findOne query, options
# FIND
findUsersNotOffline: (options) ->
query =
username:
$exists: 1
status:
$in: ['online', 'away', 'busy']
return @find query, options
findByUsername: (username, options) ->
query =
username: username
return @find query, options
findUsersByUsernamesWithHighlights: (usernames, options) ->
query =
username: { $in: usernames }
'settings.preferences.highlights':
$exists: true
return @find query, options
findActiveByUsernameRegexWithExceptions: (username, exceptions = [], options = {}) ->
if not _.isArray exceptions
exceptions = [ exceptions ]
usernameRegex = new RegExp username, "i"
query =
$and: [
{ active: true }
{ username: { $nin: exceptions } }
{ username: usernameRegex }
]
type:
$in: ['user', 'bot']
return @find query, options
findByActiveUsersUsernameExcept: (username, except, options) ->
query =
active: true
$and: [
{username: {$nin: except}}
{username: username}
]
return @find query, options
findUsersByNameOrUsername: (nameOrUsername, options) ->
query =
username:
$exists: 1
$or: [
{name: nameOrUsername}
{username: nameOrUsername}
]
type:
$in: ['user']
return @find query, options
findByUsernameNameOrEmailAddress: (usernameNameOrEmailAddress, options) ->
query =
$or: [
{name: usernameNameOrEmailAddress}
{username: usernameNameOrEmailAddress}
{'emails.address': usernameNameOrEmailAddress}
]
type:
$in: ['user', 'bot']
return @find query, options
findLDAPUsers: (options) ->
query =
ldap: true
return @find query, options
getLastLogin: (options = {}) ->
query = { lastLogin: { $exists: 1 } }
options.sort = { lastLogin: -1 }
options.limit = 1
return @find(query, options)?.fetch?()?[0]?.lastLogin
findUsersByUsernames: (usernames, options) ->
query =
username:
$in: usernames
return @find query, options
# UPDATE
updateLastLoginById: (_id) ->
update =
$set:
lastLogin: new Date
return @update _id, update
setServiceId: (_id, serviceName, serviceId) ->
update =
$set: {}
serviceIdKey = "<KEY>
update.$set[serviceIdKey] = serviceId
return @update _id, update
setUsername: (_id, username) ->
update =
$set: username: username
return @update _id, update
setEmail: (_id, email) ->
update =
$set:
emails: [
address: email
verified: false
]
return @update _id, update
setEmailVerified: (_id, email) ->
query =
_id: _id
emails:
$elemMatch:
address: email
verified: false
update =
$set:
'emails.$.verified': true
return @update query, update
setName: (_id, name) ->
update =
$set:
name: name
return @update _id, update
setAvatarOrigin: (_id, origin) ->
update =
$set:
avatarOrigin: origin
return @update _id, update
unsetAvatarOrigin: (_id) ->
update =
$unset:
avatarOrigin: 1
return @update _id, update
setUserActive: (_id, active=true) ->
update =
$set:
active: active
return @update _id, update
setAllUsersActive: (active) ->
update =
$set:
active: active
return @update {}, update, { multi: true }
unsetLoginTokens: (_id) ->
update =
$set:
"services.resume.loginTokens" : []
return @update _id, update
unsetRequirePasswordChange: (_id) ->
update =
$unset:
"requirePasswordChange" : true
"requirePasswordChangeReason" : true
return @update _id, update
resetPasswordAndSetRequirePasswordChange: (_id, requirePasswordChange, requirePasswordChangeReason) ->
update =
$unset:
"services.password": 1
$set:
"requirePasswordChange" : requirePasswordChange,
"requirePasswordChangeReason": requirePasswordChangeReason
return @update _id, update
setLanguage: (_id, language) ->
update =
$set:
language: language
return @update _id, update
setProfile: (_id, profile) ->
update =
$set:
"settings.profile": profile
return @update _id, update
setPreferences: (_id, preferences) ->
update =
$set:
"settings.preferences": preferences
return @update _id, update
setUtcOffset: (_id, utcOffset) ->
query =
_id: _id
utcOffset:
$ne: utcOffset
update =
$set:
utcOffset: utcOffset
return @update query, update
saveUserById: (_id, data) ->
setData = {}
unsetData = {}
if data.name?
if not _.isEmpty(s.trim(data.name))
setData.name = s.trim(data.name)
else
unsetData.name = 1
if data.email?
if not _.isEmpty(s.trim(data.email))
setData.emails = [
address: s.trim(data.email)
]
else
unsetData.name = 1
if data.phone?
if not _.isEmpty(s.trim(data.phone))
setData.phone = [
phoneNumber: s.trim(data.phone)
]
else
unsetData.phone = 1
update = {}
if not _.isEmpty setData
update.$set = setData
if not _.isEmpty unsetData
update.$unset = unsetData
return @update { _id: _id }, update
# INSERT
create: (data) ->
user =
createdAt: new Date
avatarOrigin: 'none'
_.extend user, data
return @insert user
# REMOVE
removeById: (_id) ->
return @remove _id
###
Find users to send a message by email if:
- he is not online
- has a verified email
- has not disabled email notifications
###
getUsersToSendOfflineEmail: (usersIds) ->
query =
_id:
$in: usersIds
status: 'offline'
statusConnection:
$ne: 'online'
'emails.verified': true
return @find query, { fields: { name: 1, username: 1, emails: 1, 'settings.preferences.emailNotificationMode': 1 } }
| true | RocketChat.models.Users = new class extends RocketChat.models._Base
constructor: ->
@model = Meteor.users
@tryEnsureIndex { 'roles': 1 }, { sparse: 1 }
@tryEnsureIndex { 'name': 1 }
@tryEnsureIndex { 'lastLogin': 1 }
@tryEnsureIndex { 'status': 1 }
@tryEnsureIndex { 'active': 1 }, { sparse: 1 }
@tryEnsureIndex { 'statusConnection': 1 }, { sparse: 1 }
@tryEnsureIndex { 'type': 1 }
# FIND ONE
findOneById: (_id, options) ->
return @findOne _id, options
findOneByUsername: (username, options) ->
query =
username: username
return @findOne query, options
findOneByEmailAddress: (emailAddress, options) ->
query =
'emails.address': emailAddress
return @findOne query, options
findOneVerifiedFromSameDomain: (email, options) ->
domain = s.strRight(email, '@')
query =
emails:
$elemMatch:
address:
$regex: new RegExp "@" + domain + "$", "i"
$ne: email
verified: true
return @findOne query, options
findOneAdmin: (admin, options) ->
query =
admin: admin
return @findOne query, options
findOneByIdAndLoginToken: (_id, token, options) ->
query =
_id: _id
'services.resume.loginTokens.hashedToken' : Accounts._hashLoginToken(token)
return @findOne query, options
# FIND
findUsersNotOffline: (options) ->
query =
username:
$exists: 1
status:
$in: ['online', 'away', 'busy']
return @find query, options
findByUsername: (username, options) ->
query =
username: username
return @find query, options
findUsersByUsernamesWithHighlights: (usernames, options) ->
query =
username: { $in: usernames }
'settings.preferences.highlights':
$exists: true
return @find query, options
findActiveByUsernameRegexWithExceptions: (username, exceptions = [], options = {}) ->
if not _.isArray exceptions
exceptions = [ exceptions ]
usernameRegex = new RegExp username, "i"
query =
$and: [
{ active: true }
{ username: { $nin: exceptions } }
{ username: usernameRegex }
]
type:
$in: ['user', 'bot']
return @find query, options
findByActiveUsersUsernameExcept: (username, except, options) ->
query =
active: true
$and: [
{username: {$nin: except}}
{username: username}
]
return @find query, options
findUsersByNameOrUsername: (nameOrUsername, options) ->
query =
username:
$exists: 1
$or: [
{name: nameOrUsername}
{username: nameOrUsername}
]
type:
$in: ['user']
return @find query, options
findByUsernameNameOrEmailAddress: (usernameNameOrEmailAddress, options) ->
query =
$or: [
{name: usernameNameOrEmailAddress}
{username: usernameNameOrEmailAddress}
{'emails.address': usernameNameOrEmailAddress}
]
type:
$in: ['user', 'bot']
return @find query, options
findLDAPUsers: (options) ->
query =
ldap: true
return @find query, options
getLastLogin: (options = {}) ->
query = { lastLogin: { $exists: 1 } }
options.sort = { lastLogin: -1 }
options.limit = 1
return @find(query, options)?.fetch?()?[0]?.lastLogin
findUsersByUsernames: (usernames, options) ->
query =
username:
$in: usernames
return @find query, options
# UPDATE
updateLastLoginById: (_id) ->
update =
$set:
lastLogin: new Date
return @update _id, update
setServiceId: (_id, serviceName, serviceId) ->
update =
$set: {}
serviceIdKey = "PI:KEY:<KEY>END_PI
update.$set[serviceIdKey] = serviceId
return @update _id, update
setUsername: (_id, username) ->
update =
$set: username: username
return @update _id, update
setEmail: (_id, email) ->
update =
$set:
emails: [
address: email
verified: false
]
return @update _id, update
setEmailVerified: (_id, email) ->
query =
_id: _id
emails:
$elemMatch:
address: email
verified: false
update =
$set:
'emails.$.verified': true
return @update query, update
setName: (_id, name) ->
update =
$set:
name: name
return @update _id, update
setAvatarOrigin: (_id, origin) ->
update =
$set:
avatarOrigin: origin
return @update _id, update
unsetAvatarOrigin: (_id) ->
update =
$unset:
avatarOrigin: 1
return @update _id, update
setUserActive: (_id, active=true) ->
update =
$set:
active: active
return @update _id, update
setAllUsersActive: (active) ->
update =
$set:
active: active
return @update {}, update, { multi: true }
unsetLoginTokens: (_id) ->
update =
$set:
"services.resume.loginTokens" : []
return @update _id, update
unsetRequirePasswordChange: (_id) ->
update =
$unset:
"requirePasswordChange" : true
"requirePasswordChangeReason" : true
return @update _id, update
resetPasswordAndSetRequirePasswordChange: (_id, requirePasswordChange, requirePasswordChangeReason) ->
update =
$unset:
"services.password": 1
$set:
"requirePasswordChange" : requirePasswordChange,
"requirePasswordChangeReason": requirePasswordChangeReason
return @update _id, update
setLanguage: (_id, language) ->
update =
$set:
language: language
return @update _id, update
setProfile: (_id, profile) ->
update =
$set:
"settings.profile": profile
return @update _id, update
setPreferences: (_id, preferences) ->
update =
$set:
"settings.preferences": preferences
return @update _id, update
setUtcOffset: (_id, utcOffset) ->
query =
_id: _id
utcOffset:
$ne: utcOffset
update =
$set:
utcOffset: utcOffset
return @update query, update
saveUserById: (_id, data) ->
setData = {}
unsetData = {}
if data.name?
if not _.isEmpty(s.trim(data.name))
setData.name = s.trim(data.name)
else
unsetData.name = 1
if data.email?
if not _.isEmpty(s.trim(data.email))
setData.emails = [
address: s.trim(data.email)
]
else
unsetData.name = 1
if data.phone?
if not _.isEmpty(s.trim(data.phone))
setData.phone = [
phoneNumber: s.trim(data.phone)
]
else
unsetData.phone = 1
update = {}
if not _.isEmpty setData
update.$set = setData
if not _.isEmpty unsetData
update.$unset = unsetData
return @update { _id: _id }, update
# INSERT
create: (data) ->
user =
createdAt: new Date
avatarOrigin: 'none'
_.extend user, data
return @insert user
# REMOVE
removeById: (_id) ->
return @remove _id
###
Find users to send a message by email if:
- he is not online
- has a verified email
- has not disabled email notifications
###
getUsersToSendOfflineEmail: (usersIds) ->
query =
_id:
$in: usersIds
status: 'offline'
statusConnection:
$ne: 'online'
'emails.verified': true
return @find query, { fields: { name: 1, username: 1, emails: 1, 'settings.preferences.emailNotificationMode': 1 } }
|
[
{
"context": "().min(3).max(30).required()\n password: joi.string().alphanum().min(6).max(30).required()\n ",
"end": 393,
"score": 0.8230852484703064,
"start": 392,
"tag": "PASSWORD",
"value": "j"
},
{
"context": "().min(3).max(30).required()\n password: joi.str... | page/users.coffee | dongmingchao/nsite-hexo | 0 | import Router from 'koa-joi-router'
joi = Router.Joi
import models from '../model/index'
app = new Router()
app.post '/user/register',
meta:
swagger:
summary: '用户注册'
description: '注册'
tags: ['用户管理']
validate:
type: 'json'
body:
username: joi.string().alphanum().min(3).max(30).required()
password: joi.string().alphanum().min(6).max(30).required()
output:
200:
body:
id:''
handler: (ctx) ->
ctx.body = models.User.add ctx.request.body
app.post '/user/login',
meta:
swagger:
summary: '用户登陆'
description: '用户验证登陆'
tags: ['用户管理']
validate:
type: 'json'
body:
username: joi.string().alphanum().min(3).max(30).required()
password: joi.string().alphanum().min(6).max(30).required()
output:
200:
body:
id: 'ba ba ba'
handler: (ctx) ->
ctx.body = 'user id'
export default app | 119078 | import Router from 'koa-joi-router'
joi = Router.Joi
import models from '../model/index'
app = new Router()
app.post '/user/register',
meta:
swagger:
summary: '用户注册'
description: '注册'
tags: ['用户管理']
validate:
type: 'json'
body:
username: joi.string().alphanum().min(3).max(30).required()
password: <PASSWORD>oi.string().alphanum().min(6).max(30).required()
output:
200:
body:
id:''
handler: (ctx) ->
ctx.body = models.User.add ctx.request.body
app.post '/user/login',
meta:
swagger:
summary: '用户登陆'
description: '用户验证登陆'
tags: ['用户管理']
validate:
type: 'json'
body:
username: joi.string().alphanum().min(3).max(30).required()
password: <PASSWORD>oi.string().alphanum().min(6).max(30).required()
output:
200:
body:
id: 'ba ba ba'
handler: (ctx) ->
ctx.body = 'user id'
export default app | true | import Router from 'koa-joi-router'
joi = Router.Joi
import models from '../model/index'
app = new Router()
app.post '/user/register',
meta:
swagger:
summary: '用户注册'
description: '注册'
tags: ['用户管理']
validate:
type: 'json'
body:
username: joi.string().alphanum().min(3).max(30).required()
password: PI:PASSWORD:<PASSWORD>END_PIoi.string().alphanum().min(6).max(30).required()
output:
200:
body:
id:''
handler: (ctx) ->
ctx.body = models.User.add ctx.request.body
app.post '/user/login',
meta:
swagger:
summary: '用户登陆'
description: '用户验证登陆'
tags: ['用户管理']
validate:
type: 'json'
body:
username: joi.string().alphanum().min(3).max(30).required()
password: PI:PASSWORD:<PASSWORD>END_PIoi.string().alphanum().min(6).max(30).required()
output:
200:
body:
id: 'ba ba ba'
handler: (ctx) ->
ctx.body = 'user id'
export default app |
[
{
"context": "rough Redis pub/sub using Synapse.\n#\n# Author:\n# AndrewGuenther\n\nRedis = require \"redis\"\nUrl = require \"url\"\n\nmod",
"end": 508,
"score": 0.9997508525848389,
"start": 494,
"tag": "NAME",
"value": "AndrewGuenther"
}
] | scripts/synapse.coffee | Gustave/hubot-synapse | 2 | # Description:
# Synapse funnels everything Hubot hears through Redis pub/sub so that it
# can be acted upon by outside observers.
#
# Dependencies:
# "redis":"^0.8.4"
#
# Configuration:
# REDISTOGO_URL or REDISCLOUD_URL or BOXEN_REDIS_URL or REDIS_URL.
# URL format: redis://<host>:<port>[/<brain_prefix>]
# If not provided, '<brain_prefix>' will default to the robot name.
#
# Commands:
# * - All hubot traffic is being sent through Redis pub/sub using Synapse.
#
# Author:
# AndrewGuenther
Redis = require "redis"
Url = require "url"
module.exports = (robot) ->
info = Url.parse process.env.REDISTOGO_URL or process.env.REDISCLOUD_URL or process.env.BOXEN_REDIS_URL or process.env.REDIS_URL or 'redis://localhost:6379', true
subClient = Redis.createClient(info.port, info.hostname)
pubClient = Redis.createClient(info.port, info.hostname)
prefix = info.path?.replace('/', '') or robot.name
channelName = (direction, action) ->
return "#{prefix}:#{direction}:#{action}"
envelopeGenerator = (user, room) ->
return {
user: robot.brain.userForId(user),
room: room,
message: null
}
subClient.on "pmessage", (pattern, channel, json) ->
try
message = JSON.parse json
catch
robot.logger.error "Received message on #{channel} was not valid JSON"
robot.logger.error json
return
func = channel.split(":")[2]
if func of robot.adapter and typeof robot.adapter[func] == 'function'
try
robot.adapter[func] envelopeGenerator(message.user, message.room), message.message
catch
robot.logger.error "Received malformed message data"
robot.logger.error json
return
else
robot.logger.error "Received message with invalid operation on #{channel}"
subClient.psubscribe "#{prefix}:out:*"
pubGenerator = (channel) ->
return (response) ->
robot.logger.debug "Message published to #{channel}"
message = response.message
pubClient.publish channelName("in", channel), JSON.stringify({
user: message.user.id,
room: message.room,
message: message.text
})
robot.hear /^/i, pubGenerator("hear")
robot.respond /.*/i, pubGenerator("respond")
robot.enter pubGenerator("enter")
robot.leave pubGenerator("leave")
robot.topic pubGenerator("topic")
robot.catchAll pubGenerator("catchAll")
| 51445 | # Description:
# Synapse funnels everything Hubot hears through Redis pub/sub so that it
# can be acted upon by outside observers.
#
# Dependencies:
# "redis":"^0.8.4"
#
# Configuration:
# REDISTOGO_URL or REDISCLOUD_URL or BOXEN_REDIS_URL or REDIS_URL.
# URL format: redis://<host>:<port>[/<brain_prefix>]
# If not provided, '<brain_prefix>' will default to the robot name.
#
# Commands:
# * - All hubot traffic is being sent through Redis pub/sub using Synapse.
#
# Author:
# <NAME>
Redis = require "redis"
Url = require "url"
module.exports = (robot) ->
info = Url.parse process.env.REDISTOGO_URL or process.env.REDISCLOUD_URL or process.env.BOXEN_REDIS_URL or process.env.REDIS_URL or 'redis://localhost:6379', true
subClient = Redis.createClient(info.port, info.hostname)
pubClient = Redis.createClient(info.port, info.hostname)
prefix = info.path?.replace('/', '') or robot.name
channelName = (direction, action) ->
return "#{prefix}:#{direction}:#{action}"
envelopeGenerator = (user, room) ->
return {
user: robot.brain.userForId(user),
room: room,
message: null
}
subClient.on "pmessage", (pattern, channel, json) ->
try
message = JSON.parse json
catch
robot.logger.error "Received message on #{channel} was not valid JSON"
robot.logger.error json
return
func = channel.split(":")[2]
if func of robot.adapter and typeof robot.adapter[func] == 'function'
try
robot.adapter[func] envelopeGenerator(message.user, message.room), message.message
catch
robot.logger.error "Received malformed message data"
robot.logger.error json
return
else
robot.logger.error "Received message with invalid operation on #{channel}"
subClient.psubscribe "#{prefix}:out:*"
pubGenerator = (channel) ->
return (response) ->
robot.logger.debug "Message published to #{channel}"
message = response.message
pubClient.publish channelName("in", channel), JSON.stringify({
user: message.user.id,
room: message.room,
message: message.text
})
robot.hear /^/i, pubGenerator("hear")
robot.respond /.*/i, pubGenerator("respond")
robot.enter pubGenerator("enter")
robot.leave pubGenerator("leave")
robot.topic pubGenerator("topic")
robot.catchAll pubGenerator("catchAll")
| true | # Description:
# Synapse funnels everything Hubot hears through Redis pub/sub so that it
# can be acted upon by outside observers.
#
# Dependencies:
# "redis":"^0.8.4"
#
# Configuration:
# REDISTOGO_URL or REDISCLOUD_URL or BOXEN_REDIS_URL or REDIS_URL.
# URL format: redis://<host>:<port>[/<brain_prefix>]
# If not provided, '<brain_prefix>' will default to the robot name.
#
# Commands:
# * - All hubot traffic is being sent through Redis pub/sub using Synapse.
#
# Author:
# PI:NAME:<NAME>END_PI
Redis = require "redis"
Url = require "url"
module.exports = (robot) ->
info = Url.parse process.env.REDISTOGO_URL or process.env.REDISCLOUD_URL or process.env.BOXEN_REDIS_URL or process.env.REDIS_URL or 'redis://localhost:6379', true
subClient = Redis.createClient(info.port, info.hostname)
pubClient = Redis.createClient(info.port, info.hostname)
prefix = info.path?.replace('/', '') or robot.name
channelName = (direction, action) ->
return "#{prefix}:#{direction}:#{action}"
envelopeGenerator = (user, room) ->
return {
user: robot.brain.userForId(user),
room: room,
message: null
}
subClient.on "pmessage", (pattern, channel, json) ->
try
message = JSON.parse json
catch
robot.logger.error "Received message on #{channel} was not valid JSON"
robot.logger.error json
return
func = channel.split(":")[2]
if func of robot.adapter and typeof robot.adapter[func] == 'function'
try
robot.adapter[func] envelopeGenerator(message.user, message.room), message.message
catch
robot.logger.error "Received malformed message data"
robot.logger.error json
return
else
robot.logger.error "Received message with invalid operation on #{channel}"
subClient.psubscribe "#{prefix}:out:*"
pubGenerator = (channel) ->
return (response) ->
robot.logger.debug "Message published to #{channel}"
message = response.message
pubClient.publish channelName("in", channel), JSON.stringify({
user: message.user.id,
room: message.room,
message: message.text
})
robot.hear /^/i, pubGenerator("hear")
robot.respond /.*/i, pubGenerator("respond")
robot.enter pubGenerator("enter")
robot.leave pubGenerator("leave")
robot.topic pubGenerator("topic")
robot.catchAll pubGenerator("catchAll")
|
[
{
"context": "ging logic for non-boolean features\n\t\t\tif key == 'compileGroup'\n\t\t\t\tif features['compileGroup'] == 'priority' or",
"end": 2628,
"score": 0.9766950607299805,
"start": 2616,
"tag": "KEY",
"value": "compileGroup"
},
{
"context": "s['compileGroup'] = 'standard'\n\t... | app/coffee/Features/Subscription/FeaturesUpdater.coffee | davidmehren/web-sharelatex | 0 | async = require("async")
PlansLocator = require("./PlansLocator")
_ = require("underscore")
SubscriptionLocator = require("./SubscriptionLocator")
UserFeaturesUpdater = require("./UserFeaturesUpdater")
Settings = require("settings-sharelatex")
logger = require("logger-sharelatex")
ReferalFeatures = require("../Referal/ReferalFeatures")
V1SubscriptionManager = require("./V1SubscriptionManager")
oneMonthInSeconds = 60 * 60 * 24 * 30
module.exports = FeaturesUpdater =
refreshFeatures: (user_id, notifyV1 = true, callback = () ->)->
if typeof notifyV1 == 'function'
callback = notifyV1
notifyV1 = true
if notifyV1
V1SubscriptionManager.notifyV1OfFeaturesChange user_id, (error) ->
if error?
logger.err {err: error, user_id}, "error notifying v1 about updated features"
jobs =
individualFeatures: (cb) -> FeaturesUpdater._getIndividualFeatures user_id, cb
groupFeatureSets: (cb) -> FeaturesUpdater._getGroupFeatureSets user_id, cb
v1Features: (cb) -> FeaturesUpdater._getV1Features user_id, cb
bonusFeatures: (cb) -> ReferalFeatures.getBonusFeatures user_id, cb
async.series jobs, (err, results)->
if err?
logger.err err:err, user_id:user_id,
"error getting subscription or group for refreshFeatures"
return callback(err)
{individualFeatures, groupFeatureSets, v1Features, bonusFeatures} = results
logger.log {user_id, individualFeatures, groupFeatureSets, v1Features, bonusFeatures}, 'merging user features'
featureSets = groupFeatureSets.concat [individualFeatures, v1Features, bonusFeatures]
features = _.reduce(featureSets, FeaturesUpdater._mergeFeatures, Settings.defaultFeatures)
logger.log {user_id, features}, 'updating user features'
UserFeaturesUpdater.updateFeatures user_id, features, callback
_getIndividualFeatures: (user_id, callback = (error, features = {}) ->) ->
SubscriptionLocator.getUsersSubscription user_id, (err, sub)->
callback err, FeaturesUpdater._subscriptionToFeatures(sub)
_getGroupFeatureSets: (user_id, callback = (error, featureSets = []) ->) ->
SubscriptionLocator.getGroupSubscriptionsMemberOf user_id, (err, subs) ->
callback err, (subs or []).map FeaturesUpdater._subscriptionToFeatures
_getV1Features: (user_id, callback = (error, features = {}) ->) ->
V1SubscriptionManager.getPlanCodeFromV1 user_id, (err, planCode) ->
callback err, FeaturesUpdater._planCodeToFeatures(planCode)
_mergeFeatures: (featuresA, featuresB) ->
features = Object.assign({}, featuresA)
for key, value of featuresB
# Special merging logic for non-boolean features
if key == 'compileGroup'
if features['compileGroup'] == 'priority' or featuresB['compileGroup'] == 'priority'
features['compileGroup'] = 'priority'
else
features['compileGroup'] = 'standard'
else if key == 'collaborators'
if features['collaborators'] == -1 or featuresB['collaborators'] == -1
features['collaborators'] = -1
else
features['collaborators'] = Math.max(
features['collaborators'] or 0,
featuresB['collaborators'] or 0
)
else if key == 'compileTimeout'
features['compileTimeout'] = Math.max(
features['compileTimeout'] or 0,
featuresB['compileTimeout'] or 0
)
else
# Boolean keys, true is better
features[key] = features[key] or featuresB[key]
return features
_subscriptionToFeatures: (subscription) ->
FeaturesUpdater._planCodeToFeatures(subscription?.planCode)
_planCodeToFeatures: (planCode) ->
if !planCode?
return {}
plan = PlansLocator.findLocalPlanInSettings planCode
if !plan?
return {}
else
return plan.features
| 93014 | async = require("async")
PlansLocator = require("./PlansLocator")
_ = require("underscore")
SubscriptionLocator = require("./SubscriptionLocator")
UserFeaturesUpdater = require("./UserFeaturesUpdater")
Settings = require("settings-sharelatex")
logger = require("logger-sharelatex")
ReferalFeatures = require("../Referal/ReferalFeatures")
V1SubscriptionManager = require("./V1SubscriptionManager")
oneMonthInSeconds = 60 * 60 * 24 * 30
module.exports = FeaturesUpdater =
refreshFeatures: (user_id, notifyV1 = true, callback = () ->)->
if typeof notifyV1 == 'function'
callback = notifyV1
notifyV1 = true
if notifyV1
V1SubscriptionManager.notifyV1OfFeaturesChange user_id, (error) ->
if error?
logger.err {err: error, user_id}, "error notifying v1 about updated features"
jobs =
individualFeatures: (cb) -> FeaturesUpdater._getIndividualFeatures user_id, cb
groupFeatureSets: (cb) -> FeaturesUpdater._getGroupFeatureSets user_id, cb
v1Features: (cb) -> FeaturesUpdater._getV1Features user_id, cb
bonusFeatures: (cb) -> ReferalFeatures.getBonusFeatures user_id, cb
async.series jobs, (err, results)->
if err?
logger.err err:err, user_id:user_id,
"error getting subscription or group for refreshFeatures"
return callback(err)
{individualFeatures, groupFeatureSets, v1Features, bonusFeatures} = results
logger.log {user_id, individualFeatures, groupFeatureSets, v1Features, bonusFeatures}, 'merging user features'
featureSets = groupFeatureSets.concat [individualFeatures, v1Features, bonusFeatures]
features = _.reduce(featureSets, FeaturesUpdater._mergeFeatures, Settings.defaultFeatures)
logger.log {user_id, features}, 'updating user features'
UserFeaturesUpdater.updateFeatures user_id, features, callback
_getIndividualFeatures: (user_id, callback = (error, features = {}) ->) ->
SubscriptionLocator.getUsersSubscription user_id, (err, sub)->
callback err, FeaturesUpdater._subscriptionToFeatures(sub)
_getGroupFeatureSets: (user_id, callback = (error, featureSets = []) ->) ->
SubscriptionLocator.getGroupSubscriptionsMemberOf user_id, (err, subs) ->
callback err, (subs or []).map FeaturesUpdater._subscriptionToFeatures
_getV1Features: (user_id, callback = (error, features = {}) ->) ->
V1SubscriptionManager.getPlanCodeFromV1 user_id, (err, planCode) ->
callback err, FeaturesUpdater._planCodeToFeatures(planCode)
_mergeFeatures: (featuresA, featuresB) ->
features = Object.assign({}, featuresA)
for key, value of featuresB
# Special merging logic for non-boolean features
if key == '<KEY>'
if features['compileGroup'] == 'priority' or featuresB['compileGroup'] == 'priority'
features['compileGroup'] = 'priority'
else
features['compileGroup'] = 'standard'
else if key == '<KEY>'
if features['collaborators'] == -1 or featuresB['collaborators'] == -1
features['collaborators'] = -1
else
features['collaborators'] = Math.max(
features['collaborators'] or 0,
featuresB['collaborators'] or 0
)
else if key == '<KEY>'
features['compileTimeout'] = Math.max(
features['compileTimeout'] or 0,
featuresB['compileTimeout'] or 0
)
else
# Boolean keys, true is better
features[key] = features[key] or featuresB[key]
return features
_subscriptionToFeatures: (subscription) ->
FeaturesUpdater._planCodeToFeatures(subscription?.planCode)
_planCodeToFeatures: (planCode) ->
if !planCode?
return {}
plan = PlansLocator.findLocalPlanInSettings planCode
if !plan?
return {}
else
return plan.features
| true | async = require("async")
PlansLocator = require("./PlansLocator")
_ = require("underscore")
SubscriptionLocator = require("./SubscriptionLocator")
UserFeaturesUpdater = require("./UserFeaturesUpdater")
Settings = require("settings-sharelatex")
logger = require("logger-sharelatex")
ReferalFeatures = require("../Referal/ReferalFeatures")
V1SubscriptionManager = require("./V1SubscriptionManager")
oneMonthInSeconds = 60 * 60 * 24 * 30
module.exports = FeaturesUpdater =
refreshFeatures: (user_id, notifyV1 = true, callback = () ->)->
if typeof notifyV1 == 'function'
callback = notifyV1
notifyV1 = true
if notifyV1
V1SubscriptionManager.notifyV1OfFeaturesChange user_id, (error) ->
if error?
logger.err {err: error, user_id}, "error notifying v1 about updated features"
jobs =
individualFeatures: (cb) -> FeaturesUpdater._getIndividualFeatures user_id, cb
groupFeatureSets: (cb) -> FeaturesUpdater._getGroupFeatureSets user_id, cb
v1Features: (cb) -> FeaturesUpdater._getV1Features user_id, cb
bonusFeatures: (cb) -> ReferalFeatures.getBonusFeatures user_id, cb
async.series jobs, (err, results)->
if err?
logger.err err:err, user_id:user_id,
"error getting subscription or group for refreshFeatures"
return callback(err)
{individualFeatures, groupFeatureSets, v1Features, bonusFeatures} = results
logger.log {user_id, individualFeatures, groupFeatureSets, v1Features, bonusFeatures}, 'merging user features'
featureSets = groupFeatureSets.concat [individualFeatures, v1Features, bonusFeatures]
features = _.reduce(featureSets, FeaturesUpdater._mergeFeatures, Settings.defaultFeatures)
logger.log {user_id, features}, 'updating user features'
UserFeaturesUpdater.updateFeatures user_id, features, callback
_getIndividualFeatures: (user_id, callback = (error, features = {}) ->) ->
SubscriptionLocator.getUsersSubscription user_id, (err, sub)->
callback err, FeaturesUpdater._subscriptionToFeatures(sub)
_getGroupFeatureSets: (user_id, callback = (error, featureSets = []) ->) ->
SubscriptionLocator.getGroupSubscriptionsMemberOf user_id, (err, subs) ->
callback err, (subs or []).map FeaturesUpdater._subscriptionToFeatures
_getV1Features: (user_id, callback = (error, features = {}) ->) ->
V1SubscriptionManager.getPlanCodeFromV1 user_id, (err, planCode) ->
callback err, FeaturesUpdater._planCodeToFeatures(planCode)
_mergeFeatures: (featuresA, featuresB) ->
features = Object.assign({}, featuresA)
for key, value of featuresB
# Special merging logic for non-boolean features
if key == 'PI:KEY:<KEY>END_PI'
if features['compileGroup'] == 'priority' or featuresB['compileGroup'] == 'priority'
features['compileGroup'] = 'priority'
else
features['compileGroup'] = 'standard'
else if key == 'PI:KEY:<KEY>END_PI'
if features['collaborators'] == -1 or featuresB['collaborators'] == -1
features['collaborators'] = -1
else
features['collaborators'] = Math.max(
features['collaborators'] or 0,
featuresB['collaborators'] or 0
)
else if key == 'PI:KEY:<KEY>END_PI'
features['compileTimeout'] = Math.max(
features['compileTimeout'] or 0,
featuresB['compileTimeout'] or 0
)
else
# Boolean keys, true is better
features[key] = features[key] or featuresB[key]
return features
_subscriptionToFeatures: (subscription) ->
FeaturesUpdater._planCodeToFeatures(subscription?.planCode)
_planCodeToFeatures: (planCode) ->
if !planCode?
return {}
plan = PlansLocator.findLocalPlanInSettings planCode
if !plan?
return {}
else
return plan.features
|
[
{
"context": "pkg.version %> - Linked Media Player\n by Szaby Gruenwald, Cristian Bara and the ConnectMe Project.\n ",
"end": 119,
"score": 0.9998748302459717,
"start": 104,
"tag": "NAME",
"value": "Szaby Gruenwald"
},
{
"context": "Linked Media Player\n by ... | Gruntfile.coffee | tkurz/lime | 1 | module.exports = ->
banner = """/* Lime Player <%= pkg.version %> - Linked Media Player
by Szaby Gruenwald, Cristian Bara and the ConnectMe Project.
Available under the Apache License, Version 2.0
See http://connectme.sti2.org/ for more information.
*/"""
# Project configuration
@initConfig
pkg: @file.readJSON 'package.json'
# Build setup: concatenate source files
coffee:
compile:
files:
'lib/lime-core.js': [
'src/lime.coffee'
'src/annotation.coffee'
'src/plugin.coffee'
'src/widget.coffee'
'src/videojs-adapter.coffee'
'src/cmf.coffee'
'src/annotationoverlays.coffee'
'src/plugins/ldplugin.coffee'
'src/jquery.scrollTo.coffee'
'src/settings/usersettings.coffee'
]
'lib/lime.js': [
'src/lime.coffee'
'src/annotation.coffee'
'src/plugin.coffee'
'src/widget.coffee'
'src/videojs-adapter.coffee'
'src/cmf.coffee'
'src/annotationoverlays.coffee'
'src/jquery.scrollTo.coffee'
'src/plugins/*.coffee'
'src/settings/*.coffee'
]
mult_plugins:
src: ['src/plugins/*.coffee']
dest: 'lib/plugins/'
flatten: true
expand: true
ext: '.js'
mult_settings:
src: ['src/settings/*.coffee']
dest: 'lib/settings/'
flatten: true
expand: true
ext: '.js'
# JavaScript minification
uglify:
options:
banner: banner
report: 'min'
full:
files:
'lib/lime.min.js': ['lib/lime.js']
core:
files:
'lib/lime-core.min.js': ['lib/lime-core.js']
deps:
files:
'lib/lime-deps.min.js': ['lib/lime-deps.js']
concat:
# core is the lime player without extra plugins
core:
src: ['lib/lime-core.js','lib/utils.js']
dest: 'lib/lime-core.js'
# full is the lime player with all plugins
full:
src:['lib/lime.js','lib/utils.js']
dest: 'lib/lime.js'
# deps is the dependencies without the player itself.
deps:
src:['lib/underscoreJS/underscore.min.js', 'lib/backboneJS/backbone.js', 'lib/rdfquery/latest/jquery.rdfquery.debug.js', 'lib/vie/vie.js'],
dest: 'lib/lime-deps.js'
# Add short info in front of the produced file
usebanner:
coffee:
options:
position: 'top' || 'bottom'
banner: banner
files:
src: [ 'lib/lime.js', 'lib/lime-core.js' ]
docco_husky:
project_name: "LIME - Linked Media Player"
# show_timestamp: false
files: [
'src/lime.coffee'
'src/annotation.coffee'
'src/plugin.coffee'
'src/widget.coffee'
'src/videojs-adapter.coffee'
'src/cmf.coffee'
'src/annotationoverlays.coffee'
'src/jquery.scrollTo.coffee'
'src/plugins/*.coffee'
'src/settings/*.coffee'
]
# Automated recompilation and testing when developing
watch:
# Files to watch
files: [
'src/*.coffee'
'src/**/*.coffee'
]
# Tasks to run on change
tasks: ['build']
# Build dependencies
@loadNpmTasks 'grunt-contrib-coffee'
@loadNpmTasks 'grunt-contrib-concat'
@loadNpmTasks 'grunt-contrib-uglify'
@loadNpmTasks 'grunt-banner'
@loadNpmTasks 'grunt-docco-husky'
# Testing dependencies
# @loadNpmTasks 'grunt-contrib-jshint'
# @loadNpmTasks 'grunt-contrib-qunit'
# @loadNpmTasks 'grunt-contrib-nodeunit'
@loadNpmTasks 'grunt-contrib-watch'
# Local tasks
@registerTask 'build', ["coffee", "concat", "usebanner", "uglify"]
@registerTask 'default', 'build'
@registerTask 'doc', =>
@task.run "docco_husky" | 121513 | module.exports = ->
banner = """/* Lime Player <%= pkg.version %> - Linked Media Player
by <NAME>, <NAME> and the ConnectMe Project.
Available under the Apache License, Version 2.0
See http://connectme.sti2.org/ for more information.
*/"""
# Project configuration
@initConfig
pkg: @file.readJSON 'package.json'
# Build setup: concatenate source files
coffee:
compile:
files:
'lib/lime-core.js': [
'src/lime.coffee'
'src/annotation.coffee'
'src/plugin.coffee'
'src/widget.coffee'
'src/videojs-adapter.coffee'
'src/cmf.coffee'
'src/annotationoverlays.coffee'
'src/plugins/ldplugin.coffee'
'src/jquery.scrollTo.coffee'
'src/settings/usersettings.coffee'
]
'lib/lime.js': [
'src/lime.coffee'
'src/annotation.coffee'
'src/plugin.coffee'
'src/widget.coffee'
'src/videojs-adapter.coffee'
'src/cmf.coffee'
'src/annotationoverlays.coffee'
'src/jquery.scrollTo.coffee'
'src/plugins/*.coffee'
'src/settings/*.coffee'
]
mult_plugins:
src: ['src/plugins/*.coffee']
dest: 'lib/plugins/'
flatten: true
expand: true
ext: '.js'
mult_settings:
src: ['src/settings/*.coffee']
dest: 'lib/settings/'
flatten: true
expand: true
ext: '.js'
# JavaScript minification
uglify:
options:
banner: banner
report: 'min'
full:
files:
'lib/lime.min.js': ['lib/lime.js']
core:
files:
'lib/lime-core.min.js': ['lib/lime-core.js']
deps:
files:
'lib/lime-deps.min.js': ['lib/lime-deps.js']
concat:
# core is the lime player without extra plugins
core:
src: ['lib/lime-core.js','lib/utils.js']
dest: 'lib/lime-core.js'
# full is the lime player with all plugins
full:
src:['lib/lime.js','lib/utils.js']
dest: 'lib/lime.js'
# deps is the dependencies without the player itself.
deps:
src:['lib/underscoreJS/underscore.min.js', 'lib/backboneJS/backbone.js', 'lib/rdfquery/latest/jquery.rdfquery.debug.js', 'lib/vie/vie.js'],
dest: 'lib/lime-deps.js'
# Add short info in front of the produced file
usebanner:
coffee:
options:
position: 'top' || 'bottom'
banner: banner
files:
src: [ 'lib/lime.js', 'lib/lime-core.js' ]
docco_husky:
project_name: "LIME - Linked Media Player"
# show_timestamp: false
files: [
'src/lime.coffee'
'src/annotation.coffee'
'src/plugin.coffee'
'src/widget.coffee'
'src/videojs-adapter.coffee'
'src/cmf.coffee'
'src/annotationoverlays.coffee'
'src/jquery.scrollTo.coffee'
'src/plugins/*.coffee'
'src/settings/*.coffee'
]
# Automated recompilation and testing when developing
watch:
# Files to watch
files: [
'src/*.coffee'
'src/**/*.coffee'
]
# Tasks to run on change
tasks: ['build']
# Build dependencies
@loadNpmTasks 'grunt-contrib-coffee'
@loadNpmTasks 'grunt-contrib-concat'
@loadNpmTasks 'grunt-contrib-uglify'
@loadNpmTasks 'grunt-banner'
@loadNpmTasks 'grunt-docco-husky'
# Testing dependencies
# @loadNpmTasks 'grunt-contrib-jshint'
# @loadNpmTasks 'grunt-contrib-qunit'
# @loadNpmTasks 'grunt-contrib-nodeunit'
@loadNpmTasks 'grunt-contrib-watch'
# Local tasks
@registerTask 'build', ["coffee", "concat", "usebanner", "uglify"]
@registerTask 'default', 'build'
@registerTask 'doc', =>
@task.run "docco_husky" | true | module.exports = ->
banner = """/* Lime Player <%= pkg.version %> - Linked Media Player
by PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI and the ConnectMe Project.
Available under the Apache License, Version 2.0
See http://connectme.sti2.org/ for more information.
*/"""
# Project configuration
@initConfig
pkg: @file.readJSON 'package.json'
# Build setup: concatenate source files
coffee:
compile:
files:
'lib/lime-core.js': [
'src/lime.coffee'
'src/annotation.coffee'
'src/plugin.coffee'
'src/widget.coffee'
'src/videojs-adapter.coffee'
'src/cmf.coffee'
'src/annotationoverlays.coffee'
'src/plugins/ldplugin.coffee'
'src/jquery.scrollTo.coffee'
'src/settings/usersettings.coffee'
]
'lib/lime.js': [
'src/lime.coffee'
'src/annotation.coffee'
'src/plugin.coffee'
'src/widget.coffee'
'src/videojs-adapter.coffee'
'src/cmf.coffee'
'src/annotationoverlays.coffee'
'src/jquery.scrollTo.coffee'
'src/plugins/*.coffee'
'src/settings/*.coffee'
]
mult_plugins:
src: ['src/plugins/*.coffee']
dest: 'lib/plugins/'
flatten: true
expand: true
ext: '.js'
mult_settings:
src: ['src/settings/*.coffee']
dest: 'lib/settings/'
flatten: true
expand: true
ext: '.js'
# JavaScript minification
uglify:
options:
banner: banner
report: 'min'
full:
files:
'lib/lime.min.js': ['lib/lime.js']
core:
files:
'lib/lime-core.min.js': ['lib/lime-core.js']
deps:
files:
'lib/lime-deps.min.js': ['lib/lime-deps.js']
concat:
# core is the lime player without extra plugins
core:
src: ['lib/lime-core.js','lib/utils.js']
dest: 'lib/lime-core.js'
# full is the lime player with all plugins
full:
src:['lib/lime.js','lib/utils.js']
dest: 'lib/lime.js'
# deps is the dependencies without the player itself.
deps:
src:['lib/underscoreJS/underscore.min.js', 'lib/backboneJS/backbone.js', 'lib/rdfquery/latest/jquery.rdfquery.debug.js', 'lib/vie/vie.js'],
dest: 'lib/lime-deps.js'
# Add short info in front of the produced file
usebanner:
coffee:
options:
position: 'top' || 'bottom'
banner: banner
files:
src: [ 'lib/lime.js', 'lib/lime-core.js' ]
docco_husky:
project_name: "LIME - Linked Media Player"
# show_timestamp: false
files: [
'src/lime.coffee'
'src/annotation.coffee'
'src/plugin.coffee'
'src/widget.coffee'
'src/videojs-adapter.coffee'
'src/cmf.coffee'
'src/annotationoverlays.coffee'
'src/jquery.scrollTo.coffee'
'src/plugins/*.coffee'
'src/settings/*.coffee'
]
# Automated recompilation and testing when developing
watch:
# Files to watch
files: [
'src/*.coffee'
'src/**/*.coffee'
]
# Tasks to run on change
tasks: ['build']
# Build dependencies
@loadNpmTasks 'grunt-contrib-coffee'
@loadNpmTasks 'grunt-contrib-concat'
@loadNpmTasks 'grunt-contrib-uglify'
@loadNpmTasks 'grunt-banner'
@loadNpmTasks 'grunt-docco-husky'
# Testing dependencies
# @loadNpmTasks 'grunt-contrib-jshint'
# @loadNpmTasks 'grunt-contrib-qunit'
# @loadNpmTasks 'grunt-contrib-nodeunit'
@loadNpmTasks 'grunt-contrib-watch'
# Local tasks
@registerTask 'build', ["coffee", "concat", "usebanner", "uglify"]
@registerTask 'default', 'build'
@registerTask 'doc', =>
@task.run "docco_husky" |
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.998812735080719,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-fs-symlink.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
path = require("path")
fs = require("fs")
exec = require("child_process").exec
completed = 0
expected_tests = 2
is_windows = process.platform is "win32"
runtest = (skip_symlinks) ->
unless skip_symlinks
# test creating and reading symbolic link
linkData = path.join(common.fixturesDir, "/cycles/root.js")
linkPath = path.join(common.tmpDir, "symlink1.js")
# Delete previously created link
try
fs.unlinkSync linkPath
fs.symlink linkData, linkPath, (err) ->
throw err if err
console.log "symlink done"
# todo: fs.lstat?
fs.readlink linkPath, (err, destination) ->
throw err if err
assert.equal destination, linkData
completed++
return
return
# test creating and reading hard link
srcPath = path.join(common.fixturesDir, "cycles", "root.js")
dstPath = path.join(common.tmpDir, "link1.js")
# Delete previously created link
try
fs.unlinkSync dstPath
fs.link srcPath, dstPath, (err) ->
throw err if err
console.log "hard link done"
srcContent = fs.readFileSync(srcPath, "utf8")
dstContent = fs.readFileSync(dstPath, "utf8")
assert.equal srcContent, dstContent
completed++
return
return
if is_windows
# On Windows, creating symlinks requires admin privileges.
# We'll only try to run symlink test if we have enough privileges.
exec "whoami /priv", (err, o) ->
if err or o.indexOf("SeCreateSymbolicLinkPrivilege") is -1
expected_tests = 1
runtest true
else
runtest false
return
else
runtest false
process.on "exit", ->
assert.equal completed, expected_tests
return
| 221834 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
path = require("path")
fs = require("fs")
exec = require("child_process").exec
completed = 0
expected_tests = 2
is_windows = process.platform is "win32"
runtest = (skip_symlinks) ->
unless skip_symlinks
# test creating and reading symbolic link
linkData = path.join(common.fixturesDir, "/cycles/root.js")
linkPath = path.join(common.tmpDir, "symlink1.js")
# Delete previously created link
try
fs.unlinkSync linkPath
fs.symlink linkData, linkPath, (err) ->
throw err if err
console.log "symlink done"
# todo: fs.lstat?
fs.readlink linkPath, (err, destination) ->
throw err if err
assert.equal destination, linkData
completed++
return
return
# test creating and reading hard link
srcPath = path.join(common.fixturesDir, "cycles", "root.js")
dstPath = path.join(common.tmpDir, "link1.js")
# Delete previously created link
try
fs.unlinkSync dstPath
fs.link srcPath, dstPath, (err) ->
throw err if err
console.log "hard link done"
srcContent = fs.readFileSync(srcPath, "utf8")
dstContent = fs.readFileSync(dstPath, "utf8")
assert.equal srcContent, dstContent
completed++
return
return
if is_windows
# On Windows, creating symlinks requires admin privileges.
# We'll only try to run symlink test if we have enough privileges.
exec "whoami /priv", (err, o) ->
if err or o.indexOf("SeCreateSymbolicLinkPrivilege") is -1
expected_tests = 1
runtest true
else
runtest false
return
else
runtest false
process.on "exit", ->
assert.equal completed, expected_tests
return
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
path = require("path")
fs = require("fs")
exec = require("child_process").exec
completed = 0
expected_tests = 2
is_windows = process.platform is "win32"
runtest = (skip_symlinks) ->
unless skip_symlinks
# test creating and reading symbolic link
linkData = path.join(common.fixturesDir, "/cycles/root.js")
linkPath = path.join(common.tmpDir, "symlink1.js")
# Delete previously created link
try
fs.unlinkSync linkPath
fs.symlink linkData, linkPath, (err) ->
throw err if err
console.log "symlink done"
# todo: fs.lstat?
fs.readlink linkPath, (err, destination) ->
throw err if err
assert.equal destination, linkData
completed++
return
return
# test creating and reading hard link
srcPath = path.join(common.fixturesDir, "cycles", "root.js")
dstPath = path.join(common.tmpDir, "link1.js")
# Delete previously created link
try
fs.unlinkSync dstPath
fs.link srcPath, dstPath, (err) ->
throw err if err
console.log "hard link done"
srcContent = fs.readFileSync(srcPath, "utf8")
dstContent = fs.readFileSync(dstPath, "utf8")
assert.equal srcContent, dstContent
completed++
return
return
if is_windows
# On Windows, creating symlinks requires admin privileges.
# We'll only try to run symlink test if we have enough privileges.
exec "whoami /priv", (err, o) ->
if err or o.indexOf("SeCreateSymbolicLinkPrivilege") is -1
expected_tests = 1
runtest true
else
runtest false
return
else
runtest false
process.on "exit", ->
assert.equal completed, expected_tests
return
|
[
{
"context": "ank:\"species\"\n sc:\"chrysolaus\"\n ja:\"アカハラ\"\n }\n {\n \"alien\":false,\n up",
"end": 107441,
"score": 0.670654833316803,
"start": 107440,
"tag": "NAME",
"value": "ア"
},
{
"context": " upper:\"fabalis\"\n sc:\"curtus\"\n ... | migrations/migrate.coffee | KamataRyo/bird-api | 1 | # mongo birdAPI
# this is db import script
db.dropDatabase()
data_names = [
{
sc:"galliformes"
ja:"キジ目"
rank:"order"
}
{
sc:"anseriformes"
ja:"カモ目"
rank:"order"
}
{
sc:"podicipediformes"
ja:"カイツブリ目"
rank:"order"
}
{
sc:"phaethontiformes"
ja:"ネッタイチョウ目"
rank:"order"
}
{
sc:"pterocliformes"
ja:"サケイ目"
rank:"order"
}
{
sc:"columbiformes"
ja:"ハト目"
rank:"order"
}
{
sc:"gaviiformes"
ja:"アビ目"
rank:"order"
}
{
sc:"procellariiformes"
ja:"ミズナギドリ目"
rank:"order"
}
{
sc:"ciconiiformes"
ja:"コウノトリ目"
rank:"order"
}
{
sc:"suliformes"
ja:"カツオドリ目"
rank:"order"
}
{
sc:"pelecaniformes"
ja:"ペリカン目"
rank:"order"
}
{
sc:"gruiformes"
ja:"ツル目"
rank:"order"
}
{
sc:"otidiformes"
ja:"ノガン目"
rank:"order"
}
{
sc:"cuculiformes"
ja:"カッコウ目"
rank:"order"
}
{
sc:"caprimulgiformes"
ja:"ヨタカ目"
rank:"order"
}
{
sc:"apodiformes"
ja:"アマツバメ目"
rank:"order"
}
{
sc:"charadriiformes"
ja:"チドリ目"
rank:"order"
}
{
sc:"accipitriformes"
ja:"タカ目"
rank:"order"
}
{
sc:"strigiformes"
ja:"フクロウ目"
rank:"order"
}
{
sc:"bucerotiformes"
ja:"サイチョウ目"
rank:"order"
}
{
sc:"coraciiformes"
ja:"ブッポウソウ目"
rank:"order"
}
{
sc:"piciformes"
ja:"キツツキ目"
rank:"order"
}
{
sc:"falconiformes"
ja:"ハヤブサ目"
rank:"order"
}
{
sc:"passeriformes"
ja:"スズメ目"
rank:"order"
}
{
sc:"galliformes"
ja:"キジ目"
rank:"order"
}
{
sc:"anseriformes"
ja:"カモ目"
rank:"order"
}
{
sc:"columbiformes"
ja:"ハト目"
rank:"order"
}
{
sc:"ciconiiformes"
ja:"コウノトリ目"
rank:"order"
}
{
sc:"pelecaniformes"
ja:"ペリカン目"
rank:"order"
}
{
sc:"charadriiformes"
ja:"チドリ目"
rank:"order"
}
{
sc:"psittaciformes"
ja:"インコ目"
rank:"order"
}
{
sc:"passeriformes"
ja:"スズメ目"
rank:"order"
}
{
sc:"phasianidae"
ja:"キジ科"
rank:"family"
upper:"galliformes"
}
{
sc:"anatidae"
ja:"カモ科"
rank:"family"
upper:"anseriformes"
}
{
sc:"podicipedidae"
ja:"カイツブリ科"
rank:"family"
upper:"podicipediformes"
}
{
sc:"phaethontidae"
ja:"ネッタイチョウ科"
rank:"family"
upper:"phaethontiformes"
}
{
sc:"pteroclidae"
ja:"サケイ科"
rank:"family"
upper:"pterocliformes"
}
{
sc:"columbidae"
ja:"ハト科"
rank:"family"
upper:"columbiformes"
}
{
sc:"gaviidae"
ja:"アビ科"
rank:"family"
upper:"gaviiformes"
}
{
sc:"diomedeidae"
ja:"アホウドリ科"
rank:"family"
upper:"procellariiformes"
}
{
sc:"procellariidae"
ja:"ミズナギドリ科"
rank:"family"
upper:"procellariiformes"
}
{
sc:"hydrobatidae"
ja:"ウミツバメ科"
rank:"family"
upper:"procellariiformes"
}
{
sc:"ciconiidae"
ja:"コウノトリ科"
rank:"family"
upper:"ciconiiformes"
}
{
sc:"fregatidae"
ja:"グンカンドリ科"
rank:"family"
upper:"suliformes"
}
{
sc:"sulidae"
ja:"カツオドリ科"
rank:"family"
upper:"suliformes"
}
{
sc:"phalacrocoracidae"
ja:"ウ科"
rank:"family"
upper:"suliformes"
}
{
sc:"pelecanidae"
ja:"ペリカン科"
rank:"family"
upper:"pelecaniformes"
}
{
sc:"ardeidae"
ja:"サギ科"
rank:"family"
upper:"pelecaniformes"
}
{
sc:"threskiornithidae"
ja:"トキ科"
rank:"family"
upper:"pelecaniformes"
}
{
sc:"gruidae"
ja:"ツル科"
rank:"family"
upper:"gruiformes"
}
{
sc:"rallidae"
ja:"クイナ科"
rank:"family"
upper:"gruiformes"
}
{
sc:"otididae"
ja:"ノガン科"
rank:"family"
upper:"otidiformes"
}
{
sc:"cuculidae"
ja:"カッコウ科"
rank:"family"
upper:"cuculiformes"
}
{
sc:"caprimulgidae"
ja:"ヨタカ科"
rank:"family"
upper:"caprimulgiformes"
}
{
sc:"apodidae"
ja:"アマツバメ科"
rank:"family"
upper:"apodiformes"
}
{
sc:"charadriidae"
ja:"チドリ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"haematopodidae"
ja:"ミヤコドリ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"recurvirostridae"
ja:"セイタカシギ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"scolopacidae"
ja:"シギ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"jacanidae"
ja:"レンカク科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"rostratulidae"
ja:"タマシギ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"turnicidae"
ja:"ミフウズラ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"glareolidae"
ja:"ツバメチドリ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"laridae"
ja:"カモメ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"stercorariidae"
ja:"トウゾクカモメ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"alcidae"
ja:"ウミスズメ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"pandionidae"
ja:"ミサゴ科"
rank:"family"
upper:"accipitriformes"
}
{
sc:"accipitridae"
ja:"タカ科"
rank:"family"
upper:"accipitriformes"
}
{
sc:"tytonidae"
ja:"メンフクロウ科"
rank:"family"
upper:"strigiformes"
}
{
sc:"strigidae"
ja:"フクロウ科"
rank:"family"
upper:"strigiformes"
}
{
sc:"upupidae"
ja:"ヤツガシラ科"
rank:"family"
upper:"bucerotiformes"
}
{
sc:"alcedinidae"
ja:"カワセミ科"
rank:"family"
upper:"coraciiformes"
}
{
sc:"meropidae"
ja:"ハチクイ科"
rank:"family"
upper:"coraciiformes"
}
{
sc:"coraciidae"
ja:"ブッポウソウ科"
rank:"family"
upper:"coraciiformes"
}
{
sc:"picidae"
ja:"キツツキ科"
rank:"family"
upper:"piciformes"
}
{
sc:"falconidae"
ja:"ハヤブサ科"
rank:"family"
upper:"falconiformes"
}
{
sc:"pittidae"
ja:"ヤイロチョウ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"artamidae"
ja:"モリツバメ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"campephagidae"
ja:"サンショウクイ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"oriolidae"
ja:"コウライウグイス科"
rank:"family"
upper:"passeriformes"
}
{
sc:"dicruridae"
ja:"オウチュウ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"monarchidae"
ja:"カササギヒタキ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"laniidae"
ja:"モズ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"corvidae"
ja:"カラス科"
rank:"family"
upper:"passeriformes"
}
{
sc:"regulidae"
ja:"キクイタダキ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"remizidae"
ja:"ツリスガラ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"paridae"
ja:"シジュウカラ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"panuridae"
ja:"ヒゲガラ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"alaudidae"
ja:"ヒバリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"hirundinidae"
ja:"ツバメ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"pycnonotidae"
ja:"ヒヨドリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"cettiidae"
ja:"ウグイス科"
rank:"family"
upper:"passeriformes"
}
{
sc:"aegithalidae"
ja:"エナガ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"phylloscopidae"
ja:"ムシクイ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"sylviidae"
ja:"ズグロムシクイ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"zosteropidae"
ja:"メジロ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"locustellidae"
ja:"センニュウ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"acrocephalidae"
ja:"ヨシキリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"cisticolidae"
ja:"セッカ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"bombycillidae"
ja:"レンジャク科"
rank:"family"
upper:"passeriformes"
}
{
sc:"sittidae"
ja:"ゴジュウカラ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"certhiidae"
ja:"キバシリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"troglodytidae"
ja:"ミソサザイ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"sturnidae"
ja:"ムクドリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"cinclidae"
ja:"カワガラス科"
rank:"family"
upper:"passeriformes"
}
{
sc:"muscicapidae"
ja:"ヒタキ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"prunellidae"
ja:"イワヒバリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"passeridae"
ja:"スズメ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"motacillidae"
ja:"セキレイ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"fringillidae"
ja:"アトリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"calcariidae"
ja:"ツメナガホオジロ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"parulidae"
ja:"アメリカムシクイ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"emberizidae"
ja:"ホオジロ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"psittacidae"
ja:"インコ科"
rank:"family"
upper:"psittaciformes"
}
{
sc:"timaliidae"
ja:"チメドリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"ploceidae"
ja:"ハタオリドリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"estrildidae"
ja:"カエデチョウ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"viduidae"
ja:"テンニンチョウ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"thraupidae"
ja:"フウキンチョウ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"tetrastes"
ja:"エゾライチョウ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"lagopus"
ja:"ライチョウ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"coturnix"
ja:"ウズラ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"syrmaticus"
ja:"ヤマドリ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"phasianus"
ja:"キジ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"dendrocygna"
ja:"リュウキュウガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"anser"
ja:"マガン属"
rank:"genus"
upper:"anatidae"
}
{
sc:"branta"
ja:"コクガン属"
rank:"genus"
upper:"anatidae"
}
{
sc:"cygnus"
ja:"ハクチョウ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"tadorna"
ja:"ツクシガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"aix"
ja:"オシドリ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"nettapus"
ja:"ナンキンオシ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"anas"
ja:"マガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"netta"
ja:"アカハシハジロ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"aythya"
ja:"スズガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"polysticta"
ja:"コケワタガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"somateria"
ja:"ケワタガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"histrionicus"
ja:"シノリガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"melanitta"
ja:"ビロードキンクロ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"clangula"
ja:"コオリガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"bucephala"
ja:"ホオジロガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"mergellus"
ja:"ミコアイサ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"mergus"
ja:"ウミアイサ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"tachybaptus"
ja:"カイツブリ属"
rank:"genus"
upper:"podicipedidae"
}
{
sc:"podiceps"
ja:"カンムリカイツブリ属"
rank:"genus"
upper:"podicipedidae"
}
{
sc:"phaethon"
ja:"ネッタイチョウ属"
rank:"genus"
upper:"phaethontidae"
}
{
sc:"syrrhaptes"
ja:"サケイ属"
rank:"genus"
upper:"pteroclidae"
}
{
sc:"columba"
ja:"カワラバト属"
rank:"genus"
upper:"columbidae"
}
{
sc:"streptopelia"
ja:"キジバト属"
rank:"genus"
upper:"columbidae"
}
{
sc:"chalcophaps"
ja:"キンバト属"
rank:"genus"
upper:"columbidae"
}
{
sc:"treron"
ja:"アオバト属"
rank:"genus"
upper:"columbidae"
}
{
sc:"ptilinopus"
ja:"ヒメアオバト属"
rank:"genus"
upper:"columbidae"
}
{
sc:"gavia"
ja:"アビ属"
rank:"genus"
upper:"gaviidae"
}
{
sc:"phoebastria"
ja:"アホウドリ属"
rank:"genus"
upper:"diomedeidae"
}
{
sc:"fulmarus"
ja:"フルマカモメ属"
rank:"genus"
upper:"procellariidae"
}
{
sc:"pterodroma"
ja:"シロハラミズナギドリ属"
rank:"genus"
upper:"procellariidae"
}
{
sc:"calonectris"
ja:"オオミズナギドリ属"
rank:"genus"
upper:"procellariidae"
}
{
sc:"puffinus"
ja:"ハイイロミズナギドリ属"
rank:"genus"
upper:"procellariidae"
}
{
sc:"bulweria"
ja:"アナドリ属"
rank:"genus"
upper:"procellariidae"
}
{
sc:"oceanites"
ja:"アシナガウミツバメ属"
rank:"genus"
upper:"hydrobatidae"
}
{
sc:"oceanodroma"
ja:"オーストンウミツバメ属"
rank:"genus"
upper:"hydrobatidae"
}
{
sc:"ciconia"
ja:"コウノトリ属"
rank:"genus"
upper:"ciconiidae"
}
{
sc:"fregata"
ja:"グンカンドリ属"
rank:"genus"
upper:"fregatidae"
}
{
sc:"sula"
ja:"カツオドリ属"
rank:"genus"
upper:"sulidae"
}
{
sc:"phalacrocorax"
ja:"ウ属"
rank:"genus"
upper:"phalacrocoracidae"
}
{
sc:"pelecanus"
ja:"ペリカン属"
rank:"genus"
upper:"pelecanidae"
}
{
sc:"botaurus"
ja:"サンカノゴイ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"ixobrychus"
ja:"ヨシゴイ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"gorsachius"
ja:"ミゾゴイ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"nycticorax"
ja:"ゴイサギ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"butorides"
ja:"ササゴイ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"ardeola"
ja:"アカガシラサギ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"bubulcus"
ja:"アマサギ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"ardea"
ja:"アオサギ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"egretta"
ja:"コサギ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"threskiornis"
ja:"クロトキ属"
rank:"genus"
upper:"threskiornithidae"
}
{
sc:"nipponia"
ja:"トキ属"
rank:"genus"
upper:"threskiornithidae"
}
{
sc:"platalea"
ja:"ヘラサギ属"
rank:"genus"
upper:"threskiornithidae"
}
{
sc:"grus"
ja:"ツル属"
rank:"genus"
upper:"gruidae"
}
{
sc:"anthropoides"
ja:"アネハヅル属"
rank:"genus"
upper:"gruidae"
}
{
sc:"coturnicops"
ja:"シマクイナ属"
rank:"genus"
upper:"rallidae"
}
{
sc:"rallina"
ja:"オオクイナ属"
rank:"genus"
upper:"rallidae"
}
{
sc:"gallirallus"
ja:"ヤンバルクイナ属"
rank:"genus"
upper:"rallidae"
}
{
sc:"rallus"
ja:"クイナ属"
rank:"genus"
upper:"rallidae"
}
{
sc:"amaurornis"
ja:"シロハラクイナ属"
rank:"genus"
upper:"rallidae"
}
{
sc:"porzana"
ja:"ヒメクイナ属"
rank:"genus"
upper:"rallidae"
}
{
sc:"gallicrex"
ja:"ツルクイナ属"
rank:"genus"
upper:"rallidae"
}
{
sc:"gallinula"
ja:"バン属"
rank:"genus"
upper:"rallidae"
}
{
sc:"fulica"
ja:"オオバン属"
rank:"genus"
upper:"rallidae"
}
{
sc:"otis"
ja:"ノガン属"
rank:"genus"
upper:"otididae"
}
{
sc:"tetrax"
ja:"ヒメノガン属"
rank:"genus"
upper:"otididae"
}
{
sc:"centropus"
ja:"バンケン属"
rank:"genus"
upper:"cuculidae"
}
{
sc:"clamator"
ja:"カンムリカッコウ属"
rank:"genus"
upper:"cuculidae"
}
{
sc:"eudynamys"
ja:"オニカッコウ属"
rank:"genus"
upper:"cuculidae"
}
{
sc:"urodynamis"
ja:"キジカッコウ属"
rank:"genus"
upper:"cuculidae"
}
{
sc:"surniculus"
ja:"オウチュウカッコウ属"
rank:"genus"
upper:"cuculidae"
}
{
sc:"hierococcyx"
ja:"ジュウイチ属"
rank:"genus"
upper:"cuculidae"
}
{
sc:"cuculus"
ja:"カッコウ属"
rank:"genus"
upper:"cuculidae"
}
{
sc:"caprimulgus"
ja:"ヨタカ属"
rank:"genus"
upper:"caprimulgidae"
}
{
sc:"aerodramus"
ja:"ヒマラヤアナツバメ属"
rank:"genus"
upper:"apodidae"
}
{
sc:"hirundapus"
ja:"ハリオアマツバメ属"
rank:"genus"
upper:"apodidae"
}
{
sc:"apus"
ja:"アマツバメ属"
rank:"genus"
upper:"apodidae"
}
{
sc:"vanellus"
ja:"タゲリ属"
rank:"genus"
upper:"charadriidae"
}
{
sc:"pluvialis"
ja:"ムナグロ属"
rank:"genus"
upper:"charadriidae"
}
{
sc:"charadrius"
ja:"チドリ属"
rank:"genus"
upper:"charadriidae"
}
{
sc:"haematopus"
ja:"ミヤコドリ属"
rank:"genus"
upper:"haematopodidae"
}
{
sc:"himantopus"
ja:"セイタカシギ属"
rank:"genus"
upper:"recurvirostridae"
}
{
sc:"recurvirostra"
ja:"ソリハシセイタカシギ属"
rank:"genus"
upper:"recurvirostridae"
}
{
sc:"scolopax"
ja:"ヤマシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"lymnocryptes"
ja:"コシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"gallinago"
ja:"タシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"limnodromus"
ja:"オオハシシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"limosa"
ja:"オグロシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"numenius"
ja:"ダイシャクシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"tringa"
ja:"クサシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"heteroscelus"
ja:"キアシシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"xenus"
ja:"ソリハシシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"actitis"
ja:"イソシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"arenaria"
ja:"キョウジョシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"calidris"
ja:"オバシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"eurynorhynchus"
ja:"ヘラシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"limicola"
ja:"キリアイ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"tryngites"
ja:"コモンシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"philomachus"
ja:"エリマキシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"phalaropus"
ja:"ヒレアシシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"hydrophasianus"
ja:"レンカク属"
rank:"genus"
upper:"jacanidae"
}
{
sc:"rostratula"
ja:"タマシギ属"
rank:"genus"
upper:"rostratulidae"
}
{
sc:"turnix"
ja:"ミフウズラ属"
rank:"genus"
upper:"turnicidae"
}
{
sc:"glareola"
ja:"ツバメチドリ属"
rank:"genus"
upper:"glareolidae"
}
{
sc:"anous"
ja:"クロアジサシ属"
rank:"genus"
upper:"laridae"
}
{
sc:"procelsterna"
ja:"ハイイロアジサシ属"
rank:"genus"
upper:"laridae"
}
{
sc:"gygis"
ja:"シロアジサシ属"
rank:"genus"
upper:"laridae"
}
{
sc:"rissa"
ja:"ミツユビカモメ属"
rank:"genus"
upper:"laridae"
}
{
sc:"pagophila"
ja:"ゾウゲカモメ属"
rank:"genus"
upper:"laridae"
}
{
sc:"xema"
ja:"クビワカモメ属"
rank:"genus"
upper:"laridae"
}
{
sc:"rhodostethia"
ja:"ヒメクビワカモメ属"
rank:"genus"
upper:"laridae"
}
{
sc:"larus"
ja:"カモメ属"
rank:"genus"
upper:"laridae"
}
{
sc:"gelochelidon"
ja:"ハシブトアジサシ属"
rank:"genus"
upper:"laridae"
}
{
sc:"sterna"
ja:"アジサシ属"
rank:"genus"
upper:"laridae"
}
{
sc:"chlidonias"
ja:"クロハラアジサシ属"
rank:"genus"
upper:"laridae"
}
{
sc:"stercorarius"
ja:"トウゾクカモメ属"
rank:"genus"
upper:"stercorariidae"
}
{
sc:"alle"
ja:"ヒメウミスズメ属"
rank:"genus"
upper:"alcidae"
}
{
sc:"uria"
ja:"ウミガラス属"
rank:"genus"
upper:"alcidae"
}
{
sc:"alca"
ja:"オオハシウミガラス属"
rank:"genus"
upper:"alcidae"
}
{
sc:"cepphus"
ja:"ウミバト属"
rank:"genus"
upper:"alcidae"
}
{
sc:"brachyramphus"
ja:"マダラウミスズメ属"
rank:"genus"
upper:"alcidae"
}
{
sc:"synthliboramphus"
ja:"ウミスズメ属"
rank:"genus"
upper:"alcidae"
}
{
sc:"aethia"
ja:"エトロフウミスズメ属"
rank:"genus"
upper:"alcidae"
}
{
sc:"cerorhinca"
ja:"ウトウ属"
rank:"genus"
upper:"alcidae"
}
{
sc:"fratercula"
ja:"ツノメドリ属"
rank:"genus"
upper:"alcidae"
}
{
sc:"pandion"
ja:"ミサゴ属"
rank:"genus"
upper:"pandionidae"
}
{
sc:"pernis"
ja:"ハチクマ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"elanus"
ja:"カタグロトビ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"milvus"
ja:"トビ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"haliaeetus"
ja:"オジロワシ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"aegypius"
ja:"クロハゲワシ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"spilornis"
ja:"カンムリワシ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"circus"
ja:"チュウヒ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"accipiter"
ja:"ハイタカ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"butastur"
ja:"サシバ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"buteo"
ja:"ノスリ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"aquila"
ja:"イヌワシ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"nisaetus"
ja:"クマタカ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"tyto"
ja:"メンフクロウ属"
rank:"genus"
upper:"tytonidae"
}
{
sc:"otus"
ja:"コノハズク属"
rank:"genus"
upper:"strigidae"
}
{
sc:"bubo"
ja:"ワシミミズク属"
rank:"genus"
upper:"strigidae"
}
{
sc:"ketupa"
ja:"シマフクロウ属"
rank:"genus"
upper:"strigidae"
}
{
sc:"strix"
ja:"フクロウ属"
rank:"genus"
upper:"strigidae"
}
{
sc:"aegolius"
ja:"キンメフクロウ属"
rank:"genus"
upper:"strigidae"
}
{
sc:"ninox"
ja:"アオバズク属"
rank:"genus"
upper:"strigidae"
}
{
sc:"asio"
ja:"トラフズク属"
rank:"genus"
upper:"strigidae"
}
{
sc:"upupa"
ja:"ヤツガシラ属"
rank:"genus"
upper:"upupidae"
}
{
sc:"halcyon"
ja:"アカショウビン属"
rank:"genus"
upper:"alcedinidae"
}
{
sc:"todiramphus"
ja:"ナンヨウショウビン属"
rank:"genus"
upper:"alcedinidae"
}
{
sc:"alcedo"
ja:"カワセミ属"
rank:"genus"
upper:"alcedinidae"
}
{
sc:"ceyx"
ja:"ミツユビカワセミ属"
rank:"genus"
upper:"alcedinidae"
}
{
sc:"megaceryle"
ja:"ヤマセミ属"
rank:"genus"
upper:"alcedinidae"
}
{
sc:"merops"
ja:"ハチクイ属"
rank:"genus"
upper:"meropidae"
}
{
sc:"eurystomus"
ja:"ブッポウソウ属"
rank:"genus"
upper:"coraciidae"
}
{
sc:"jynx"
ja:"アリスイ属"
rank:"genus"
upper:"picidae"
}
{
sc:"dendrocopos"
ja:"アカゲラ属"
rank:"genus"
upper:"picidae"
}
{
sc:"picoides"
ja:"ミユビゲラ属"
rank:"genus"
upper:"picidae"
}
{
sc:"dryocopus"
ja:"クマゲラ属"
rank:"genus"
upper:"picidae"
}
{
sc:"picus"
ja:"アオゲラ属"
rank:"genus"
upper:"picidae"
}
{
sc:"sapheopipo"
ja:"ノグチゲラ属"
rank:"genus"
upper:"picidae"
}
{
sc:"falco"
ja:"ハヤブサ属"
rank:"genus"
upper:"falconidae"
}
{
sc:"pitta"
ja:"ヤイロチョウ属"
rank:"genus"
upper:"pittidae"
}
{
sc:"artamus"
ja:"モリツバメ属"
rank:"genus"
upper:"artamidae"
}
{
sc:"coracina"
ja:"アサクラサンショウクイ属"
rank:"genus"
upper:"campephagidae"
}
{
sc:"pericrocotus"
ja:"サンショウクイ属"
rank:"genus"
upper:"campephagidae"
}
{
sc:"oriolus"
ja:"コウライウグイス属"
rank:"genus"
upper:"oriolidae"
}
{
sc:"dicrurus"
ja:"オウチュウ属"
rank:"genus"
upper:"dicruridae"
}
{
sc:"hypothymis"
ja:"クロエリヒタキ属"
rank:"genus"
upper:"monarchidae"
}
{
sc:"terpsiphone"
ja:"サンコウチョウ属"
rank:"genus"
upper:"monarchidae"
}
{
sc:"lanius"
ja:"モズ属"
rank:"genus"
upper:"laniidae"
}
{
sc:"garrulus"
ja:"カケス属"
rank:"genus"
upper:"corvidae"
}
{
sc:"cyanopica"
ja:"オナガ属"
rank:"genus"
upper:"corvidae"
}
{
sc:"pica"
ja:"カササギ属"
rank:"genus"
upper:"corvidae"
}
{
sc:"nucifraga"
ja:"ホシガラス属"
rank:"genus"
upper:"corvidae"
}
{
sc:"corvus"
ja:"カラス属"
rank:"genus"
upper:"corvidae"
}
{
sc:"regulus"
ja:"キクイタダキ属"
rank:"genus"
upper:"regulidae"
}
{
sc:"remiz"
ja:"ツリスガラ属"
rank:"genus"
upper:"remizidae"
}
{
sc:"poecile"
ja:"コガラ属"
rank:"genus"
upper:"paridae"
}
{
sc:"periparus"
ja:"ヒガラ属"
rank:"genus"
upper:"paridae"
}
{
sc:"parus"
ja:"シジュウカラ属"
rank:"genus"
upper:"paridae"
}
{
sc:"cyanistes"
ja:"ルリガラ属"
rank:"genus"
upper:"paridae"
}
{
sc:"panurus"
ja:"ヒゲガラ属"
rank:"genus"
upper:"panuridae"
}
{
sc:"melanocorypha"
ja:"コウテンシ属"
rank:"genus"
upper:"alaudidae"
}
{
sc:"calandrella"
ja:"ヒメコウテンシ属"
rank:"genus"
upper:"alaudidae"
}
{
sc:"alauda"
ja:"ヒバリ属"
rank:"genus"
upper:"alaudidae"
}
{
sc:"eremophila"
ja:"ハマヒバリ属"
rank:"genus"
upper:"alaudidae"
}
{
sc:"riparia"
ja:"ショウドウツバメ属"
rank:"genus"
upper:"hirundinidae"
}
{
sc:"tachycineta"
ja:"ミドリツバメ属"
rank:"genus"
upper:"hirundinidae"
}
{
sc:"hirundo"
ja:"ツバメ属"
rank:"genus"
upper:"hirundinidae"
}
{
sc:"delichon"
ja:"イワツバメ属"
rank:"genus"
upper:"hirundinidae"
}
{
sc:"pycnonotus"
ja:"シロガシラ属"
rank:"genus"
upper:"pycnonotidae"
}
{
sc:"hypsipetes"
ja:"ヒヨドリ属"
rank:"genus"
upper:"pycnonotidae"
}
{
sc:"cettia"
ja:"ウグイス属"
rank:"genus"
upper:"cettiidae"
}
{
sc:"urosphena"
ja:"ヤブサメ属"
rank:"genus"
upper:"cettiidae"
}
{
sc:"aegithalos"
ja:"エナガ属"
rank:"genus"
upper:"aegithalidae"
}
{
sc:"phylloscopus"
ja:"ムシクイ属"
rank:"genus"
upper:"phylloscopidae"
}
{
sc:"sylvia"
ja:"ズグロムシクイ属"
rank:"genus"
upper:"sylviidae"
}
{
sc:"apalopteron"
ja:"メグロ属"
rank:"genus"
upper:"zosteropidae"
}
{
sc:"zosterops"
ja:"メジロ属"
rank:"genus"
upper:"zosteropidae"
}
{
sc:"locustella"
ja:"センニュウ属"
rank:"genus"
upper:"locustellidae"
}
{
sc:"acrocephalus"
ja:"ヨシキリ属"
rank:"genus"
upper:"acrocephalidae"
}
{
sc:"iduna"
ja:"ヒメウタイムシクイ属"
rank:"genus"
upper:"acrocephalidae"
}
{
sc:"cisticola"
ja:"セッカ属"
rank:"genus"
upper:"cisticolidae"
}
{
sc:"bombycilla"
ja:"レンジャク属"
rank:"genus"
upper:"bombycillidae"
}
{
sc:"sitta"
ja:"ゴジュウカラ属"
rank:"genus"
upper:"sittidae"
}
{
sc:"certhia"
ja:"キバシリ属"
rank:"genus"
upper:"certhiidae"
}
{
sc:"troglodytes"
ja:"ミソサザイ属"
rank:"genus"
upper:"troglodytidae"
}
{
sc:"spodiopsar"
ja:"ムクドリ属"
rank:"genus"
upper:"sturnidae"
}
{
sc:"agropsar"
ja:"コムクドリ属"
rank:"genus"
upper:"sturnidae"
}
{
sc:"sturnia"
ja:"カラムクドリ属"
rank:"genus"
upper:"sturnidae"
}
{
sc:"pastor"
ja:"バライロムクドリ属"
rank:"genus"
upper:"sturnidae"
}
{
sc:"sturnus"
ja:"ホシムクドリ属"
rank:"genus"
upper:"sturnidae"
}
{
sc:"cinclus"
ja:"カワガラス属"
rank:"genus"
upper:"cinclidae"
}
{
sc:"zoothera"
ja:"トラツグミ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"cichlopasser"
ja:"オガサワラガビチョウ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"catharus"
ja:"チャツグミ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"turdus"
ja:"ツグミ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"erithacus"
ja:"ヨーロッパコマドリ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"luscinia"
ja:"ノゴマ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"tarsiger"
ja:"ルリビタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"phoenicurus"
ja:"ジョウビタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"saxicola"
ja:"ノビタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"oenanthe"
ja:"サバクヒタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"monticola"
ja:"イソヒヨドリ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"muscicapa"
ja:"サメビタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"ficedula"
ja:"キビタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"cyanoptila"
ja:"オオルリ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"eumyias"
ja:"アイイロヒタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"niltava"
ja:"アオヒタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"prunella"
ja:"カヤクグリ属"
rank:"genus"
upper:"prunellidae"
}
{
sc:"passer"
ja:"スズメ属"
rank:"genus"
upper:"passeridae"
}
{
sc:"dendronanthus"
ja:"イワミセキレイ属"
rank:"genus"
upper:"motacillidae"
}
{
sc:"motacilla"
ja:"セキレイ属"
rank:"genus"
upper:"motacillidae"
}
{
sc:"anthus"
ja:"タヒバリ属"
rank:"genus"
upper:"motacillidae"
}
{
sc:"fringilla"
ja:"アトリ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"chloris"
ja:"カワラヒワ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"carduelis"
ja:"マヒワ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"leucosticte"
ja:"ハギマシコ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"uragus"
ja:"ベニマシコ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"chaunoproctus"
ja:"オガサワラマシコ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"carpodacus"
ja:"オオマシコ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"pinicola"
ja:"ギンザンマシコ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"loxia"
ja:"イスカ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"pyrrhula"
ja:"ウソ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"coccothraustes"
ja:"シメ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"eophona"
ja:"イカル属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"calcarius"
ja:"ツメナガホオジロ属"
rank:"genus"
upper:"calcariidae"
}
{
sc:"plectrophenax"
ja:"ユキホオジロ属"
rank:"genus"
upper:"calcariidae"
}
{
sc:"setophaga"
ja:"ハゴロモムシクイ属"
rank:"genus"
upper:"parulidae"
}
{
sc:"cardellina"
ja:"アカガオアメリカムシクイ属"
rank:"genus"
upper:"parulidae"
}
{
sc:"emberiza"
ja:"ホオジロ属"
rank:"genus"
upper:"emberizidae"
}
{
sc:"passerella"
ja:"ゴマフスズメ属"
rank:"genus"
upper:"emberizidae"
}
{
sc:"melospiza"
ja:"ウタスズメ属"
rank:"genus"
upper:"emberizidae"
}
{
sc:"zonotrichia"
ja:"ミヤマシトド属"
rank:"genus"
upper:"emberizidae"
}
{
sc:"passerculus"
ja:"サバンナシトド属"
rank:"genus"
upper:"emberizidae"
}
{
sc:"bambusicola"
ja:"コジュケイ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"syrmaticus"
ja:"ヤマドリ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"phasianus"
ja:"キジ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"pavo"
ja:"クジャク属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"branta"
ja:"コクガン属"
rank:"genus"
upper:"anatidae"
}
{
sc:"cygnus"
ja:"ハクチョウ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"columba"
ja:"カワラバト属"
rank:"genus"
upper:"columbidae"
}
{
sc:"streptopelia"
ja:"キジバト属"
rank:"genus"
upper:"columbidae"
}
{
sc:"ciconia"
ja:"コウノトリ属"
rank:"genus"
upper:"ciconiidae"
}
{
sc:"nipponia"
ja:"トキ属"
rank:"genus"
upper:"threskiornithidae"
}
{
sc:"himantopus"
ja:"セイタカシギ属"
rank:"genus"
upper:"recurvirostridae"
}
{
sc:"melopsittacus"
ja:"セキセイインコ属"
rank:"genus"
upper:"psittacidae"
}
{
sc:"psittacula"
ja:"ダルマインコ属"
rank:"genus"
upper:"psittacidae"
}
{
sc:"myiopsitta"
ja:"オキナインコ属"
rank:"genus"
upper:"psittacidae"
}
{
sc:"urocissa"
ja:"サンジャク属"
rank:"genus"
upper:"corvidae"
}
{
sc:"pica"
ja:"カササギ属"
rank:"genus"
upper:"corvidae"
}
{
sc:"zosterops"
ja:"メジロ属"
rank:"genus"
upper:"zosteropidae"
}
{
sc:"garrulax"
ja:"ガビチョウ属"
rank:"genus"
upper:"timaliidae"
}
{
sc:"leiothrix"
ja:"ソウシチョウ属"
rank:"genus"
upper:"timaliidae"
}
{
sc:"acridotheres"
ja:"ハッカチョウ属"
rank:"genus"
upper:"sturnidae"
}
{
sc:"gracupica"
ja:"クビワムクドリ属"
rank:"genus"
upper:"sturnidae"
}
{
sc:"ploceus"
ja:"キハタオリ属"
rank:"genus"
upper:"ploceidae"
}
{
sc:"euplectes"
ja:"キンランチョウ属"
rank:"genus"
upper:"ploceidae"
}
{
sc:"estrilda"
ja:"カエデチョウ属"
rank:"genus"
upper:"estrildidae"
}
{
sc:"amandava"
ja:"ベニスズメ属"
rank:"genus"
upper:"estrildidae"
}
{
sc:"lonchura"
ja:"キンパラ属"
rank:"genus"
upper:"estrildidae"
}
{
sc:"vidua"
ja:"テンニンチョウ属"
rank:"genus"
upper:"viduidae"
}
{
sc:"paroaria"
ja:"コウカンチョウ属"
rank:"genus"
upper:"thraupidae"
}
,{
"alien":false,
upper:"tetrastes"
rank:"species"
sc:"bonasia"
ja:"エゾライチョウ"
}
{
"alien":false,
upper:"lagopus"
rank:"species"
sc:"muta"
ja:"ライチョウ"
}
{
"alien":false,
upper:"coturnix"
rank:"species"
sc:"japonica"
ja:"ウズラ"
}
{
"alien":false,
upper:"syrmaticus"
rank:"species"
sc:"soemmerringii"
ja:"ヤマドリ"
}
{
"alien":false,
upper:"phasianus"
rank:"species"
sc:"colchicus"
ja:"キジ"
}
{
"alien":false,
upper:"dendrocygna"
rank:"species"
sc:"javanica"
ja:"リュウキュウガモ"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"cygnoides"
ja:"サカツラガン"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"fabalis"
ja:"ヒシクイ"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"anser"
ja:"ハイイロガン"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"albifrons"
ja:"マガン"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"erythropus"
ja:"カリガネ"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"indicus"
ja:"インドガン"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"caerulescens"
ja:"ハクガン"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"canagicus"
ja:"ミカドガン"
}
{
"alien":false,
upper:"branta"
rank:"species"
sc:"hutchinsii"
ja:"シジュウカラガン"
}
{
"alien":false,
upper:"branta"
rank:"species"
sc:"bernicla"
ja:"コクガン"
}
{
"alien":false,
upper:"cygnus"
rank:"species"
sc:"olor"
ja:"コブハクチョウ"
}
{
"alien":false,
upper:"cygnus"
rank:"species"
sc:"buccinator"
ja:"ナキハクチョウ"
}
{
"alien":false,
upper:"cygnus"
rank:"species"
sc:"columbianus"
ja:"コハクチョウ"
}
{
"alien":false,
upper:"cygnus"
rank:"species"
sc:"cygnus"
ja:"オオハクチョウ"
}
{
"alien":false,
upper:"tadorna"
rank:"species"
sc:"tadorna"
ja:"ツクシガモ"
}
{
"alien":false,
upper:"tadorna"
rank:"species"
sc:"ferruginea"
ja:"アカツクシガモ"
}
{
"alien":false,
upper:"tadorna"
rank:"species"
sc:"cristata"
ja:"カンムリツクシガモ"
}
{
"alien":false,
upper:"aix"
rank:"species"
sc:"galericulata"
ja:"オシドリ"
}
{
"alien":false,
upper:"nettapus"
rank:"species"
sc:"coromandelianus"
ja:"ナンキンオシ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"strepera"
ja:"オカヨシガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"falcata"
ja:"ヨシガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"penelope"
ja:"ヒドリガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"americana"
ja:"アメリカヒドリ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"platyrhynchos"
ja:"マガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"luzonica"
ja:"アカノドカルガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"zonorhyncha"
ja:"カルガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"discors"
ja:"ミカヅキシマアジ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"clypeata"
ja:"ハシビロガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"acuta"
ja:"オナガガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"querquedula"
ja:"シマアジ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"formosa"
ja:"トモエガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"crecca"
ja:"コガモ"
}
{
"alien":false,
upper:"netta"
rank:"species"
sc:"rufina"
ja:"アカハシハジロ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"valisineria"
ja:"オオホシハジロ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"americana"
ja:"アメリカホシハジロ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"ferina"
ja:"ホシハジロ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"baeri"
ja:"アカハジロ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"nyroca"
ja:"メジロガモ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"collaris"
ja:"クビワキンクロ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"fuligula"
ja:"キンクロハジロ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"marila"
ja:"スズガモ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"affinis"
ja:"コスズガモ"
}
{
"alien":false,
upper:"polysticta"
rank:"species"
sc:"stelleri"
ja:"コケワタガモ"
}
{
"alien":false,
upper:"somateria"
rank:"species"
sc:"spectabilis"
ja:"ケワタガモ"
}
{
"alien":false,
upper:"histrionicus"
rank:"species"
sc:"histrionicus"
ja:"シノリガモ"
}
{
"alien":false,
upper:"melanitta"
rank:"species"
sc:"perspicillata"
ja:"アラナミキンクロ"
}
{
"alien":false,
upper:"melanitta"
rank:"species"
sc:"fusca"
ja:"ビロードキンクロ"
}
{
"alien":false,
upper:"melanitta"
rank:"species"
sc:"americana"
ja:"クロガモ"
}
{
"alien":false,
upper:"clangula"
rank:"species"
sc:"hyemalis"
ja:"コオリガモ"
}
{
"alien":false,
upper:"bucephala"
rank:"species"
sc:"albeola"
ja:"ヒメハジロ"
}
{
"alien":false,
upper:"bucephala"
rank:"species"
sc:"clangula"
ja:"ホオジロガモ"
}
{
"alien":false,
upper:"mergellus"
rank:"species"
sc:"albellus"
ja:"ミコアイサ"
}
{
"alien":false,
upper:"mergus"
rank:"species"
sc:"merganser"
ja:"カワアイサ"
}
{
"alien":false,
upper:"mergus"
rank:"species"
sc:"serrator"
ja:"ウミアイサ"
}
{
"alien":false,
upper:"mergus"
rank:"species"
sc:"squamatus"
ja:"コウライアイサ"
}
{
"alien":false,
upper:"tachybaptus"
rank:"species"
sc:"ruficollis"
ja:"カイツブリ"
}
{
"alien":false,
upper:"podiceps"
rank:"species"
sc:"grisegena"
ja:"アカエリカイツブリ"
}
{
"alien":false,
upper:"podiceps"
rank:"species"
sc:"cristatus"
ja:"カンムリカイツブリ"
}
{
"alien":false,
upper:"podiceps"
rank:"species"
sc:"auritus"
ja:"ミミカイツブリ"
}
{
"alien":false,
upper:"podiceps"
rank:"species"
sc:"nigricollis"
ja:"ハジロカイツブリ"
}
{
"alien":false,
upper:"phaethon"
rank:"species"
sc:"rubricauda"
ja:"アカオネッタイチョウ"
}
{
"alien":false,
upper:"phaethon"
rank:"species"
sc:"lepturus"
ja:"シラオネッタイチョウ"
}
{
"alien":false,
upper:"syrrhaptes"
rank:"species"
sc:"paradoxus"
ja:"サケイ"
}
{
"alien":false,
upper:"columba"
rank:"species"
sc:"oenas"
ja:"ヒメモリバト"
}
{
"alien":false,
upper:"columba"
rank:"species"
sc:"janthina"
ja:"カラスバト"
}
{
"alien":false,
upper:"columba"
rank:"species"
sc:"versicolor"
ja:"オガサワラカラスバト"
}
{
"alien":false,
upper:"columba"
rank:"species"
sc:"jouyi"
ja:"リュウキュウカラスバト"
}
{
"alien":false,
upper:"streptopelia"
rank:"species"
sc:"orientalis"
ja:"キジバト"
}
{
"alien":false,
upper:"streptopelia"
rank:"species"
sc:"decaocto"
ja:"シラコバト"
}
{
"alien":false,
upper:"streptopelia"
rank:"species"
sc:"tranquebarica"
ja:"ベニバト"
}
{
"alien":false,
upper:"chalcophaps"
rank:"species"
sc:"indica"
ja:"キンバト"
}
{
"alien":false,
upper:"treron"
rank:"species"
sc:"sieboldii"
ja:"アオバト"
}
{
"alien":false,
upper:"treron"
rank:"species"
sc:"formosae"
ja:"ズアカアオバト"
}
{
"alien":false,
upper:"ptilinopus"
rank:"species"
sc:"leclancheri"
ja:"クロアゴヒメアオバト"
}
{
"alien":false,
upper:"gavia"
rank:"species"
sc:"stellata"
ja:"アビ"
}
{
"alien":false,
upper:"gavia"
rank:"species"
sc:"arctica"
ja:"オオハム"
}
{
"alien":false,
upper:"gavia"
rank:"species"
sc:"pacifica"
ja:"シロエリオオハム"
}
{
"alien":false,
upper:"gavia"
rank:"species"
sc:"immer"
ja:"ハシグロアビ"
}
{
"alien":false,
upper:"gavia"
rank:"species"
sc:"adamsii"
ja:"ハシジロアビ"
}
{
"alien":false,
upper:"phoebastria"
rank:"species"
sc:"immutabilis"
ja:"コアホウドリ"
}
{
"alien":false,
upper:"phoebastria"
rank:"species"
sc:"nigripes"
ja:"クロアシアホウドリ"
}
{
"alien":false,
upper:"phoebastria"
rank:"species"
sc:"albatrus"
ja:"アホウドリ"
}
{
"alien":false,
upper:"fulmarus"
rank:"species"
sc:"glacialis"
ja:"フルマカモメ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"solandri"
ja:"ハジロミズナギドリ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"externa"
ja:"オオシロハラミズナギドリ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"neglecta"
ja:"カワリシロハラミズナギドリ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"phaeopygia"
ja:"ハワイシロハラミズナギドリ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"inexpectata"
ja:"マダラシロハラミズナギドリ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"nigripennis"
ja:"ハグロシロハラミズナギドリ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"hypoleuca"
ja:"シロハラミズナギドリ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"longirostris"
ja:"ヒメシロハラミズナギドリ"
}
{
"alien":false,
upper:"calonectris"
rank:"species"
sc:"leucomelas"
ja:"オオミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"pacificus"
ja:"オナガミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"bulleri"
ja:"ミナミオナガミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"griseus"
ja:"ハイイロミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"tenuirostris"
ja:"ハシボソミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"creatopus"
ja:"シロハラアカアシミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"carneipes"
ja:"アカアシミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"nativitatis"
ja:"コミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"puffinus"
ja:"マンクスミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"newelli"
ja:"ハワイセグロミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"lherminieri"
ja:"セグロミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"bryani"
ja:"オガサワラヒメミズナギドリ"
}
{
"alien":false,
upper:"bulweria"
rank:"species"
sc:"bulwerii"
ja:"アナドリ"
}
{
"alien":false,
upper:"oceanites"
rank:"species"
sc:"oceanicus"
ja:"アシナガウミツバメ"
}
{
"alien":false,
upper:"oceanodroma"
rank:"species"
sc:"castro"
ja:"クロコシジロウミツバメ"
}
{
"alien":false,
upper:"oceanodroma"
rank:"species"
sc:"monorhis"
ja:"ヒメクロウミツバメ"
}
{
"alien":false,
upper:"oceanodroma"
rank:"species"
sc:"leucorhoa"
ja:"コシジロウミツバメ"
}
{
"alien":false,
upper:"oceanodroma"
rank:"species"
sc:"tristrami"
ja:"オーストンウミツバメ"
}
{
"alien":false,
upper:"oceanodroma"
rank:"species"
sc:"matsudairae"
ja:"クロウミツバメ"
}
{
"alien":false,
upper:"oceanodroma"
rank:"species"
sc:"furcata"
ja:"ハイイロウミツバメ"
}
{
"alien":false,
upper:"ciconia"
rank:"species"
sc:"nigra"
ja:"ナベコウ"
}
{
"alien":false,
upper:"ciconia"
rank:"species"
sc:"boyciana"
ja:"コウノトリ"
}
{
"alien":false,
upper:"fregata"
rank:"species"
sc:"minor"
ja:"オオグンカンドリ"
}
{
"alien":false,
upper:"fregata"
rank:"species"
sc:"ariel"
ja:"コグンカンドリ"
}
{
"alien":false,
upper:"sula"
rank:"species"
sc:"dactylatra"
ja:"アオツラカツオドリ"
}
{
"alien":false,
upper:"sula"
rank:"species"
sc:"sula"
ja:"アカアシカツオドリ"
}
{
"alien":false,
upper:"sula"
rank:"species"
sc:"leucogaster"
ja:"カツオドリ"
}
{
"alien":false,
upper:"phalacrocorax"
rank:"species"
sc:"pelagicus"
ja:"ヒメウ"
}
{
"alien":false,
upper:"phalacrocorax"
rank:"species"
sc:"urile"
ja:"チシマウガラス"
}
{
"alien":false,
upper:"phalacrocorax"
rank:"species"
sc:"carbo"
ja:"カワウ"
}
{
"alien":false,
upper:"phalacrocorax"
rank:"species"
sc:"capillatus"
ja:"ウミウ"
}
{
"alien":false,
upper:"pelecanus"
rank:"species"
sc:"onocrotalus"
ja:"モモイロペリカン"
}
{
"alien":false,
upper:"pelecanus"
rank:"species"
sc:"philippensis"
ja:"ホシバシペリカン"
}
{
"alien":false,
upper:"pelecanus"
rank:"species"
sc:"crispus"
ja:"ハイイロペリカン"
}
{
"alien":false,
upper:"botaurus"
rank:"species"
sc:"stellaris"
ja:"サンカノゴイ"
}
{
"alien":false,
upper:"ixobrychus"
rank:"species"
sc:"sinensis"
ja:"ヨシゴイ"
}
{
"alien":false,
upper:"ixobrychus"
rank:"species"
sc:"eurhythmus"
ja:"オオヨシゴイ"
}
{
"alien":false,
upper:"ixobrychus"
rank:"species"
sc:"cinnamomeus"
ja:"リュウキュウヨシゴイ"
}
{
"alien":false,
upper:"ixobrychus"
rank:"species"
sc:"flavicollis"
ja:"タカサゴクロサギ"
}
{
"alien":false,
upper:"gorsachius"
rank:"species"
sc:"goisagi"
ja:"ミゾゴイ"
}
{
"alien":false,
upper:"gorsachius"
rank:"species"
sc:"melanolophus"
ja:"ズグロミゾゴイ"
}
{
"alien":false,
upper:"nycticorax"
rank:"species"
sc:"nycticorax"
ja:"ゴイサギ"
}
{
"alien":false,
upper:"nycticorax"
rank:"species"
sc:"caledonicus"
ja:"ハシブトゴイ"
}
{
"alien":false,
upper:"butorides"
rank:"species"
sc:"striata"
ja:"ササゴイ"
}
{
"alien":false,
upper:"ardeola"
rank:"species"
sc:"bacchus"
ja:"アカガシラサギ"
}
{
"alien":false,
upper:"bubulcus"
rank:"species"
sc:"ibis"
ja:"アマサギ"
}
{
"alien":false,
upper:"ardea"
rank:"species"
sc:"cinerea"
ja:"アオサギ"
}
{
"alien":false,
upper:"ardea"
rank:"species"
sc:"purpurea"
ja:"ムラサキサギ"
}
{
"alien":false,
upper:"ardea"
rank:"species"
sc:"alba"
ja:"ダイサギ"
}
{
"alien":false,
upper:"egretta"
rank:"species"
sc:"intermedia"
ja:"チュウサギ"
}
{
"alien":false,
upper:"egretta"
rank:"species"
sc:"garzetta"
ja:"コサギ"
}
{
"alien":false,
upper:"egretta"
rank:"species"
sc:"sacra"
ja:"クロサギ"
}
{
"alien":false,
upper:"egretta"
rank:"species"
sc:"eulophotes"
ja:"カラシラサギ"
}
{
"alien":false,
upper:"threskiornis"
rank:"species"
sc:"melanocephalus"
ja:"クロトキ"
}
{
"alien":false,
upper:"nipponia"
rank:"species"
sc:"nippon"
ja:"トキ"
}
{
"alien":false,
upper:"platalea"
rank:"species"
sc:"leucorodia"
ja:"ヘラサギ"
}
{
"alien":false,
upper:"platalea"
rank:"species"
sc:"minor"
ja:"クロツラヘラサギ"
}
{
"alien":false,
upper:"grus"
rank:"species"
sc:"leucogeranus"
ja:"ソデグロヅル"
}
{
"alien":false,
upper:"grus"
rank:"species"
sc:"canadensis"
ja:"カナダヅル"
}
{
"alien":false,
upper:"grus"
rank:"species"
sc:"vipio"
ja:"マナヅル"
}
{
"alien":false,
upper:"grus"
rank:"species"
sc:"japonensis"
ja:"タンチョウ"
}
{
"alien":false,
upper:"grus"
rank:"species"
sc:"grus"
ja:"クロヅル"
}
{
"alien":false,
upper:"grus"
rank:"species"
sc:"monacha"
ja:"ナベヅル"
}
{
"alien":false,
upper:"anthropoides"
rank:"species"
sc:"virgo"
ja:"アネハヅル"
}
{
"alien":false,
upper:"coturnicops"
rank:"species"
sc:"exquisitus"
ja:"シマクイナ"
}
{
"alien":false,
upper:"rallina"
rank:"species"
sc:"eurizonoides"
ja:"オオクイナ"
}
{
"alien":false,
upper:"gallirallus"
rank:"species"
sc:"okinawae"
ja:"ヤンバルクイナ"
}
{
"alien":false,
upper:"gallirallus"
rank:"species"
sc:"striatus"
ja:"ミナミクイナ"
}
{
"alien":false,
upper:"rallus"
rank:"species"
sc:"aquaticus"
ja:"クイナ"
}
{
"alien":false,
upper:"amaurornis"
rank:"species"
sc:"phoenicurus"
ja:"シロハラクイナ"
}
{
"alien":false,
upper:"porzana"
rank:"species"
sc:"pusilla"
ja:"ヒメクイナ"
}
{
"alien":false,
upper:"porzana"
rank:"species"
sc:"porzana"
ja:"コモンクイナ"
}
{
"alien":false,
upper:"porzana"
rank:"species"
sc:"fusca"
ja:"ヒクイナ"
}
{
"alien":false,
upper:"porzana"
rank:"species"
sc:"paykullii"
ja:"コウライクイナ"
}
{
"alien":false,
upper:"porzana"
rank:"species"
sc:"cinerea"
ja:"マミジロクイナ"
}
{
"alien":false,
upper:"gallicrex"
rank:"species"
sc:"cinerea"
ja:"ツルクイナ"
}
{
"alien":false,
upper:"gallinula"
rank:"species"
sc:"chloropus"
ja:"バン"
}
{
"alien":false,
upper:"fulica"
rank:"species"
sc:"atra"
ja:"オオバン"
}
{
"alien":false,
upper:"otis"
rank:"species"
sc:"tarda"
ja:"ノガン"
}
{
"alien":false,
upper:"tetrax"
rank:"species"
sc:"tetrax"
ja:"ヒメノガン"
}
{
"alien":false,
upper:"centropus"
rank:"species"
sc:"bengalensis"
ja:"バンケン"
}
{
"alien":false,
upper:"clamator"
rank:"species"
sc:"coromandus"
ja:"カンムリカッコウ"
}
{
"alien":false,
upper:"eudynamys"
rank:"species"
sc:"scolopaceus"
ja:"オニカッコウ"
}
{
"alien":false,
upper:"urodynamis"
rank:"species"
sc:"taitensis"
ja:"キジカッコウ"
}
{
"alien":false,
upper:"surniculus"
rank:"species"
sc:"lugubris"
ja:"オウチュウカッコウ"
}
{
"alien":false,
upper:"hierococcyx"
rank:"species"
sc:"sparverioides"
ja:"オオジュウイチ"
}
{
"alien":false,
upper:"hierococcyx"
rank:"species"
sc:"hyperythrus"
ja:"ジュウイチ"
}
{
"alien":false,
upper:"cuculus"
rank:"species"
sc:"poliocephalus"
ja:"ホトトギス"
}
{
"alien":false,
upper:"cuculus"
rank:"species"
sc:"micropterus"
ja:"セグロカッコウ"
}
{
"alien":false,
upper:"cuculus"
rank:"species"
sc:"optatus"
ja:"ツツドリ"
}
{
"alien":false,
upper:"cuculus"
rank:"species"
sc:"canorus"
ja:"カッコウ"
}
{
"alien":false,
upper:"caprimulgus"
rank:"species"
sc:"indicus"
ja:"ヨタカ"
}
{
"alien":false,
upper:"aerodramus"
rank:"species"
sc:"brevirostris"
ja:"ヒマラヤアナツバメ"
}
{
"alien":false,
upper:"hirundapus"
rank:"species"
sc:"caudacutus"
ja:"ハリオアマツバメ"
}
{
"alien":false,
upper:"apus"
rank:"species"
sc:"pacificus"
ja:"アマツバメ"
}
{
"alien":false,
upper:"apus"
rank:"species"
sc:"nipalensis"
ja:"ヒメアマツバメ"
}
{
"alien":false,
upper:"vanellus"
rank:"species"
sc:"vanellus"
ja:"タゲリ"
}
{
"alien":false,
upper:"vanellus"
rank:"species"
sc:"cinereus"
ja:"ケリ"
}
{
"alien":false,
upper:"pluvialis"
rank:"species"
sc:"apricaria"
ja:"ヨーロッパムナグロ"
}
{
"alien":false,
upper:"pluvialis"
rank:"species"
sc:"fulva"
ja:"ムナグロ"
}
{
"alien":false,
upper:"pluvialis"
rank:"species"
sc:"dominica"
ja:"アメリカムナグロ"
}
{
"alien":false,
upper:"pluvialis"
rank:"species"
sc:"squatarola"
ja:"ダイゼン"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"hiaticula"
ja:"ハジロコチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"semipalmatus"
ja:"ミズカキチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"placidus"
ja:"イカルチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"dubius"
ja:"コチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"alexandrinus"
ja:"シロチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"mongolus"
ja:"メダイチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"leschenaultii"
ja:"オオメダイチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"veredus"
ja:"オオチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"morinellus"
ja:"コバシチドリ"
}
{
"alien":false,
upper:"haematopus"
rank:"species"
sc:"ostralegus"
ja:"ミヤコドリ"
}
{
"alien":false,
upper:"himantopus"
rank:"species"
sc:"himantopus"
ja:"セイタカシギ"
}
{
"alien":false,
upper:"recurvirostra"
rank:"species"
sc:"avosetta"
ja:"ソリハシセイタカシギ"
}
{
"alien":false,
upper:"scolopax"
rank:"species"
sc:"rusticola"
ja:"ヤマシギ"
}
{
"alien":false,
upper:"scolopax"
rank:"species"
sc:"mira"
ja:"アマミヤマシギ"
}
{
"alien":false,
upper:"lymnocryptes"
rank:"species"
sc:"minimus"
ja:"コシギ"
}
{
"alien":false,
upper:"gallinago"
rank:"species"
sc:"solitaria"
ja:"アオシギ"
}
{
"alien":false,
upper:"gallinago"
rank:"species"
sc:"hardwickii"
ja:"オオジシギ"
}
{
"alien":false,
upper:"gallinago"
rank:"species"
sc:"stenura"
ja:"ハリオシギ"
}
{
"alien":false,
upper:"gallinago"
rank:"species"
sc:"megala"
ja:"チュウジシギ"
}
{
"alien":false,
upper:"gallinago"
rank:"species"
sc:"gallinago"
ja:"タシギ"
}
{
"alien":false,
upper:"limnodromus"
rank:"species"
sc:"griseus"
ja:"アメリカオオハシシギ"
}
{
"alien":false,
upper:"limnodromus"
rank:"species"
sc:"scolopaceus"
ja:"オオハシシギ"
}
{
"alien":false,
upper:"limnodromus"
rank:"species"
sc:"semipalmatus"
ja:"シベリアオオハシシギ"
}
{
"alien":false,
upper:"limosa"
rank:"species"
sc:"limosa"
ja:"オグロシギ"
}
{
"alien":false,
upper:"limosa"
rank:"species"
sc:"haemastica"
ja:"アメリカオグロシギ"
}
{
"alien":false,
upper:"limosa"
rank:"species"
sc:"lapponica"
ja:"オオソリハシシギ"
}
{
"alien":false,
upper:"numenius"
rank:"species"
sc:"minutus"
ja:"コシャクシギ"
}
{
"alien":false,
upper:"numenius"
rank:"species"
sc:"phaeopus"
ja:"チュウシャクシギ"
}
{
"alien":false,
upper:"numenius"
rank:"species"
sc:"tahitiensis"
ja:"ハリモモチュウシャク"
}
{
"alien":false,
upper:"numenius"
rank:"species"
sc:"tenuirostris"
ja:"シロハラチュウシャクシギ"
}
{
"alien":false,
upper:"numenius"
rank:"species"
sc:"arquata"
ja:"ダイシャクシギ"
}
{
"alien":false,
upper:"numenius"
rank:"species"
sc:"madagascariensis"
ja:"ホウロクシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"erythropus"
ja:"ツルシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"totanus"
ja:"アカアシシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"stagnatilis"
ja:"コアオアシシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"nebularia"
ja:"アオアシシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"guttifer"
ja:"カラフトアオアシシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"melanoleuca"
ja:"オオキアシシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"flavipes"
ja:"コキアシシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"ochropus"
ja:"クサシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"glareola"
ja:"タカブシギ"
}
{
"alien":false,
upper:"heteroscelus"
rank:"species"
sc:"brevipes"
ja:"キアシシギ"
}
{
"alien":false,
upper:"heteroscelus"
rank:"species"
sc:"incanus"
ja:"メリケンキアシシギ"
}
{
"alien":false,
upper:"xenus"
rank:"species"
sc:"cinereus"
ja:"ソリハシシギ"
}
{
"alien":false,
upper:"actitis"
rank:"species"
sc:"hypoleucos"
ja:"イソシギ"
}
{
"alien":false,
upper:"actitis"
rank:"species"
sc:"macularius"
ja:"アメリカイソシギ"
}
{
"alien":false,
upper:"arenaria"
rank:"species"
sc:"interpres"
ja:"キョウジョシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"tenuirostris"
ja:"オバシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"canutus"
ja:"コオバシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"alba"
ja:"ミユビシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"mauri"
ja:"ヒメハマシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"ruficollis"
ja:"トウネン"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"minuta"
ja:"ヨーロッパトウネン"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"temminckii"
ja:"オジロトウネン"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"subminuta"
ja:"ヒバリシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"fuscicollis"
ja:"コシジロウズラシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"bairdii"
ja:"ヒメウズラシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"melanotos"
ja:"アメリカウズラシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"acuminata"
ja:"ウズラシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"ferruginea"
ja:"サルハマシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"ptilocnemis"
ja:"チシマシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"alpina"
ja:"ハマシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"himantopus"
ja:"アシナガシギ"
}
{
"alien":false,
upper:"eurynorhynchus"
rank:"species"
sc:"pygmeus"
ja:"ヘラシギ"
}
{
"alien":false,
upper:"limicola"
rank:"species"
sc:"falcinellus"
ja:"キリアイ"
}
{
"alien":false,
upper:"tryngites"
rank:"species"
sc:"subruficollis"
ja:"コモンシギ"
}
{
"alien":false,
upper:"philomachus"
rank:"species"
sc:"pugnax"
ja:"エリマキシギ"
}
{
"alien":false,
upper:"phalaropus"
rank:"species"
sc:"tricolor"
ja:"アメリカヒレアシシギ"
}
{
"alien":false,
upper:"phalaropus"
rank:"species"
sc:"lobatus"
ja:"アカエリヒレアシシギ"
}
{
"alien":false,
upper:"phalaropus"
rank:"species"
sc:"fulicarius"
ja:"ハイイロヒレアシシギ"
}
{
"alien":false,
upper:"hydrophasianus"
rank:"species"
sc:"chirurgus"
ja:"レンカク"
}
{
"alien":false,
upper:"rostratula"
rank:"species"
sc:"benghalensis"
ja:"タマシギ"
}
{
"alien":false,
upper:"turnix"
rank:"species"
sc:"suscitator"
ja:"ミフウズラ"
}
{
"alien":false,
upper:"glareola"
rank:"species"
sc:"maldivarum"
ja:"ツバメチドリ"
}
{
"alien":false,
upper:"anous"
rank:"species"
sc:"stolidus"
ja:"クロアジサシ"
}
{
"alien":false,
upper:"anous"
rank:"species"
sc:"minutus"
ja:"ヒメクロアジサシ"
}
{
"alien":false,
upper:"procelsterna"
rank:"species"
sc:"cerulea"
ja:"ハイイロアジサシ"
}
{
"alien":false,
upper:"gygis"
rank:"species"
sc:"alba"
ja:"シロアジサシ"
}
{
"alien":false,
upper:"rissa"
rank:"species"
sc:"tridactyla"
ja:"ミツユビカモメ"
}
{
"alien":false,
upper:"rissa"
rank:"species"
sc:"brevirostris"
ja:"アカアシミツユビカモメ"
}
{
"alien":false,
upper:"pagophila"
rank:"species"
sc:"eburnea"
ja:"ゾウゲカモメ"
}
{
"alien":false,
upper:"xema"
rank:"species"
sc:"sabini"
ja:"クビワカモメ"
}
{
"alien":false,
upper:"rhodostethia"
rank:"species"
sc:"rosea"
ja:"ヒメクビワカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"genei"
ja:"ハシボソカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"philadelphia"
ja:"ボナパルトカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"brunnicephalus"
ja:"チャガシラカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"ridibundus"
ja:"ユリカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"saundersi"
ja:"ズグロカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"minutus"
ja:"ヒメカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"atricilla"
ja:"ワライカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"pipixcan"
ja:"アメリカズグロカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"relictus"
ja:"ゴビズキンカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"ichthyaetus"
ja:"オオズグロカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"crassirostris"
ja:"ウミネコ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"canus"
ja:"カモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"glaucescens"
ja:"ワシカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"hyperboreus"
ja:"シロカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"glaucoides"
ja:"アイスランドカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"thayeri"
ja:"カナダカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"argentatus"
ja:"セグロカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"cachinnans"
ja:"キアシセグロカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"schistisagus"
ja:"オオセグロカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"fuscus"
ja:"ニシセグロカモメ"
}
{
"alien":false,
upper:"gelochelidon"
rank:"species"
sc:"nilotica"
ja:"ハシブトアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"caspia"
ja:"オニアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"bergii"
ja:"オオアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"bengalensis"
ja:"ベンガルアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"albifrons"
ja:"コアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"aleutica"
ja:"コシジロアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"lunata"
ja:"ナンヨウマミジロアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"anaethetus"
ja:"マミジロアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"fuscata"
ja:"セグロアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"dougallii"
ja:"ベニアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"sumatrana"
ja:"エリグロアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"hirundo"
ja:"アジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"paradisaea"
ja:"キョクアジサシ"
}
{
"alien":false,
upper:"chlidonias"
rank:"species"
sc:"hybrida"
ja:"クロハラアジサシ"
}
{
"alien":false,
upper:"chlidonias"
rank:"species"
sc:"leucopterus"
ja:"ハジロクロハラアジサシ"
}
{
"alien":false,
upper:"chlidonias"
rank:"species"
sc:"niger"
ja:"ハシグロクロハラアジサシ"
}
{
"alien":false,
upper:"stercorarius"
rank:"species"
sc:"maccormicki"
ja:"オオトウゾクカモメ"
}
{
"alien":false,
upper:"stercorarius"
rank:"species"
sc:"pomarinus"
ja:"トウゾクカモメ"
}
{
"alien":false,
upper:"stercorarius"
rank:"species"
sc:"parasiticus"
ja:"クロトウゾクカモメ"
}
{
"alien":false,
upper:"stercorarius"
rank:"species"
sc:"longicaudus"
ja:"シロハラトウゾクカモメ"
}
{
"alien":false,
upper:"alle"
rank:"species"
sc:"alle"
ja:"ヒメウミスズメ"
}
{
"alien":false,
upper:"uria"
rank:"species"
sc:"lomvia"
ja:"ハシブトウミガラス"
}
{
"alien":false,
upper:"uria"
rank:"species"
sc:"aalge"
ja:"ウミガラス"
}
{
"alien":false,
upper:"alca"
rank:"species"
sc:"torda"
ja:"オオハシウミガラス"
}
{
"alien":false,
upper:"cepphus"
rank:"species"
sc:"columba"
ja:"ウミバト"
}
{
"alien":false,
upper:"cepphus"
rank:"species"
sc:"carbo"
ja:"ケイマフリ"
}
{
"alien":false,
upper:"brachyramphus"
rank:"species"
sc:"perdix"
ja:"マダラウミスズメ"
}
{
"alien":false,
upper:"synthliboramphus"
rank:"species"
sc:"antiquus"
ja:"ウミスズメ"
}
{
"alien":false,
upper:"synthliboramphus"
rank:"species"
sc:"wumizusume"
ja:"カンムリウミスズメ"
}
{
"alien":false,
upper:"aethia"
rank:"species"
sc:"psittacula"
ja:"ウミオウム"
}
{
"alien":false,
upper:"aethia"
rank:"species"
sc:"pusilla"
ja:"コウミスズメ"
}
{
"alien":false,
upper:"aethia"
rank:"species"
sc:"pygmaea"
ja:"シラヒゲウミスズメ"
}
{
"alien":false,
upper:"aethia"
rank:"species"
sc:"cristatella"
ja:"エトロフウミスズメ"
}
{
"alien":false,
upper:"cerorhinca"
rank:"species"
sc:"monocerata"
ja:"ウトウ"
}
{
"alien":false,
upper:"fratercula"
rank:"species"
sc:"corniculata"
ja:"ツノメドリ"
}
{
"alien":false,
upper:"fratercula"
rank:"species"
sc:"cirrhata"
ja:"エトピリカ"
}
{
"alien":false,
upper:"pandion"
rank:"species"
sc:"haliaetus"
ja:"ミサゴ"
}
{
"alien":false,
upper:"pernis"
rank:"species"
sc:"ptilorhynchus"
ja:"ハチクマ"
}
{
"alien":false,
upper:"elanus"
rank:"species"
sc:"caeruleus"
ja:"カタグロトビ"
}
{
"alien":false,
upper:"milvus"
rank:"species"
sc:"migrans"
ja:"トビ"
}
{
"alien":false,
upper:"haliaeetus"
rank:"species"
sc:"albicilla"
ja:"オジロワシ"
}
{
"alien":false,
upper:"haliaeetus"
rank:"species"
sc:"leucocephalus"
ja:"ハクトウワシ"
}
{
"alien":false,
upper:"haliaeetus"
rank:"species"
sc:"pelagicus"
ja:"オオワシ"
}
{
"alien":false,
upper:"aegypius"
rank:"species"
sc:"monachus"
ja:"クロハゲワシ"
}
{
"alien":false,
upper:"spilornis"
rank:"species"
sc:"cheela"
ja:"カンムリワシ"
}
{
"alien":false,
upper:"circus"
rank:"species"
sc:"aeruginosus"
ja:"ヨーロッパチュウヒ"
}
{
"alien":false,
upper:"circus"
rank:"species"
sc:"spilonotus"
ja:"チュウヒ"
}
{
"alien":false,
upper:"circus"
rank:"species"
sc:"cyaneus"
ja:"ハイイロチュウヒ"
}
{
"alien":false,
upper:"circus"
rank:"species"
sc:"macrourus"
ja:"ウスハイイロチュウヒ"
}
{
"alien":false,
upper:"circus"
rank:"species"
sc:"melanoleucos"
ja:"マダラチュウヒ"
}
{
"alien":false,
upper:"accipiter"
rank:"species"
sc:"soloensis"
ja:"アカハラダカ"
}
{
"alien":false,
upper:"accipiter"
rank:"species"
sc:"gularis"
ja:"ツミ"
}
{
"alien":false,
upper:"accipiter"
rank:"species"
sc:"nisus"
ja:"ハイタカ"
}
{
"alien":false,
upper:"accipiter"
rank:"species"
sc:"gentilis"
ja:"オオタカ"
}
{
"alien":false,
upper:"butastur"
rank:"species"
sc:"indicus"
ja:"サシバ"
}
{
"alien":false,
upper:"buteo"
rank:"species"
sc:"buteo"
ja:"ノスリ"
}
{
"alien":false,
upper:"buteo"
rank:"species"
sc:"hemilasius"
ja:"オオノスリ"
}
{
"alien":false,
upper:"buteo"
rank:"species"
sc:"lagopus"
ja:"ケアシノスリ"
}
{
"alien":false,
upper:"aquila"
rank:"species"
sc:"clanga"
ja:"カラフトワシ"
}
{
"alien":false,
upper:"aquila"
rank:"species"
sc:"heliaca"
ja:"カタシロワシ"
}
{
"alien":false,
upper:"aquila"
rank:"species"
sc:"chrysaetos"
ja:"イヌワシ"
}
{
"alien":false,
upper:"nisaetus"
rank:"species"
sc:"nipalensis"
ja:"クマタカ"
}
{
"alien":false,
upper:"tyto"
rank:"species"
sc:"longimembris"
ja:"ヒガシメンフクロウ"
}
{
"alien":false,
upper:"otus"
rank:"species"
sc:"lempiji"
ja:"オオコノハズク"
}
{
"alien":false,
upper:"otus"
rank:"species"
sc:"sunia"
ja:"コノハズク"
}
{
"alien":false,
upper:"otus"
rank:"species"
sc:"elegans"
ja:"リュウキュウコノハズク"
}
{
"alien":false,
upper:"bubo"
rank:"species"
sc:"scandiacus"
ja:"シロフクロウ"
}
{
"alien":false,
upper:"bubo"
rank:"species"
sc:"bubo"
ja:"ワシミミズク"
}
{
"alien":false,
upper:"ketupa"
rank:"species"
sc:"blakistoni"
ja:"シマフクロウ"
}
{
"alien":false,
upper:"strix"
rank:"species"
sc:"uralensis"
ja:"フクロウ"
}
{
"alien":false,
upper:"aegolius"
rank:"species"
sc:"funereus"
ja:"キンメフクロウ"
}
{
"alien":false,
upper:"ninox"
rank:"species"
sc:"scutulata"
ja:"アオバズク"
}
{
"alien":false,
upper:"asio"
rank:"species"
sc:"otus"
ja:"トラフズク"
}
{
"alien":false,
upper:"asio"
rank:"species"
sc:"flammeus"
ja:"コミミズク"
}
{
"alien":false,
upper:"upupa"
rank:"species"
sc:"epops"
ja:"ヤツガシラ"
}
{
"alien":false,
upper:"halcyon"
rank:"species"
sc:"coromanda"
ja:"アカショウビン"
}
{
"alien":false,
upper:"halcyon"
rank:"species"
sc:"smyrnensis"
ja:"アオショウビン"
}
{
"alien":false,
upper:"halcyon"
rank:"species"
sc:"pileata"
ja:"ヤマショウビン"
}
{
"alien":false,
upper:"todiramphus"
rank:"species"
sc:"chloris"
ja:"ナンヨウショウビン"
}
{
"alien":false,
upper:"todiramphus"
rank:"species"
sc:"miyakoensis"
ja:"ミヤコショウビン"
}
{
"alien":false,
upper:"alcedo"
rank:"species"
sc:"atthis"
ja:"カワセミ"
}
{
"alien":false,
upper:"ceyx"
rank:"species"
sc:"erithaca"
ja:"ミツユビカワセミ"
}
{
"alien":false,
upper:"megaceryle"
rank:"species"
sc:"lugubris"
ja:"ヤマセミ"
}
{
"alien":false,
upper:"merops"
rank:"species"
sc:"ornatus"
ja:"ハチクイ"
}
{
"alien":false,
upper:"eurystomus"
rank:"species"
sc:"orientalis"
ja:"ブッポウソウ"
}
{
"alien":false,
upper:"jynx"
rank:"species"
sc:"torquilla"
ja:"アリスイ"
}
{
"alien":false,
upper:"dendrocopos"
rank:"species"
sc:"hyperythrus"
ja:"チャバラアカゲラ"
}
{
"alien":false,
upper:"dendrocopos"
rank:"species"
sc:"kizuki"
ja:"コゲラ"
}
{
"alien":false,
upper:"dendrocopos"
rank:"species"
sc:"minor"
ja:"コアカゲラ"
}
{
"alien":false,
upper:"dendrocopos"
rank:"species"
sc:"leucotos"
ja:"オオアカゲラ"
}
{
"alien":false,
upper:"dendrocopos"
rank:"species"
sc:"major"
ja:"アカゲラ"
}
{
"alien":false,
upper:"picoides"
rank:"species"
sc:"tridactylus"
ja:"ミユビゲラ"
}
{
"alien":false,
upper:"dryocopus"
rank:"species"
sc:"javensis"
ja:"キタタキ"
}
{
"alien":false,
upper:"dryocopus"
rank:"species"
sc:"martius"
ja:"クマゲラ"
}
{
"alien":false,
upper:"picus"
rank:"species"
sc:"awokera"
ja:"アオゲラ"
}
{
"alien":false,
upper:"picus"
rank:"species"
sc:"canus"
ja:"ヤマゲラ"
}
{
"alien":false,
upper:"sapheopipo"
rank:"species"
sc:"noguchii"
ja:"ノグチゲラ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"naumanni"
ja:"ヒメチョウゲンボウ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"tinnunculus"
ja:"チョウゲンボウ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"amurensis"
ja:"アカアシチョウゲンボウ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"columbarius"
ja:"コチョウゲンボウ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"subbuteo"
ja:"チゴハヤブサ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"cherrug"
ja:"ワキスジハヤブサ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"rusticolus"
ja:"シロハヤブサ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"peregrinus"
ja:"ハヤブサ"
}
{
"alien":false,
upper:"pitta"
rank:"species"
sc:"sordida"
ja:"ズグロヤイロチョウ"
}
{
"alien":false,
upper:"pitta"
rank:"species"
sc:"nympha"
ja:"ヤイロチョウ"
}
{
"alien":false,
upper:"artamus"
rank:"species"
sc:"leucorynchus"
ja:"モリツバメ"
}
{
"alien":false,
upper:"coracina"
rank:"species"
sc:"melaschistos"
ja:"アサクラサンショウクイ"
}
{
"alien":false,
upper:"pericrocotus"
rank:"species"
sc:"divaricatus"
ja:"サンショウクイ"
}
{
"alien":false,
upper:"oriolus"
rank:"species"
sc:"chinensis"
ja:"コウライウグイス"
}
{
"alien":false,
upper:"dicrurus"
rank:"species"
sc:"macrocercus"
ja:"オウチュウ"
}
{
"alien":false,
upper:"dicrurus"
rank:"species"
sc:"leucophaeus"
ja:"ハイイロオウチュウ"
}
{
"alien":false,
upper:"dicrurus"
rank:"species"
sc:"hottentottus"
ja:"カンムリオウチュウ"
}
{
"alien":false,
upper:"hypothymis"
rank:"species"
sc:"azurea"
ja:"クロエリヒタキ"
}
{
"alien":false,
upper:"terpsiphone"
rank:"species"
sc:"atrocaudata"
ja:"サンコウチョウ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"tigrinus"
ja:"チゴモズ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"bucephalus"
ja:"モズ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"cristatus"
ja:"アカモズ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"collurio"
ja:"セアカモズ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"isabellinus"
ja:"モウコアカモズ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"schach"
ja:"タカサゴモズ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"excubitor"
ja:"オオモズ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"sphenocercus"
ja:"オオカラモズ"
}
{
"alien":false,
upper:"garrulus"
rank:"species"
sc:"glandarius"
ja:"カケス"
}
{
"alien":false,
upper:"garrulus"
rank:"species"
sc:"lidthi"
ja:"ルリカケス"
}
{
"alien":false,
upper:"cyanopica"
rank:"species"
sc:"cyanus"
ja:"オナガ"
}
{
"alien":false,
upper:"pica"
rank:"species"
sc:"pica"
ja:"カササギ"
}
{
"alien":false,
upper:"nucifraga"
rank:"species"
sc:"caryocatactes"
ja:"ホシガラス"
}
{
"alien":false,
upper:"corvus"
rank:"species"
sc:"monedula"
ja:"ニシコクマルガラス"
}
{
"alien":false,
upper:"corvus"
rank:"species"
sc:"dauuricus"
ja:"コクマルガラス"
}
{
"alien":false,
upper:"corvus"
rank:"species"
sc:"frugilegus"
ja:"ミヤマガラス"
}
{
"alien":false,
upper:"corvus"
rank:"species"
sc:"corone"
ja:"ハシボソガラス"
}
{
"alien":false,
upper:"corvus"
rank:"species"
sc:"macrorhynchos"
ja:"ハシブトガラス"
}
{
"alien":false,
upper:"corvus"
rank:"species"
sc:"corax"
ja:"ワタリガラス"
}
{
"alien":false,
upper:"regulus"
rank:"species"
sc:"regulus"
ja:"キクイタダキ"
}
{
"alien":false,
upper:"remiz"
rank:"species"
sc:"pendulinus"
ja:"ツリスガラ"
}
{
"alien":false,
upper:"poecile"
rank:"species"
sc:"palustris"
ja:"ハシブトガラ"
}
{
"alien":false,
upper:"poecile"
rank:"species"
sc:"montanus"
ja:"コガラ"
}
{
"alien":false,
upper:"poecile"
rank:"species"
sc:"varius"
ja:"ヤマガラ"
}
{
"alien":false,
upper:"periparus"
rank:"species"
sc:"ater"
ja:"ヒガラ"
}
{
"alien":false,
upper:"periparus"
rank:"species"
sc:"venustulus"
ja:"キバラガラ"
}
{
"alien":false,
upper:"parus"
rank:"species"
sc:"minor"
ja:"シジュウカラ"
}
{
"alien":false,
upper:"cyanistes"
rank:"species"
sc:"cyanus"
ja:"ルリガラ"
}
{
"alien":false,
upper:"panurus"
rank:"species"
sc:"biarmicus"
ja:"ヒゲガラ"
}
{
"alien":false,
upper:"melanocorypha"
rank:"species"
sc:"bimaculata"
ja:"クビワコウテンシ"
}
{
"alien":false,
upper:"melanocorypha"
rank:"species"
sc:"mongolica"
ja:"コウテンシ"
}
{
"alien":false,
upper:"calandrella"
rank:"species"
sc:"brachydactyla"
ja:"ヒメコウテンシ"
}
{
"alien":false,
upper:"calandrella"
rank:"species"
sc:"cheleensis"
ja:"コヒバリ"
}
{
"alien":false,
upper:"alauda"
rank:"species"
sc:"arvensis"
ja:"ヒバリ"
}
{
"alien":false,
upper:"eremophila"
rank:"species"
sc:"alpestris"
ja:"ハマヒバリ"
}
{
"alien":false,
upper:"riparia"
rank:"species"
sc:"paludicola"
ja:"タイワンショウドウツバメ"
}
{
"alien":false,
upper:"riparia"
rank:"species"
sc:"riparia"
ja:"ショウドウツバメ"
}
{
"alien":false,
upper:"tachycineta"
rank:"species"
sc:"bicolor"
ja:"ミドリツバメ"
}
{
"alien":false,
upper:"hirundo"
rank:"species"
sc:"rustica"
ja:"ツバメ"
}
{
"alien":false,
upper:"hirundo"
rank:"species"
sc:"tahitica"
ja:"リュウキュウツバメ"
}
{
"alien":false,
upper:"hirundo"
rank:"species"
sc:"daurica"
ja:"コシアカツバメ"
}
{
"alien":false,
upper:"delichon"
rank:"species"
sc:"urbicum"
ja:"ニシイワツバメ"
}
{
"alien":false,
upper:"delichon"
rank:"species"
sc:"dasypus"
ja:"イワツバメ"
}
{
"alien":false,
upper:"pycnonotus"
rank:"species"
sc:"sinensis"
ja:"シロガシラ"
}
{
"alien":false,
upper:"hypsipetes"
rank:"species"
sc:"amaurotis"
ja:"ヒヨドリ"
}
{
"alien":false,
upper:"cettia"
rank:"species"
sc:"diphone"
ja:"ウグイス"
}
{
"alien":false,
upper:"urosphena"
rank:"species"
sc:"squameiceps"
ja:"ヤブサメ"
}
{
"alien":false,
upper:"aegithalos"
rank:"species"
sc:"caudatus"
ja:"エナガ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"trochilus"
ja:"キタヤナギムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"collybita"
ja:"チフチャフ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"sibilatrix"
ja:"モリムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"fuscatus"
ja:"ムジセッカ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"affinis"
ja:"キバラムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"schwarzi"
ja:"カラフトムジセッカ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"proregulus"
ja:"カラフトムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"inornatus"
ja:"キマユムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"borealis"
ja:"コムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"examinandus"
ja:"オオムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"xanthodryas"
ja:"メボソムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"plumbeitarsus"
ja:"ヤナギムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"borealoides"
ja:"エゾムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"coronatus"
ja:"センダイムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"ijimae"
ja:"イイジマムシクイ"
}
{
"alien":false,
upper:"sylvia"
rank:"species"
sc:"curruca"
ja:"コノドジロムシクイ"
}
{
"alien":false,
upper:"apalopteron"
rank:"species"
sc:"familiare"
ja:"メグロ"
}
{
"alien":false,
upper:"zosterops"
rank:"species"
sc:"erythropleurus"
ja:"チョウセンメジロ"
}
{
"alien":false,
upper:"zosterops"
rank:"species"
sc:"japonicus"
ja:"メジロ"
}
{
"alien":false,
upper:"locustella"
rank:"species"
sc:"lanceolata"
ja:"マキノセンニュウ"
}
{
"alien":false,
upper:"locustella"
rank:"species"
sc:"ochotensis"
ja:"シマセンニュウ"
}
{
"alien":false,
upper:"locustella"
rank:"species"
sc:"pleskei"
ja:"ウチヤマセンニュウ"
}
{
"alien":false,
upper:"locustella"
rank:"species"
sc:"certhiola"
ja:"シベリアセンニュウ"
}
{
"alien":false,
upper:"locustella"
rank:"species"
sc:"pryeri"
ja:"オオセッカ"
}
{
"alien":false,
upper:"locustella"
rank:"species"
sc:"fasciolata"
ja:"エゾセンニュウ"
}
{
"alien":false,
upper:"acrocephalus"
rank:"species"
sc:"orientalis"
ja:"オオヨシキリ"
}
{
"alien":false,
upper:"acrocephalus"
rank:"species"
sc:"bistrigiceps"
ja:"コヨシキリ"
}
{
"alien":false,
upper:"acrocephalus"
rank:"species"
sc:"sorghophilus"
ja:"セスジコヨシキリ"
}
{
"alien":false,
upper:"acrocephalus"
rank:"species"
sc:"agricola"
ja:"イナダヨシキリ"
}
{
"alien":false,
upper:"acrocephalus"
rank:"species"
sc:"dumetorum"
ja:"ヤブヨシキリ"
}
{
"alien":false,
upper:"acrocephalus"
rank:"species"
sc:"aedon"
ja:"ハシブトオオヨシキリ"
}
{
"alien":false,
upper:"iduna"
rank:"species"
sc:"caligata"
ja:"ヒメウタイムシクイ"
}
{
"alien":false,
upper:"cisticola"
rank:"species"
sc:"juncidis"
ja:"セッカ"
}
{
"alien":false,
upper:"bombycilla"
rank:"species"
sc:"garrulus"
ja:"キレンジャク"
}
{
"alien":false,
upper:"bombycilla"
rank:"species"
sc:"japonica"
ja:"ヒレンジャク"
}
{
"alien":false,
upper:"sitta"
rank:"species"
sc:"europaea"
ja:"ゴジュウカラ"
}
{
"alien":false,
upper:"certhia"
rank:"species"
sc:"familiaris"
ja:"キバシリ"
}
{
"alien":false,
upper:"troglodytes"
rank:"species"
sc:"troglodytes"
ja:"ミソサザイ"
}
{
"alien":false,
upper:"spodiopsar"
rank:"species"
sc:"sericeus"
ja:"ギンムクドリ"
}
{
"alien":false,
upper:"spodiopsar"
rank:"species"
sc:"cineraceus"
ja:"ムクドリ"
}
{
"alien":false,
upper:"agropsar"
rank:"species"
sc:"sturninus"
ja:"シベリアムクドリ"
}
{
"alien":false,
upper:"agropsar"
rank:"species"
sc:"philippensis"
ja:"コムクドリ"
}
{
"alien":false,
upper:"sturnia"
rank:"species"
sc:"sinensis"
ja:"カラムクドリ"
}
{
"alien":false,
upper:"pastor"
rank:"species"
sc:"roseus"
ja:"バライロムクドリ"
}
{
"alien":false,
upper:"sturnus"
rank:"species"
sc:"vulgaris"
ja:"ホシムクドリ"
}
{
"alien":false,
upper:"cinclus"
rank:"species"
sc:"pallasii"
ja:"カワガラス"
}
{
"alien":false,
upper:"zoothera"
rank:"species"
sc:"sibirica"
ja:"マミジロ"
}
{
"alien":false,
upper:"zoothera"
rank:"species"
sc:"dauma"
ja:"トラツグミ"
}
{
"alien":false,
upper:"cichlopasser"
rank:"species"
sc:"terrestris"
ja:"オガサワラガビチョウ"
}
{
"alien":false,
upper:"catharus"
rank:"species"
sc:"minimus"
ja:"ハイイロチャツグミ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"hortulorum"
ja:"カラアカハラ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"cardis"
ja:"クロツグミ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"merula"
ja:"クロウタドリ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"obscurus"
ja:"マミチャジナイ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"pallidus"
ja:"シロハラ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"chrysolaus"
ja:"アカハラ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"celaenops"
ja:"アカコッコ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"ruficollis"
ja:"ノドグロツグミ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"naumanni"
ja:"ツグミ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"pilaris"
ja:"ノハラツグミ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"iliacus"
ja:"ワキアカツグミ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"viscivorus"
ja:"ヤドリギツグミ"
}
{
"alien":false,
upper:"erithacus"
rank:"species"
sc:"rubecula"
ja:"ヨーロッパコマドリ"
}
{
"alien":false,
upper:"luscinia"
rank:"species"
sc:"akahige"
ja:"コマドリ"
}
{
"alien":false,
upper:"luscinia"
rank:"species"
sc:"komadori"
ja:"アカヒゲ"
}
{
"alien":false,
upper:"luscinia"
rank:"species"
sc:"svecica"
ja:"オガワコマドリ"
}
{
"alien":false,
upper:"luscinia"
rank:"species"
sc:"calliope"
ja:"ノゴマ"
}
{
"alien":false,
upper:"luscinia"
rank:"species"
sc:"cyane"
ja:"コルリ"
}
{
"alien":false,
upper:"luscinia"
rank:"species"
sc:"sibilans"
ja:"シマゴマ"
}
{
"alien":false,
upper:"tarsiger"
rank:"species"
sc:"cyanurus"
ja:"ルリビタキ"
}
{
"alien":false,
upper:"phoenicurus"
rank:"species"
sc:"erythronotus"
ja:"セアカジョウビタキ"
}
{
"alien":false,
upper:"phoenicurus"
rank:"species"
sc:"ochruros"
ja:"クロジョウビタキ"
}
{
"alien":false,
upper:"phoenicurus"
rank:"species"
sc:"phoenicurus"
ja:"シロビタイジョウビタキ"
}
{
"alien":false,
upper:"phoenicurus"
rank:"species"
sc:"auroreus"
ja:"ジョウビタキ"
}
{
"alien":false,
upper:"saxicola"
rank:"species"
sc:"rubetra"
ja:"マミジロノビタキ"
}
{
"alien":false,
upper:"saxicola"
rank:"species"
sc:"torquatus"
ja:"ノビタキ"
}
{
"alien":false,
upper:"saxicola"
rank:"species"
sc:"caprata"
ja:"クロノビタキ"
}
{
"alien":false,
upper:"saxicola"
rank:"species"
sc:"ferreus"
ja:"ヤマザキヒタキ"
}
{
"alien":false,
upper:"oenanthe"
rank:"species"
sc:"isabellina"
ja:"イナバヒタキ"
}
{
"alien":false,
upper:"oenanthe"
rank:"species"
sc:"oenanthe"
ja:"ハシグロヒタキ"
}
{
"alien":false,
upper:"oenanthe"
rank:"species"
sc:"pleschanka"
ja:"セグロサバクヒタキ"
}
{
"alien":false,
upper:"oenanthe"
rank:"species"
sc:"deserti"
ja:"サバクヒタキ"
}
{
"alien":false,
upper:"monticola"
rank:"species"
sc:"solitarius"
ja:"イソヒヨドリ"
}
{
"alien":false,
upper:"monticola"
rank:"species"
sc:"gularis"
ja:"ヒメイソヒヨ"
}
{
"alien":false,
upper:"muscicapa"
rank:"species"
sc:"striata"
ja:"ムナフヒタキ"
}
{
"alien":false,
upper:"muscicapa"
rank:"species"
sc:"griseisticta"
ja:"エゾビタキ"
}
{
"alien":false,
upper:"muscicapa"
rank:"species"
sc:"sibirica"
ja:"サメビタキ"
}
{
"alien":false,
upper:"muscicapa"
rank:"species"
sc:"dauurica"
ja:"コサメビタキ"
}
{
"alien":false,
upper:"muscicapa"
rank:"species"
sc:"ferruginea"
ja:"ミヤマヒタキ"
}
{
"alien":false,
upper:"ficedula"
rank:"species"
sc:"hypoleuca"
ja:"マダラヒタキ"
}
{
"alien":false,
upper:"ficedula"
rank:"species"
sc:"zanthopygia"
ja:"マミジロキビタキ"
}
{
"alien":false,
upper:"ficedula"
rank:"species"
sc:"narcissina"
ja:"キビタキ"
}
{
"alien":false,
upper:"ficedula"
rank:"species"
sc:"mugimaki"
ja:"ムギマキ"
}
{
"alien":false,
upper:"ficedula"
rank:"species"
sc:"albicilla"
ja:"オジロビタキ"
}
{
"alien":false,
upper:"cyanoptila"
rank:"species"
sc:"cyanomelana"
ja:"オオルリ"
}
{
"alien":false,
upper:"eumyias"
rank:"species"
sc:"thalassinus"
ja:"ロクショウヒタキ"
}
{
"alien":false,
upper:"niltava"
rank:"species"
sc:"vivida"
ja:"チャバラオオルリ"
}
{
"alien":false,
upper:"prunella"
rank:"species"
sc:"collaris"
ja:"イワヒバリ"
}
{
"alien":false,
upper:"prunella"
rank:"species"
sc:"montanella"
ja:"ヤマヒバリ"
}
{
"alien":false,
upper:"prunella"
rank:"species"
sc:"rubida"
ja:"カヤクグリ"
}
{
"alien":false,
upper:"passer"
rank:"species"
sc:"domesticus"
ja:"イエスズメ"
}
{
"alien":false,
upper:"passer"
rank:"species"
sc:"rutilans"
ja:"ニュウナイスズメ"
}
{
"alien":false,
upper:"passer"
rank:"species"
sc:"montanus"
ja:"スズメ"
}
{
"alien":false,
upper:"dendronanthus"
rank:"species"
sc:"indicus"
ja:"イワミセキレイ"
}
{
"alien":false,
upper:"motacilla"
rank:"species"
sc:"flava"
ja:"ツメナガセキレイ"
}
{
"alien":false,
upper:"motacilla"
rank:"species"
sc:"citreola"
ja:"キガシラセキレイ"
}
{
"alien":false,
upper:"motacilla"
rank:"species"
sc:"cinerea"
ja:"キセキレイ"
}
{
"alien":false,
upper:"motacilla"
rank:"species"
sc:"alba"
ja:"ハクセキレイ"
}
{
"alien":false,
upper:"motacilla"
rank:"species"
sc:"grandis"
ja:"セグロセキレイ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"richardi"
ja:"マミジロタヒバリ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"godlewskii"
ja:"コマミジロタヒバリ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"pratensis"
ja:"マキバタヒバリ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"trivialis"
ja:"ヨーロッパビンズイ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"hodgsoni"
ja:"ビンズイ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"gustavi"
ja:"セジロタヒバリ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"roseatus"
ja:"ウスベニタヒバリ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"cervinus"
ja:"ムネアカタヒバリ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"rubescens"
ja:"タヒバリ"
}
{
"alien":false,
upper:"fringilla"
rank:"species"
sc:"coelebs"
ja:"ズアオアトリ"
}
{
"alien":false,
upper:"fringilla"
rank:"species"
sc:"montifringilla"
ja:"アトリ"
}
{
"alien":false,
upper:"chloris"
rank:"species"
sc:"sinica"
ja:"カワラヒワ"
}
{
"alien":false,
upper:"carduelis"
rank:"species"
sc:"spinus"
ja:"マヒワ"
}
{
"alien":false,
upper:"carduelis"
rank:"species"
sc:"flammea"
ja:"ベニヒワ"
}
{
"alien":false,
upper:"carduelis"
rank:"species"
sc:"hornemanni"
ja:"コベニヒワ"
}
{
"alien":false,
upper:"leucosticte"
rank:"species"
sc:"arctoa"
ja:"ハギマシコ"
}
{
"alien":false,
upper:"uragus"
rank:"species"
sc:"sibiricus"
ja:"ベニマシコ"
}
{
"alien":false,
upper:"chaunoproctus"
rank:"species"
sc:"ferreorostris"
ja:"オガサワラマシコ"
}
{
"alien":false,
upper:"carpodacus"
rank:"species"
sc:"erythrinus"
ja:"アカマシコ"
}
{
"alien":false,
upper:"carpodacus"
rank:"species"
sc:"roseus"
ja:"オオマシコ"
}
{
"alien":false,
upper:"pinicola"
rank:"species"
sc:"enucleator"
ja:"ギンザンマシコ"
}
{
"alien":false,
upper:"loxia"
rank:"species"
sc:"curvirostra"
ja:"イスカ"
}
{
"alien":false,
upper:"loxia"
rank:"species"
sc:"leucoptera"
ja:"ナキイスカ"
}
{
"alien":false,
upper:"pyrrhula"
rank:"species"
sc:"pyrrhula"
ja:"ウソ"
}
{
"alien":false,
upper:"coccothraustes"
rank:"species"
sc:"coccothraustes"
ja:"シメ"
}
{
"alien":false,
upper:"eophona"
rank:"species"
sc:"migratoria"
ja:"コイカル"
}
{
"alien":false,
upper:"eophona"
rank:"species"
sc:"personata"
ja:"イカル"
}
{
"alien":false,
upper:"calcarius"
rank:"species"
sc:"lapponicus"
ja:"ツメナガホオジロ"
}
{
"alien":false,
upper:"plectrophenax"
rank:"species"
sc:"nivalis"
ja:"ユキホオジロ"
}
{
"alien":false,
upper:"setophaga"
rank:"species"
sc:"coronata"
ja:"キヅタアメリカムシクイ"
}
{
"alien":false,
upper:"cardellina"
rank:"species"
sc:"pusilla"
ja:"ウィルソンアメリカムシクイ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"lathami"
ja:"レンジャクノジコ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"citrinella"
ja:"キアオジ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"leucocephalos"
ja:"シラガホオジロ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"cioides"
ja:"ホオジロ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"buchanani"
ja:"イワバホオジロ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"hortulana"
ja:"ズアオホオジロ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"tristrami"
ja:"シロハラホオジロ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"fucata"
ja:"ホオアカ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"pusilla"
ja:"コホオアカ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"chrysophrys"
ja:"キマユホオジロ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"rustica"
ja:"カシラダカ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"elegans"
ja:"ミヤマホオジロ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"aureola"
ja:"シマアオジ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"rutila"
ja:"シマノジコ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"melanocephala"
ja:"ズグロチャキンチョウ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"bruniceps"
ja:"チャキンチョウ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"sulphurata"
ja:"ノジコ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"spodocephala"
ja:"アオジ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"variabilis"
ja:"クロジ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"pallasi"
ja:"シベリアジュリン"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"yessoensis"
ja:"コジュリン"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"schoeniclus"
ja:"オオジュリン"
}
{
"alien":false,
upper:"passerella"
rank:"species"
sc:"iliaca"
ja:"ゴマフスズメ"
}
{
"alien":false,
upper:"melospiza"
rank:"species"
sc:"melodia"
ja:"ウタスズメ"
}
{
"alien":false,
upper:"zonotrichia"
rank:"species"
sc:"leucophrys"
ja:"ミヤマシトド"
}
{
"alien":false,
upper:"zonotrichia"
rank:"species"
sc:"atricapilla"
ja:"キガシラシトド"
}
{
"alien":false,
upper:"passerculus"
rank:"species"
sc:"sandwichensis"
ja:"サバンナシトド"
}
{
"alien":true,
upper:"bambusicola"
rank:"species"
sc:"thoracicus"
ja:"コジュケイ"
}
{
"alien":true,
upper:"syrmaticus"
rank:"species"
sc:"soemmerringii"
ja:"ヤマドリ"
}
{
"alien":true,
upper:"phasianus"
rank:"species"
sc:"colchicus"
ja:"キジ"
}
{
"alien":true,
upper:"pavo"
rank:"species"
sc:"cristatus"
ja:"インドクジャク"
}
{
"alien":true,
upper:"branta"
rank:"species"
sc:"canadensis"
ja:"カナダガン"
}
{
"alien":true,
upper:"cygnus"
rank:"species"
sc:"atratus"
ja:"コクチョウ"
}
{
"alien":true,
upper:"cygnus"
rank:"species"
sc:"olor"
ja:"コブハクチョウ"
}
{
"alien":true,
upper:"columba"
rank:"species"
sc:"livia"
ja:"カワラバト(ドバト)"
}
{
"alien":true,
upper:"streptopelia"
rank:"species"
sc:"decaocto"
ja:"シラコバト"
}
{
"alien":true,
upper:"ciconia"
rank:"species"
sc:"boyciana"
ja:"コウノトリ"
}
{
"alien":true,
upper:"nipponia"
rank:"species"
sc:"nippon"
ja:"トキ"
}
{
"alien":true,
upper:"himantopus"
rank:"species"
sc:"himantopus"
ja:"セイタカシギ"
}
{
"alien":true,
upper:"melopsittacus"
rank:"species"
sc:"undulatus"
ja:"セキセイインコ"
}
{
"alien":true,
upper:"psittacula"
rank:"species"
sc:"eupatria"
ja:"オオホンセイインコ"
}
{
"alien":true,
upper:"psittacula"
rank:"species"
sc:"krameri"
ja:"ホンセイインコ"
}
{
"alien":true,
upper:"psittacula"
rank:"species"
sc:"alexandri"
ja:"ダルマインコ"
}
{
"alien":true,
upper:"myiopsitta"
rank:"species"
sc:"monachus"
ja:"オキナインコ"
}
{
"alien":true,
upper:"urocissa"
rank:"species"
sc:"caerulea"
ja:"ヤマムスメ"
}
{
"alien":true,
upper:"pica"
rank:"species"
sc:"pica"
ja:"カササギ"
}
{
"alien":true,
upper:"zosterops"
rank:"species"
sc:"japonicus"
ja:"メジロ"
}
{
"alien":true,
upper:"garrulax"
rank:"species"
sc:"canorus"
ja:"ガビチョウ"
}
{
"alien":true,
upper:"garrulax"
rank:"species"
sc:"cineraceus"
ja:"ヒゲガビチョウ"
}
{
"alien":true,
upper:"garrulax"
rank:"species"
sc:"perspicillatus"
ja:"カオグロガビチョウ"
}
{
"alien":true,
upper:"garrulax"
rank:"species"
sc:"sannio"
ja:"カオジロガビチョウ"
}
{
"alien":true,
upper:"leiothrix"
rank:"species"
sc:"lutea"
ja:"ソウシチョウ"
}
{
"alien":true,
upper:"acridotheres"
rank:"species"
sc:"cristatellus"
ja:"ハッカチョウ"
}
{
"alien":true,
upper:"acridotheres"
rank:"species"
sc:"javanicus"
ja:"モリハッカ"
}
{
"alien":true,
upper:"acridotheres"
rank:"species"
sc:"ginginianus"
ja:"ハイイロハッカ"
}
{
"alien":true,
upper:"acridotheres"
rank:"species"
sc:"tristis"
ja:"インドハッカ"
}
{
"alien":true,
upper:"gracupica"
rank:"species"
sc:"contra"
ja:"ホオジロムクドリ"
}
{
"alien":true,
upper:"ploceus"
rank:"species"
sc:"intermedius"
ja:"メンハタオリドリ"
}
{
"alien":true,
upper:"euplectes"
rank:"species"
sc:"orix"
ja:"オオキンランチョウ"
}
{
"alien":true,
upper:"estrilda"
rank:"species"
sc:"melpoda"
ja:"ホオアカカエデチョウ"
}
{
"alien":true,
upper:"estrilda"
rank:"species"
sc:"troglodytes"
ja:"カエデチョウ"
}
{
"alien":true,
upper:"amandava"
rank:"species"
sc:"amandava"
ja:"ベニスズメ"
}
{
"alien":true,
upper:"lonchura"
rank:"species"
sc:"striata"
ja:"コシジロキンパラ"
}
{
"alien":true,
upper:"lonchura"
rank:"species"
sc:"punctulata"
ja:"シマキンパラ"
}
{
"alien":true,
upper:"lonchura"
rank:"species"
sc:"malacca"
ja:"ギンパラ"
}
{
"alien":true,
upper:"lonchura"
rank:"species"
sc:"atricapilla"
ja:"キンパラ"
}
{
"alien":true,
upper:"lonchura"
rank:"species"
sc:"maja"
ja:"ヘキチョウ"
}
{
"alien":true,
upper:"lonchura"
rank:"species"
sc:"oryzivora"
ja:"ブンチョウ"
}
{
"alien":true,
upper:"vidua"
rank:"species"
sc:"paradisaea"
ja:"ホウオウジャク"
}
{
"alien":true,
upper:"paroaria"
rank:"species"
sc:"coronata"
ja:"コウカンチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"bonasia"
sc:"vicinitas"
ja:"エゾライチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"muta"
sc:"japonica"
ja:"ライチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"soemmerringii"
sc:"scintillans"
ja:"ヤマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"soemmerringii"
sc:"subrufus"
ja:"ウスアカヤマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"soemmerringii"
sc:"intermedius"
ja:"シコクヤマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"soemmerringii"
sc:"soemmerringii"
ja:"アカヤマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"soemmerringii"
sc:"ijimae"
ja:"コシジロヤマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"colchicus"
sc:"robustipes"
ja:"キジ"
}
{
"alien":false,
rank:"subspecies"
upper:"colchicus"
sc:"tohkaidi"
ja:"トウカイキジ"
}
{
"alien":false,
rank:"subspecies"
upper:"colchicus"
sc:"tanensis"
ja:"シマキジ"
}
{
"alien":false,
rank:"subspecies"
upper:"colchicus"
sc:"versicolor"
ja:"キュウシュウキジ"
}
{
"alien":false,
rank:"subspecies"
upper:"fabalis"
sc:"middendorffii"
ja:"オオヒシクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"fabalis"
sc:"curtus"
ja:"ヒメヒシクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"fabalis"
sc:"serrirostris"
ja:"ヒシクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"anser"
sc:"rubrirostris"
ja:"ハイイロガン"
}
{
"alien":false,
rank:"subspecies"
upper:"albifrons"
sc:"albifrons"
ja:"マガン"
}
{
"alien":false,
rank:"subspecies"
upper:"caerulescens"
sc:"caerulescens"
ja:"ハクガン"
}
{
"alien":false,
rank:"subspecies"
upper:"caerulescens"
sc:"atlanticus"
ja:"オオハクガン"
}
{
"alien":false,
rank:"subspecies"
upper:"hutchinsii"
sc:"leucopareia"
ja:"シジュウカラガン"
}
{
"alien":false,
rank:"subspecies"
upper:"hutchinsii"
sc:"minima"
ja:"ヒメシジュウカラガン"
}
{
"alien":false,
rank:"subspecies"
upper:"bernicla"
sc:"orientalis"
ja:"コクガン"
}
{
"alien":false,
rank:"subspecies"
upper:"columbianus"
sc:"jankowskyi"
ja:"コハクチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"columbianus"
sc:"columbianus"
ja:"アメリカコハクチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"coromandelianus"
sc:"coromandelianus"
ja:"ナンキンオシ"
}
{
"alien":false,
rank:"subspecies"
upper:"strepera"
sc:"strepera"
ja:"オカヨシガモ"
}
{
"alien":false,
rank:"subspecies"
upper:"platyrhynchos"
sc:"platyrhynchos"
ja:"マガモ"
}
{
"alien":false,
rank:"subspecies"
upper:"crecca"
sc:"crecca"
ja:"コガモ"
}
{
"alien":false,
rank:"subspecies"
upper:"crecca"
sc:"carolinensis"
ja:"アメリカコガモ"
}
{
"alien":false,
rank:"subspecies"
upper:"marila"
sc:"marila"
ja:"スズガモ"
}
{
"alien":false,
rank:"subspecies"
upper:"fusca"
sc:"stejnegeri"
ja:"ビロードキンクロ"
}
{
"alien":false,
rank:"subspecies"
upper:"clangula"
sc:"clangula"
ja:"ホオジロガモ"
}
{
"alien":false,
rank:"subspecies"
upper:"merganser"
sc:"merganser"
ja:"カワアイサ"
}
{
"alien":false,
rank:"subspecies"
upper:"merganser"
sc:"orientalis"
ja:"コカワアイサ"
}
{
"alien":false,
rank:"subspecies"
upper:"ruficollis"
sc:"poggei"
ja:"カイツブリ"
}
{
"alien":false,
rank:"subspecies"
upper:"ruficollis"
sc:"kunikyonis"
ja:"ダイトウカイツブリ"
}
{
"alien":false,
rank:"subspecies"
upper:"grisegena"
sc:"holbollii"
ja:"アカエリカイツブリ"
}
{
"alien":false,
rank:"subspecies"
upper:"cristatus"
sc:"cristatus"
ja:"カンムリカイツブリ"
}
{
"alien":false,
rank:"subspecies"
upper:"auritus"
sc:"auritus"
ja:"ミミカイツブリ"
}
{
"alien":false,
rank:"subspecies"
upper:"nigricollis"
sc:"nigricollis"
ja:"ハジロカイツブリ"
}
{
"alien":false,
rank:"subspecies"
upper:"rubricauda"
sc:"rothschildi"
ja:"アカオネッタイチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"lepturus"
sc:"dorotheae"
ja:"シラオネッタイチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"oenas"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"janthina"
sc:"janthina"
ja:"カラスバト"
}
{
"alien":false,
rank:"subspecies"
upper:"janthina"
sc:"nitens"
ja:"アカガシラカラスバト"
}
{
"alien":false,
rank:"subspecies"
upper:"janthina"
sc:"stejnegeri"
ja:"ヨナグニカラスバト"
}
{
"alien":false,
rank:"subspecies"
upper:"orientalis"
sc:"orientalis"
ja:"キジバト"
}
{
"alien":false,
rank:"subspecies"
upper:"orientalis"
sc:"stimpsoni"
ja:"リュウキュウキジバト"
}
{
"alien":false,
rank:"subspecies"
upper:"decaocto"
sc:"decaocto"
ja:"シラコバト"
}
{
"alien":false,
rank:"subspecies"
upper:"tranquebarica"
sc:"humilis"
ja:"ベニバト"
}
{
"alien":false,
rank:"subspecies"
upper:"indica"
sc:"yamashinai"
ja:"キンバト"
}
{
"alien":false,
rank:"subspecies"
upper:"sieboldii"
sc:"sieboldii"
ja:"アオバト"
}
{
"alien":false,
rank:"subspecies"
upper:"formosae"
sc:"permagnus"
ja:"ズアカアオバト"
}
{
"alien":false,
rank:"subspecies"
upper:"formosae"
sc:"medioximus"
ja:"チュウダイズアカアオバト"
}
{
"alien":false,
rank:"subspecies"
upper:"leclancheri"
sc:"taiwanus"
ja:"クロアゴヒメアオバト"
}
{
"alien":false,
rank:"subspecies"
upper:"arctica"
sc:"viridigularis"
ja:"オオハム"
}
{
"alien":false,
rank:"subspecies"
upper:"glacialis"
sc:"rodgersii"
ja:"フルマカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"phaeopygia"
sc:"sandwichensis"
ja:"ハワイシロハラミズナギドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"pacificus"
sc:"cuneatus"
ja:"オナガミズナギドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"lherminieri"
sc:"bannermani"
ja:"セグロミズナギドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"oceanicus"
sc:"exasperatus"
ja:"アシナガウミツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucorhoa"
sc:"leucorhoa"
ja:"コシジロウミツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"furcata"
sc:"furcata"
ja:"ハイイロウミツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"minor"
sc:"minor"
ja:"オオグンカンドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"ariel"
sc:"ariel"
ja:"コグンカンドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"dactylatra"
sc:"personata"
ja:"アオツラカツオドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"sula"
sc:"rubripes"
ja:"アカアシカツオドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucogaster"
sc:"plotus"
ja:"カツオドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucogaster"
sc:"brewsteri"
ja:"シロガシラカツオドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"pelagicus"
sc:"pelagicus"
ja:"ヒメウ"
}
{
"alien":false,
rank:"subspecies"
upper:"carbo"
sc:"hanedae"
ja:"カワウ"
}
{
"alien":false,
rank:"subspecies"
upper:"stellaris"
sc:"stellaris"
ja:"サンカノゴイ"
}
{
"alien":false,
rank:"subspecies"
upper:"sinensis"
sc:"sinensis"
ja:"ヨシゴイ"
}
{
"alien":false,
rank:"subspecies"
upper:"sinensis"
sc:"bryani"
ja:"マリアナヨシゴイ"
}
{
"alien":false,
rank:"subspecies"
upper:"flavicollis"
sc:"flavicollis"
ja:"タカサゴクロサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"nycticorax"
sc:"nycticorax"
ja:"ゴイサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"caledonicus"
sc:"crassirostris"
ja:"ハシブトゴイ"
}
{
"alien":false,
rank:"subspecies"
upper:"striata"
sc:"amurensis"
ja:"ササゴイ"
}
{
"alien":false,
rank:"subspecies"
upper:"ibis"
sc:"coromandus"
ja:"アマサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"cinerea"
sc:"jouyi"
ja:"アオサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"purpurea"
sc:"manilensis"
ja:"ムラサキサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"alba"
ja:"ダイサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"modesta"
ja:"チュウダイサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"intermedia"
sc:"intermedia"
ja:"チュウサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"garzetta"
sc:"garzetta"
ja:"コサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"sacra"
sc:"sacra"
ja:"クロサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucorodia"
sc:"leucorodia"
ja:"ヘラサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"canadensis"
sc:"canadensis"
ja:"カナダヅル"
}
{
"alien":false,
rank:"subspecies"
upper:"grus"
sc:"lilfordi"
ja:"クロヅル"
}
{
"alien":false,
rank:"subspecies"
upper:"eurizonoides"
sc:"sepiaria"
ja:"オオクイナ"
}
{
"alien":false,
rank:"subspecies"
upper:"striatus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"aquaticus"
sc:"indicus"
ja:"クイナ"
}
{
"alien":false,
rank:"subspecies"
upper:"phoenicurus"
sc:"phoenicurus"
ja:"シロハラクイナ"
}
{
"alien":false,
rank:"subspecies"
upper:"pusilla"
sc:"pusilla"
ja:"ヒメクイナ"
}
{
"alien":false,
rank:"subspecies"
upper:"fusca"
sc:"erythrothorax"
ja:"ヒクイナ"
}
{
"alien":false,
rank:"subspecies"
upper:"fusca"
sc:"phaeopyga"
ja:"リュウキュウヒクイナ"
}
{
"alien":false,
rank:"subspecies"
upper:"cinerea"
sc:"brevipes"
ja:"マミジロクイナ"
}
{
"alien":false,
rank:"subspecies"
upper:"chloropus"
sc:"chloropus"
ja:"バン"
}
{
"alien":false,
rank:"subspecies"
upper:"atra"
sc:"atra"
ja:"オオバン"
}
{
"alien":false,
rank:"subspecies"
upper:"tarda"
sc:"dybowskii"
ja:"ノガン"
}
{
"alien":false,
rank:"subspecies"
upper:"bengalensis"
sc:"lignator"
ja:"バンケン"
}
{
"alien":false,
rank:"subspecies"
upper:"scolopaceus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"lugubris"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"micropterus"
sc:"micropterus"
ja:"セグロカッコウ"
}
{
"alien":false,
rank:"subspecies"
upper:"canorus"
sc:"telephonus"
ja:"カッコウ"
}
{
"alien":false,
rank:"subspecies"
upper:"indicus"
sc:"jotaka"
ja:"ヨタカ"
}
{
"alien":false,
rank:"subspecies"
upper:"brevirostris"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"caudacutus"
sc:"caudacutus"
ja:"ハリオアマツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"pacificus"
sc:"pacificus"
ja:"キタアマツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"pacificus"
sc:"kurodae"
ja:"アマツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"nipalensis"
sc:"kuntzi"
ja:"ヒメアマツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"apricaria"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"hiaticula"
sc:"tundrae"
ja:"ハジロコチドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"dubius"
sc:"curonicus"
ja:"コチドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"alexandrinus"
sc:"alexandrinus"
ja:"ハシボソシロチドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"alexandrinus"
sc:"dealbatus"
ja:"シロチドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"mongolus"
sc:"mongolus"
ja:"モウコメダイチドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"mongolus"
sc:"stegmanni"
ja:"メダイチドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"ostralegus"
sc:"osculans"
ja:"ミヤコドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"himantopus"
sc:"himantopus"
ja:"セイタカシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"himantopus"
sc:"leucocephalus"
ja:"オーストラリアセイタカシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"solitaria"
sc:"japonica"
ja:"アオシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"gallinago"
sc:"gallinago"
ja:"タシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"griseus"
sc:"hendersoni"
ja:"アメリカオオハシシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"limosa"
sc:"melanuroides"
ja:"オグロシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"lapponica"
sc:"menzbieri"
ja:"コシジロオオソリハシシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"lapponica"
sc:"baueri"
ja:"オオソリハシシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"phaeopus"
sc:"variegatus"
ja:"チュウシャクシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"arquata"
sc:"orientalis"
ja:"ダイシャクシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"totanus"
sc:"ussuriensis"
ja:"アカアシシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"interpres"
sc:"interpres"
ja:"キョウジョシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"canutus"
sc:"rogersi"
ja:"コオバシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"ptilocnemis"
sc:"quarta"
ja:"チシマシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"alpina"
sc:"sakhalina"
ja:"ハマシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"alpina"
sc:"arcticola"
ja:"キタアラスカハマシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"falcinellus"
sc:"sibirica"
ja:"キリアイ"
}
{
"alien":false,
rank:"subspecies"
upper:"benghalensis"
sc:"benghalensis"
ja:"タマシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"suscitator"
sc:"okinavensis"
ja:"ミフウズラ"
}
{
"alien":false,
rank:"subspecies"
upper:"stolidus"
sc:"pileatus"
ja:"クロアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"stolidus"
sc:"pullus"
ja:"リュウキュウクロアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"minutus"
sc:"marcusi"
ja:"ヒメクロアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"cerulea"
sc:"saxatilis"
ja:"ハイイロアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"candida"
ja:"シロアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"tridactyla"
sc:"pollicaris"
ja:"ミツユビカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"atricilla"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"canus"
sc:"kamtschatschensis"
ja:"カモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"canus"
sc:"heinei"
ja:"ニシシベリアカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"canus"
sc:"brachyrhynchus"
ja:"コカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"hyperboreus"
sc:"pallidissimus"
ja:"シロカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"glaucoides"
sc:"glaucoides"
ja:"アイスランドカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"glaucoides"
sc:"kumlieni"
ja:"クムリーンアイスランドカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"argentatus"
sc:"vegae"
ja:"セグロカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"argentatus"
sc:"smithsonianus"
ja:"アメリカセグロカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"cachinnans"
sc:"mongolicus"
ja:"キアシセグロカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"fuscus"
sc:"heuglini"
ja:"ニシセグロカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"fuscus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"nilotica"
sc:"nilotica"
ja:"ハシブトアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"bergii"
sc:"cristata"
ja:"オオアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"bengalensis"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"albifrons"
sc:"sinensis"
ja:"コアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"anaethetus"
sc:"anaethetus"
ja:"マミジロアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"fuscata"
sc:"nubilosa"
ja:"セグロアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"dougallii"
sc:"bangsi"
ja:"ベニアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"hirundo"
sc:"minussensis"
ja:"アカアシアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"hirundo"
sc:"longipennis"
ja:"アジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"hybrida"
sc:"javanicus"
ja:"クロハラアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"niger"
sc:"niger"
ja:"ハシグロクロハラアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"niger"
sc:"surinamensis"
ja:"アメリカハシグロクロハラアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"alle"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"lomvia"
sc:"arra"
ja:"ハシブトウミガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"aalge"
sc:"inornata"
ja:"ウミガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"torda"
sc:"islandica"
ja:"オオハシウミガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"columba"
sc:"snowi"
ja:"ウミバト"
}
{
"alien":false,
rank:"subspecies"
upper:"columba"
sc:"kaiurka"
ja:"アリューシャンウミバト"
}
{
"alien":false,
rank:"subspecies"
upper:"haliaetus"
sc:"haliaetus"
ja:"ミサゴ"
}
{
"alien":false,
rank:"subspecies"
upper:"ptilorhynchus"
sc:"orientalis"
ja:"ハチクマ"
}
{
"alien":false,
rank:"subspecies"
upper:"caeruleus"
sc:"hypoleucus"
ja:"カタグロトビ"
}
{
"alien":false,
rank:"subspecies"
upper:"migrans"
sc:"lineatus"
ja:"トビ"
}
{
"alien":false,
rank:"subspecies"
upper:"albicilla"
sc:"albicilla"
ja:"オジロワシ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucocephalus"
sc:"washingtoniensis"
ja:"ハクトウワシ"
}
{
"alien":false,
rank:"subspecies"
upper:"cheela"
sc:"perplexus"
ja:"カンムリワシ"
}
{
"alien":false,
rank:"subspecies"
upper:"aeruginosus"
sc:"aeruginosus"
ja:"ヨーロッパチュウヒ"
}
{
"alien":false,
rank:"subspecies"
upper:"spilonotus"
sc:"spilonotus"
ja:"チュウヒ"
}
{
"alien":false,
rank:"subspecies"
upper:"cyaneus"
sc:"cyaneus"
ja:"ハイイロチュウヒ"
}
{
"alien":false,
rank:"subspecies"
upper:"gularis"
sc:"gularis"
ja:"ツミ"
}
{
"alien":false,
rank:"subspecies"
upper:"gularis"
sc:"iwasakii"
ja:"リュウキュウツミ"
}
{
"alien":false,
rank:"subspecies"
upper:"nisus"
sc:"nisosimilis"
ja:"ハイタカ"
}
{
"alien":false,
rank:"subspecies"
upper:"gentilis"
sc:"albidus"
ja:"シロオオタカ"
}
{
"alien":false,
rank:"subspecies"
upper:"gentilis"
sc:"fujiyamae"
ja:"オオタカ"
}
{
"alien":false,
rank:"subspecies"
upper:"buteo"
sc:"japonicus"
ja:"ノスリ"
}
{
"alien":false,
rank:"subspecies"
upper:"buteo"
sc:"toyoshimai"
ja:"オガサワラノスリ"
}
{
"alien":false,
rank:"subspecies"
upper:"buteo"
sc:"oshiroi"
ja:"ダイトウノスリ"
}
{
"alien":false,
rank:"subspecies"
upper:"lagopus"
sc:"menzbieri"
ja:"ケアシノスリ"
}
{
"alien":false,
rank:"subspecies"
upper:"chrysaetos"
sc:"japonica"
ja:"イヌワシ"
}
{
"alien":false,
rank:"subspecies"
upper:"nipalensis"
sc:"orientalis"
ja:"クマタカ"
}
{
"alien":false,
rank:"subspecies"
upper:"longimembris"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"lempiji"
sc:"ussuriensis"
ja:"サメイロオオコノハズク"
}
{
"alien":false,
rank:"subspecies"
upper:"lempiji"
sc:"semitorques"
ja:"オオコノハズク"
}
{
"alien":false,
rank:"subspecies"
upper:"lempiji"
sc:"pryeri"
ja:"リュウキュウオオコノハズク"
}
{
"alien":false,
rank:"subspecies"
upper:"sunia"
sc:"japonicus"
ja:"コノハズク"
}
{
"alien":false,
rank:"subspecies"
upper:"elegans"
sc:"interpositus"
ja:"ダイトウコノハズク"
}
{
"alien":false,
rank:"subspecies"
upper:"elegans"
sc:"elegans"
ja:"リュウキュウコノハズク"
}
{
"alien":false,
rank:"subspecies"
upper:"bubo"
sc:"kiautschensis"
ja:"タイリクワシミミズク"
}
{
"alien":false,
rank:"subspecies"
upper:"bubo"
sc:"borissowi"
ja:"ワシミミズク"
}
{
"alien":false,
rank:"subspecies"
upper:"blakistoni"
sc:"blakistoni"
ja:"シマフクロウ"
}
{
"alien":false,
rank:"subspecies"
upper:"uralensis"
sc:"japonica"
ja:"エゾフクロウ"
}
{
"alien":false,
rank:"subspecies"
upper:"uralensis"
sc:"hondoensis"
ja:"フクロウ"
}
{
"alien":false,
rank:"subspecies"
upper:"uralensis"
sc:"momiyamae"
ja:"モミヤマフクロウ"
}
{
"alien":false,
rank:"subspecies"
upper:"uralensis"
sc:"fuscescens"
ja:"キュウシュウフクロウ"
}
{
"alien":false,
rank:"subspecies"
upper:"uralensis"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"funereus"
sc:"magnus"
ja:"キンメフクロウ"
}
{
"alien":false,
rank:"subspecies"
upper:"scutulata"
sc:"macroptera"
ja:"チョウセンアオバズク"
}
{
"alien":false,
rank:"subspecies"
upper:"scutulata"
sc:"japonica"
ja:"アオバズク"
}
{
"alien":false,
rank:"subspecies"
upper:"scutulata"
sc:"totogo"
ja:"リュウキュウアオバズク"
}
{
"alien":false,
rank:"subspecies"
upper:"otus"
sc:"otus"
ja:"トラフズク"
}
{
"alien":false,
rank:"subspecies"
upper:"flammeus"
sc:"flammeus"
ja:"コミミズク"
}
{
"alien":false,
rank:"subspecies"
upper:"epops"
sc:"saturata"
ja:"ヤツガシラ"
}
{
"alien":false,
rank:"subspecies"
upper:"coromanda"
sc:"major"
ja:"アカショウビン"
}
{
"alien":false,
rank:"subspecies"
upper:"coromanda"
sc:"bangsi"
ja:"リュウキュウアカショウビン"
}
{
"alien":false,
rank:"subspecies"
upper:"smyrnensis"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"chloris"
sc:"collaris"
ja:"ナンヨウショウビン"
}
{
"alien":false,
rank:"subspecies"
upper:"atthis"
sc:"bengalensis"
ja:"カワセミ"
}
{
"alien":false,
rank:"subspecies"
upper:"erithaca"
sc:"erithaca"
ja:"ミツユビカワセミ"
}
{
"alien":false,
rank:"subspecies"
upper:"lugubris"
sc:"pallida"
ja:"エゾヤマセミ"
}
{
"alien":false,
rank:"subspecies"
upper:"lugubris"
sc:"lugubris"
ja:"ヤマセミ"
}
{
"alien":false,
rank:"subspecies"
upper:"orientalis"
sc:"calonyx"
ja:"ブッポウソウ"
}
{
"alien":false,
rank:"subspecies"
upper:"torquilla"
sc:"chinensis"
ja:"シベリアアリスイ"
}
{
"alien":false,
rank:"subspecies"
upper:"torquilla"
sc:"japonica"
ja:"アリスイ"
}
{
"alien":false,
rank:"subspecies"
upper:"hyperythrus"
sc:"subrufinus"
ja:"チャバラアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"seebohmi"
ja:"エゾコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"nippon"
ja:"コゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"matsudairai"
ja:"ミヤケコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"shikokuensis"
ja:"シコクコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"kotataki"
ja:"ツシマコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"kizuki"
ja:"キュウシュウコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"amamii"
ja:"アマミコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"nigrescens"
ja:"リュウキュウコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"orii"
ja:"オリイコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"minor"
sc:"amurensis"
ja:"コアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucotos"
sc:"subcirris"
ja:"エゾオオアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucotos"
sc:"stejnegeri"
ja:"オオアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucotos"
sc:"namiyei"
ja:"ナミエオオアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucotos"
sc:"owstoni"
ja:"オーストンオオアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucotos"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"major"
sc:"brevirostris"
ja:"ハシブトアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"major"
sc:"japonicus"
ja:"エゾアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"major"
sc:"hondoensis"
ja:"アカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"tridactylus"
sc:"inouyei"
ja:"ミユビゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"javensis"
sc:"richardsi"
ja:"キタタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"martius"
sc:"martius"
ja:"クマゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"awokera"
sc:"awokera"
ja:"アオゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"awokera"
sc:"horii"
ja:"カゴシマアオゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"awokera"
sc:"takatsukasae"
ja:"タネアオゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"canus"
sc:"jessoensis"
ja:"ヤマゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"tinnunculus"
sc:"interstinctus"
ja:"チョウゲンボウ"
}
{
"alien":false,
rank:"subspecies"
upper:"columbarius"
sc:"insignis"
ja:"コチョウゲンボウ"
}
{
"alien":false,
rank:"subspecies"
upper:"columbarius"
sc:"pacificus"
ja:"ヒガシコチョウゲンボウ"
}
{
"alien":false,
rank:"subspecies"
upper:"subbuteo"
sc:"subbuteo"
ja:"チゴハヤブサ"
}
{
"alien":false,
rank:"subspecies"
upper:"cherrug"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"peregrinus"
sc:"japonensis"
ja:"ハヤブサ"
}
{
"alien":false,
rank:"subspecies"
upper:"peregrinus"
sc:"pealei"
ja:"オオハヤブサ"
}
{
"alien":false,
rank:"subspecies"
upper:"peregrinus"
sc:"furuitii"
ja:"シマハヤブサ"
}
{
"alien":false,
rank:"subspecies"
upper:"peregrinus"
sc:"anatum"
ja:"アメリカハヤブサ"
}
{
"alien":false,
rank:"subspecies"
upper:"sordida"
sc:"cucullata"
ja:"ズグロヤイロチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucorynchus"
sc:"leucorynchus"
ja:"モリツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"melaschistos"
sc:"intermedia"
ja:"アサクラサンショウクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"divaricatus"
sc:"divaricatus"
ja:"サンショウクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"divaricatus"
sc:"tegimae"
ja:"リュウキュウサンショウクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"chinensis"
sc:"diffusus"
ja:"コウライウグイス"
}
{
"alien":false,
rank:"subspecies"
upper:"macrocercus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"leucophaeus"
sc:"leucogenis"
ja:"ハイイロオウチュウ"
}
{
"alien":false,
rank:"subspecies"
upper:"hottentottus"
sc:"brevirostris"
ja:"カンムリオウチュウ"
}
{
"alien":false,
rank:"subspecies"
upper:"azurea"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"atrocaudata"
sc:"atrocaudata"
ja:"サンコウチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"atrocaudata"
sc:"illex"
ja:"リュウキュウサンコウチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"bucephalus"
sc:"bucephalus"
ja:"モズ"
}
{
"alien":false,
rank:"subspecies"
upper:"cristatus"
sc:"lucionensis"
ja:"シマアカモズ"
}
{
"alien":false,
rank:"subspecies"
upper:"cristatus"
sc:"superciliosus"
ja:"アカモズ"
}
{
"alien":false,
rank:"subspecies"
upper:"collurio"
sc:"pallidifrons"
ja:"セアカモズ"
}
{
"alien":false,
rank:"subspecies"
upper:"isabellinus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"schach"
sc:"schach"
ja:"タカサゴモズ"
}
{
"alien":false,
rank:"subspecies"
upper:"excubitor"
sc:"mollis"
ja:"シベリアオオモズ"
}
{
"alien":false,
rank:"subspecies"
upper:"excubitor"
sc:"bianchii"
ja:"オオモズ"
}
{
"alien":false,
rank:"subspecies"
upper:"sphenocercus"
sc:"sphenocercus"
ja:"オオカラモズ"
}
{
"alien":false,
rank:"subspecies"
upper:"glandarius"
sc:"brandtii"
ja:"ミヤマカケス"
}
{
"alien":false,
rank:"subspecies"
upper:"glandarius"
sc:"japonicus"
ja:"カケス"
}
{
"alien":false,
rank:"subspecies"
upper:"glandarius"
sc:"tokugawae"
ja:"サドカケス"
}
{
"alien":false,
rank:"subspecies"
upper:"glandarius"
sc:"orii"
ja:"ヤクシマカケス"
}
{
"alien":false,
rank:"subspecies"
upper:"cyanus"
sc:"japonica"
ja:"オナガ"
}
{
"alien":false,
rank:"subspecies"
upper:"pica"
sc:"serica"
ja:"カササギ"
}
{
"alien":false,
rank:"subspecies"
upper:"caryocatactes"
sc:"macrorhynchos"
ja:"ハシナガホシガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"caryocatactes"
sc:"japonica"
ja:"ホシガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"monedula"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"frugilegus"
sc:"pastinator"
ja:"ミヤマガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"corone"
sc:"orientalis"
ja:"ハシボソガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"macrorhynchos"
sc:"mandshuricus"
ja:"チョウセンハシブトガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"macrorhynchos"
sc:"japonensis"
ja:"ハシブトガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"macrorhynchos"
sc:"connectens"
ja:"リュウキュウハシブトガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"macrorhynchos"
sc:"osai"
ja:"オサハシブトガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"corax"
sc:"kamtschaticus"
ja:"ワタリガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"regulus"
sc:"japonensis"
ja:"キクイタダキ"
}
{
"alien":false,
rank:"subspecies"
upper:"pendulinus"
sc:"consobrinus"
ja:"ツリスガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"palustris"
sc:"hensoni"
ja:"ハシブトガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"montanus"
sc:"sachalinensis"
ja:"カラフトコガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"montanus"
sc:"restrictus"
ja:"コガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"varius"
ja:"ヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"namiyei"
ja:"ナミエヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"owstoni"
ja:"オーストンヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"orii"
ja:"ダイトウヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"sunsunpi"
ja:"タネヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"yakushimensis"
ja:"ヤクシマヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"amamii"
ja:"アマミヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"olivaceus"
ja:"オリイヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"ater"
sc:"insularis"
ja:"ヒガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"minor"
sc:"minor"
ja:"シジュウカラ"
}
{
"alien":false,
rank:"subspecies"
upper:"minor"
sc:"amamiensis"
ja:"アマミシジュウカラ"
}
{
"alien":false,
rank:"subspecies"
upper:"minor"
sc:"okinawae"
ja:"オキナワシジュウカラ"
}
{
"alien":false,
rank:"subspecies"
upper:"minor"
sc:"nigriloris"
ja:"イシガキシジュウカラ"
}
{
"alien":false,
rank:"subspecies"
upper:"minor"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"cyanus"
sc:"tianschanicus"
ja:"ルリガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"biarmicus"
sc:"russicus"
ja:"ヒゲガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"bimaculata"
sc:"torquata"
ja:"クビワコウテンシ"
}
{
"alien":false,
rank:"subspecies"
upper:"brachydactyla"
sc:"longipennis"
ja:"ヒメコウテンシ"
}
{
"alien":false,
rank:"subspecies"
upper:"cheleensis"
sc:"cheleensis"
ja:"コヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"arvensis"
sc:"pekinensis"
ja:"オオヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"arvensis"
sc:"lonnbergi"
ja:"カラフトチュウヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"arvensis"
sc:"japonica"
ja:"ヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"alpestris"
sc:"flava"
ja:"ハマヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"paludicola"
sc:"chinensis"
ja:"タイワンショウドウツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"riparia"
sc:"ijimae"
ja:"ショウドウツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"rustica"
sc:"saturata"
ja:"アカハラツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"rustica"
sc:"gutturalis"
ja:"ツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"tahitica"
sc:"namiyei"
ja:"リュウキュウツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"daurica"
sc:"japonica"
ja:"コシアカツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"urbicum"
sc:"lagopodum"
ja:"ニシイワツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"dasypus"
sc:"dasypus"
ja:"イワツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"sinensis"
sc:"sinensis"
ja:"シロガシラ"
}
{
"alien":false,
rank:"subspecies"
upper:"sinensis"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"amaurotis"
ja:"ヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"squamiceps"
ja:"オガサワラヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"magnirostris"
ja:"ハシブトヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"borodinonis"
ja:"ダイトウヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"ogawae"
ja:"アマミヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"pryeri"
ja:"リュウキュウヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"stejnegeri"
ja:"イシガキヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"nagamichii"
ja:"タイワンヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"diphone"
sc:"sakhalinensis"
ja:"カラフトウグイス"
}
{
"alien":false,
rank:"subspecies"
upper:"diphone"
sc:"borealis"
ja:"チョウセンウグイス"
}
{
"alien":false,
rank:"subspecies"
upper:"diphone"
sc:"cantans"
ja:"ウグイス"
}
{
"alien":false,
rank:"subspecies"
upper:"diphone"
sc:"diphone"
ja:"ハシナガウグイス"
}
{
"alien":false,
rank:"subspecies"
upper:"diphone"
sc:"restricta"
ja:"ダイトウウグイス"
}
{
"alien":false,
rank:"subspecies"
upper:"diphone"
sc:"riukiuensis"
ja:"リュウキュウウグイス"
}
{
"alien":false,
rank:"subspecies"
upper:"caudatus"
sc:"japonicus"
ja:"シマエナガ"
}
{
"alien":false,
rank:"subspecies"
upper:"caudatus"
sc:"magnus"
ja:"チョウセンエナガ"
}
{
"alien":false,
rank:"subspecies"
upper:"caudatus"
sc:"trivirgatus"
ja:"エナガ"
}
{
"alien":false,
rank:"subspecies"
upper:"caudatus"
sc:"kiusiuensis"
ja:"キュウシュウエナガ"
}
{
"alien":false,
rank:"subspecies"
upper:"trochilus"
sc:"yakutensis"
ja:"キタヤナギムシクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"collybita"
sc:"tristis"
ja:"チフチャフ"
}
{
"alien":false,
rank:"subspecies"
upper:"fuscatus"
sc:"fuscatus"
ja:"ムジセッカ"
}
{
"alien":false,
rank:"subspecies"
upper:"affinis"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"borealis"
sc:"borealis"
ja:"コムシクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"borealis"
sc:"kennicotti"
ja:"アメリカコムシクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"curruca"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"familiare"
sc:"familiare"
ja:"メグロ"
}
{
"alien":false,
rank:"subspecies"
upper:"familiare"
sc:"hahasima"
ja:"ハハジマメグロ"
}
{
"alien":false,
rank:"subspecies"
upper:"japonicus"
sc:"japonicus"
ja:"メジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"japonicus"
sc:"stejnegeri"
ja:"シチトウメジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"japonicus"
sc:"alani"
ja:"イオウトウメジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"japonicus"
sc:"daitoensis"
ja:"ダイトウメジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"japonicus"
sc:"insularis"
ja:"シマメジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"japonicus"
sc:"loochooensis"
ja:"リュウキュウメジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"certhiola"
sc:"rubescens"
ja:"シベリアセンニュウ"
}
{
"alien":false,
rank:"subspecies"
upper:"pryeri"
sc:"pryeri"
ja:"オオセッカ"
}
{
"alien":false,
rank:"subspecies"
upper:"fasciolata"
sc:"amnicola"
ja:"エゾセンニュウ"
}
{
"alien":false,
rank:"subspecies"
upper:"bistrigiceps"
sc:"bistrigiceps"
ja:"コヨシキリ"
}
{
"alien":false,
rank:"subspecies"
upper:"agricola"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"aedon"
sc:"stegmanni"
ja:"ハシブトオオヨシキリ"
}
{
"alien":false,
rank:"subspecies"
upper:"juncidis"
sc:"brunniceps"
ja:"セッカ"
}
{
"alien":false,
rank:"subspecies"
upper:"garrulus"
sc:"centralasiae"
ja:"キレンジャク"
}
{
"alien":false,
rank:"subspecies"
upper:"europaea"
sc:"asiatica"
ja:"シロハラゴジュウカラ"
}
{
"alien":false,
rank:"subspecies"
upper:"europaea"
sc:"amurensis"
ja:"ゴジュウカラ"
}
{
"alien":false,
rank:"subspecies"
upper:"europaea"
sc:"roseilia"
ja:"キュウシュウゴジュウカラ"
}
{
"alien":false,
rank:"subspecies"
upper:"familiaris"
sc:"daurica"
ja:"キタキバシリ"
}
{
"alien":false,
rank:"subspecies"
upper:"familiaris"
sc:"japonica"
ja:"キバシリ"
}
{
"alien":false,
rank:"subspecies"
upper:"troglodytes"
sc:"dauricus"
ja:"チョウセンミソサザイ"
}
{
"alien":false,
rank:"subspecies"
upper:"troglodytes"
sc:"fumigatus"
ja:"ミソサザイ"
}
{
"alien":false,
rank:"subspecies"
upper:"troglodytes"
sc:"mosukei"
ja:"モスケミソサザイ"
}
{
"alien":false,
rank:"subspecies"
upper:"troglodytes"
sc:"orii"
ja:"ダイトウミソサザイ"
}
{
"alien":false,
rank:"subspecies"
upper:"troglodytes"
sc:"ogawae"
ja:"オガワミソサザイ"
}
{
"alien":false,
rank:"subspecies"
upper:"vulgaris"
sc:"poltaratskyi"
ja:"ホシムクドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"pallasii"
sc:"pallasii"
ja:"カワガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"sibirica"
sc:"davisoni"
ja:"マミジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"dauma"
sc:"aurea"
ja:"トラツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"dauma"
sc:"major"
ja:"オオトラツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"dauma"
sc:"iriomotensis"
ja:"コトラツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"minimus"
sc:"aliciae"
ja:"ハイイロチャツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"merula"
sc:"mandarinus"
ja:"クロウタドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"chrysolaus"
sc:"orii"
ja:"オオアカハラ"
}
{
"alien":false,
rank:"subspecies"
upper:"chrysolaus"
sc:"chrysolaus"
ja:"アカハラ"
}
{
"alien":false,
rank:"subspecies"
upper:"ruficollis"
sc:"atrogularis"
ja:"ノドグロツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"ruficollis"
sc:"ruficollis"
ja:"ノドアカツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"naumanni"
sc:"eunomus"
ja:"ツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"naumanni"
sc:"naumanni"
ja:"ハチジョウツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"iliacus"
sc:"iliacus"
ja:"ワキアカツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"viscivorus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"rubecula"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"akahige"
sc:"akahige"
ja:"コマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"akahige"
sc:"tanensis"
ja:"タネコマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"komadori"
sc:"komadori"
ja:"アカヒゲ"
}
{
"alien":false,
rank:"subspecies"
upper:"komadori"
sc:"namiyei"
ja:"ホントウアカヒゲ"
}
{
"alien":false,
rank:"subspecies"
upper:"komadori"
sc:"subrufus"
ja:"ウスアカヒゲ"
}
{
"alien":false,
rank:"subspecies"
upper:"svecica"
sc:"svecica"
ja:"オガワコマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"cyane"
sc:"bochaiensis"
ja:"コルリ"
}
{
"alien":false,
rank:"subspecies"
upper:"cyanurus"
sc:"cyanurus"
ja:"ルリビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"ochruros"
sc:"rufiventris"
ja:"クロジョウビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"phoenicurus"
sc:"phoenicurus"
ja:"シロビタイジョウビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"auroreus"
sc:"auroreus"
ja:"ジョウビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"torquatus"
sc:"stejnegeri"
ja:"ノビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"caprata"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"oenanthe"
sc:"oenanthe"
ja:"ハシグロヒタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"deserti"
sc:"oreophila"
ja:"サバクヒタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"solitarius"
sc:"pandoo"
ja:"アオハライソヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"solitarius"
sc:"philippensis"
ja:"イソヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"striata"
sc:"mongola"
ja:"ムナフヒタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"sibirica"
sc:"sibirica"
ja:"サメビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"dauurica"
sc:"dauurica"
ja:"コサメビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"hypoleuca"
sc:"sibirica"
ja:"マダラヒタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"narcissina"
sc:"narcissina"
ja:"キビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"narcissina"
sc:"owstoni"
ja:"リュウキュウキビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"cyanomelana"
sc:"cumatilis"
ja:"チョウセンオオルリ"
}
{
"alien":false,
rank:"subspecies"
upper:"cyanomelana"
sc:"cyanomelana"
ja:"オオルリ"
}
{
"alien":false,
rank:"subspecies"
upper:"thalassinus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"vivida"
sc:"vivida"
ja:"チャバラオオルリ"
}
{
"alien":false,
rank:"subspecies"
upper:"collaris"
sc:"erythropygia"
ja:"イワヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"montanella"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"domesticus"
sc:"domesticus"
ja:"イエスズメ"
}
{
"alien":false,
rank:"subspecies"
upper:"rutilans"
sc:"rutilans"
ja:"ニュウナイスズメ"
}
{
"alien":false,
rank:"subspecies"
upper:"montanus"
sc:"saturatus"
ja:"スズメ"
}
{
"alien":false,
rank:"subspecies"
upper:"flava"
sc:"plexa"
ja:"シベリアツメナガセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"flava"
sc:"leucocephala"
ja:"カオジロツメナガセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"flava"
sc:"macronyx"
ja:"キタツメナガセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"flava"
sc:"simillima"
ja:"マミジロツメナガセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"flava"
sc:"taivana"
ja:"ツメナガセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"citreola"
sc:"citreola"
ja:"キガシラセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"cinerea"
sc:"cinerea"
ja:"キセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"dukhunensis"
ja:"ニシシベリアハクセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"personata"
ja:"メンガタハクセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"alboides"
ja:"ネパールハクセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"baicalensis"
ja:"シベリアハクセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"ocularis"
ja:"タイワンハクセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"lugens"
ja:"ハクセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"leucopsis"
ja:"ホオジロハクセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"richardi"
sc:"richardi"
ja:"マミジロタヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"pratensis"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"trivialis"
sc:"trivialis"
ja:"ヨーロッパビンズイ"
}
{
"alien":false,
rank:"subspecies"
upper:"hodgsoni"
sc:"yunnanensis"
ja:"カラフトビンズイ"
}
{
"alien":false,
rank:"subspecies"
upper:"hodgsoni"
sc:"hodgsoni"
ja:"ビンズイ"
}
{
"alien":false,
rank:"subspecies"
upper:"gustavi"
sc:"gustavi"
ja:"セジロタヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"rubescens"
sc:"japonicus"
ja:"タヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"coelebs"
sc:"coelebs"
ja:"ズアオアトリ"
}
{
"alien":false,
rank:"subspecies"
upper:"sinica"
sc:"kawarahiba"
ja:"オオカワラヒワ"
}
{
"alien":false,
rank:"subspecies"
upper:"sinica"
sc:"minor"
ja:"カワラヒワ"
}
{
"alien":false,
rank:"subspecies"
upper:"sinica"
sc:"kittlitzi"
ja:"オガサワラカワラヒワ"
}
{
"alien":false,
rank:"subspecies"
upper:"flammea"
sc:"flammea"
ja:"ベニヒワ"
}
{
"alien":false,
rank:"subspecies"
upper:"hornemanni"
sc:"exilipes"
ja:"コベニヒワ"
}
{
"alien":false,
rank:"subspecies"
upper:"arctoa"
sc:"brunneonucha"
ja:"ハギマシコ"
}
{
"alien":false,
rank:"subspecies"
upper:"sibiricus"
sc:"sanguinolentus"
ja:"ベニマシコ"
}
{
"alien":false,
rank:"subspecies"
upper:"erythrinus"
sc:"grebnitskii"
ja:"アカマシコ"
}
{
"alien":false,
rank:"subspecies"
upper:"enucleator"
sc:"kamtschatkensis"
ja:"コバシギンザンマシコ"
}
{
"alien":false,
rank:"subspecies"
upper:"enucleator"
sc:"sakhalinensis"
ja:"ギンザンマシコ"
}
{
"alien":false,
rank:"subspecies"
upper:"curvirostra"
sc:"japonica"
ja:"イスカ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucoptera"
sc:"bifasciata"
ja:"ナキイスカ"
}
{
"alien":false,
rank:"subspecies"
upper:"pyrrhula"
sc:"cassinii"
ja:"ベニバラウソ"
}
{
"alien":false,
rank:"subspecies"
upper:"pyrrhula"
sc:"rosacea"
ja:"アカウソ"
}
{
"alien":false,
rank:"subspecies"
upper:"pyrrhula"
sc:"griseiventris"
ja:"ウソ"
}
{
"alien":false,
rank:"subspecies"
upper:"coccothraustes"
sc:"coccothraustes"
ja:"シベリアシメ"
}
{
"alien":false,
rank:"subspecies"
upper:"coccothraustes"
sc:"japonicus"
ja:"シメ"
}
{
"alien":false,
rank:"subspecies"
upper:"migratoria"
sc:"migratoria"
ja:"コイカル"
}
{
"alien":false,
rank:"subspecies"
upper:"personata"
sc:"personata"
ja:"イカル"
}
{
"alien":false,
rank:"subspecies"
upper:"lapponicus"
sc:"coloratus"
ja:"ツメナガホオジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"nivalis"
sc:"vlasowae"
ja:"ユキホオジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"nivalis"
sc:"townsendi"
ja:"オオユキホオジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"coronata"
sc:"coronata"
ja:"キヅタアメリカムシクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"pusilla"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"citrinella"
sc:"erythrogenys"
ja:"キアオジ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucocephalos"
sc:"leucocephalos"
ja:"シラガホオジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"cioides"
sc:"ciopsis"
ja:"ホオジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"buchanani"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"fucata"
sc:"fucata"
ja:"ホオアカ"
}
{
"alien":false,
rank:"subspecies"
upper:"rustica"
sc:"latifascia"
ja:"カシラダカ"
}
{
"alien":false,
rank:"subspecies"
upper:"elegans"
sc:"elegans"
ja:"ミヤマホオジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"aureola"
sc:"ornata"
ja:"シマアオジ"
}
{
"alien":false,
rank:"subspecies"
upper:"spodocephala"
sc:"spodocephala"
ja:"シベリアアオジ"
}
{
"alien":false,
rank:"subspecies"
upper:"spodocephala"
sc:"personata"
ja:"アオジ"
}
{
"alien":false,
rank:"subspecies"
upper:"pallasi"
sc:"polaris"
ja:"シベリアジュリン"
}
{
"alien":false,
rank:"subspecies"
upper:"pallasi"
sc:"pallasi"
ja:"オオシベリアジュリン"
}
{
"alien":false,
rank:"subspecies"
upper:"yessoensis"
sc:"yessoensis"
ja:"コジュリン"
}
{
"alien":false,
rank:"subspecies"
upper:"schoeniclus"
sc:"pyrrhulina"
ja:"オオジュリン"
}
{
"alien":false,
rank:"subspecies"
upper:"iliaca"
sc:"unalaschcensis"
ja:"ゴマフスズメ"
}
{
"alien":false,
rank:"subspecies"
upper:"melodia"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"leucophrys"
sc:"gambelii"
ja:"ミヤマシトド"
}
{
"alien":false,
rank:"subspecies"
upper:"sandwichensis"
sc:"sandwichensis"
ja:"サバンナシトド"
}
{
"alien":true,
rank:"subspecies"
upper:"thoracicus"
sc:"thoracicus"
ja:"コジュケイ"
}
{
"alien":true,
rank:"subspecies"
upper:"thoracicus"
sc:"sonorivox"
ja:"テッケイ"
}
{
"alien":true,
rank:"subspecies"
upper:"soemmerringii"
sc:"scintillans"
ja:"ヤマドリ"
}
{
"alien":true,
rank:"subspecies"
upper:"soemmerringii"
sc:"subrufus"
ja:"ウスアカヤマドリ"
}
{
"alien":true,
rank:"subspecies"
upper:"soemmerringii"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"colchicus"
sc:"karpowi"
ja:"コウライキジ"
}
{
"alien":true,
rank:"subspecies"
upper:"colchicus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"canadensis"
sc:"moffitti"
ja:"オオカナダガン"
}
{
"alien":true,
rank:"subspecies"
upper:"decaocto"
sc:"decaocto"
ja:"シラコバト"
}
{
"alien":true,
rank:"subspecies"
upper:"himantopus"
sc:"mexicanus"
ja:"クロエリセイタカシギ"
}
{
"alien":true,
rank:"subspecies"
upper:"krameri"
sc:"manillensis"
ja:"ワカケホンセイインコ"
}
{
"alien":true,
rank:"subspecies"
upper:"alexandri"
sc:"fasciata"
ja:"ダルマインコ"
}
{
"alien":true,
rank:"subspecies"
upper:"monachus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"pica"
sc:"serica"
ja:"カササギ"
}
{
"alien":true,
rank:"subspecies"
upper:"japonicus"
sc:"stejnegeri"
ja:"シチトウメジロ"
}
{
"alien":true,
rank:"subspecies"
upper:"japonicus"
sc:"alani"
ja:"イオウトウメジロ"
}
{
"alien":true,
rank:"subspecies"
upper:"canorus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"cineraceus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"sannio"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"lutea"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"cristatellus"
sc:"cristatellus"
ja:"ハッカチョウ"
}
{
"alien":true,
rank:"subspecies"
upper:"tristis"
sc:"tristis"
ja:"インドハッカ"
}
{
"alien":true,
rank:"subspecies"
upper:"contra"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"intermedius"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"orix"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"melpoda"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"amandava"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"striata"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"punctulata"
sc:"topela"
ja:"シマキンパラ"
}
{
"alien":true,
rank:"subspecies"
upper:"atricapilla"
sc:"ssp."
ja:"亜種不明"
}
]
db.createCollection 'names'

# Maps each rank to the rank of its parent taxon.
# "order" is the top rank and deliberately has no entry.
rank_relationships =
  family: "order"
  genus: "family"
  species: "genus"
  subspecies: "species"

# Insert every name document, resolving the textual parent reference
# (name.upper holds the parent's scientific name) into the parent's _id.
for name in data_names
  # The data list contains duplicates (e.g. orders listed twice);
  # skip documents already present verbatim.
  continue unless db.names.find(name).count() is 0
  if name.upper
    upper_rank = rank_relationships[name.rank]
    # findOne returns null when the parent taxon is missing or has not
    # been inserted yet; fail with a descriptive error instead of an
    # opaque TypeError from dereferencing `undefined._id`.
    parent = db.names.findOne {rank: upper_rank, sc: name.upper}
    unless parent?
      throw new Error "missing #{upper_rank} '#{name.upper}' for #{name.rank} '#{name.sc}'"
    # Replace the textual reference with the parent's rank and _id.
    name.upper = upper_rank
    name.upper_id = parent._id
  db.names.insert name
# mongo birdAPI
# this is db import script
db.dropDatabase()
data_names = [
{
sc:"galliformes"
ja:"キジ目"
rank:"order"
}
{
sc:"anseriformes"
ja:"カモ目"
rank:"order"
}
{
sc:"podicipediformes"
ja:"カイツブリ目"
rank:"order"
}
{
sc:"phaethontiformes"
ja:"ネッタイチョウ目"
rank:"order"
}
{
sc:"pterocliformes"
ja:"サケイ目"
rank:"order"
}
{
sc:"columbiformes"
ja:"ハト目"
rank:"order"
}
{
sc:"gaviiformes"
ja:"アビ目"
rank:"order"
}
{
sc:"procellariiformes"
ja:"ミズナギドリ目"
rank:"order"
}
{
sc:"ciconiiformes"
ja:"コウノトリ目"
rank:"order"
}
{
sc:"suliformes"
ja:"カツオドリ目"
rank:"order"
}
{
sc:"pelecaniformes"
ja:"ペリカン目"
rank:"order"
}
{
sc:"gruiformes"
ja:"ツル目"
rank:"order"
}
{
sc:"otidiformes"
ja:"ノガン目"
rank:"order"
}
{
sc:"cuculiformes"
ja:"カッコウ目"
rank:"order"
}
{
sc:"caprimulgiformes"
ja:"ヨタカ目"
rank:"order"
}
{
sc:"apodiformes"
ja:"アマツバメ目"
rank:"order"
}
{
sc:"charadriiformes"
ja:"チドリ目"
rank:"order"
}
{
sc:"accipitriformes"
ja:"タカ目"
rank:"order"
}
{
sc:"strigiformes"
ja:"フクロウ目"
rank:"order"
}
{
sc:"bucerotiformes"
ja:"サイチョウ目"
rank:"order"
}
{
sc:"coraciiformes"
ja:"ブッポウソウ目"
rank:"order"
}
{
sc:"piciformes"
ja:"キツツキ目"
rank:"order"
}
{
sc:"falconiformes"
ja:"ハヤブサ目"
rank:"order"
}
{
sc:"passeriformes"
ja:"スズメ目"
rank:"order"
}
{
sc:"galliformes"
ja:"キジ目"
rank:"order"
}
{
sc:"anseriformes"
ja:"カモ目"
rank:"order"
}
{
sc:"columbiformes"
ja:"ハト目"
rank:"order"
}
{
sc:"ciconiiformes"
ja:"コウノトリ目"
rank:"order"
}
{
sc:"pelecaniformes"
ja:"ペリカン目"
rank:"order"
}
{
sc:"charadriiformes"
ja:"チドリ目"
rank:"order"
}
{
sc:"psittaciformes"
ja:"インコ目"
rank:"order"
}
{
sc:"passeriformes"
ja:"スズメ目"
rank:"order"
}
{
sc:"phasianidae"
ja:"キジ科"
rank:"family"
upper:"galliformes"
}
{
sc:"anatidae"
ja:"カモ科"
rank:"family"
upper:"anseriformes"
}
{
sc:"podicipedidae"
ja:"カイツブリ科"
rank:"family"
upper:"podicipediformes"
}
{
sc:"phaethontidae"
ja:"ネッタイチョウ科"
rank:"family"
upper:"phaethontiformes"
}
{
sc:"pteroclidae"
ja:"サケイ科"
rank:"family"
upper:"pterocliformes"
}
{
sc:"columbidae"
ja:"ハト科"
rank:"family"
upper:"columbiformes"
}
{
sc:"gaviidae"
ja:"アビ科"
rank:"family"
upper:"gaviiformes"
}
{
sc:"diomedeidae"
ja:"アホウドリ科"
rank:"family"
upper:"procellariiformes"
}
{
sc:"procellariidae"
ja:"ミズナギドリ科"
rank:"family"
upper:"procellariiformes"
}
{
sc:"hydrobatidae"
ja:"ウミツバメ科"
rank:"family"
upper:"procellariiformes"
}
{
sc:"ciconiidae"
ja:"コウノトリ科"
rank:"family"
upper:"ciconiiformes"
}
{
sc:"fregatidae"
ja:"グンカンドリ科"
rank:"family"
upper:"suliformes"
}
{
sc:"sulidae"
ja:"カツオドリ科"
rank:"family"
upper:"suliformes"
}
{
sc:"phalacrocoracidae"
ja:"ウ科"
rank:"family"
upper:"suliformes"
}
{
sc:"pelecanidae"
ja:"ペリカン科"
rank:"family"
upper:"pelecaniformes"
}
{
sc:"ardeidae"
ja:"サギ科"
rank:"family"
upper:"pelecaniformes"
}
{
sc:"threskiornithidae"
ja:"トキ科"
rank:"family"
upper:"pelecaniformes"
}
{
sc:"gruidae"
ja:"ツル科"
rank:"family"
upper:"gruiformes"
}
{
sc:"rallidae"
ja:"クイナ科"
rank:"family"
upper:"gruiformes"
}
{
sc:"otididae"
ja:"ノガン科"
rank:"family"
upper:"otidiformes"
}
{
sc:"cuculidae"
ja:"カッコウ科"
rank:"family"
upper:"cuculiformes"
}
{
sc:"caprimulgidae"
ja:"ヨタカ科"
rank:"family"
upper:"caprimulgiformes"
}
{
sc:"apodidae"
ja:"アマツバメ科"
rank:"family"
upper:"apodiformes"
}
{
sc:"charadriidae"
ja:"チドリ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"haematopodidae"
ja:"ミヤコドリ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"recurvirostridae"
ja:"セイタカシギ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"scolopacidae"
ja:"シギ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"jacanidae"
ja:"レンカク科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"rostratulidae"
ja:"タマシギ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"turnicidae"
ja:"ミフウズラ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"glareolidae"
ja:"ツバメチドリ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"laridae"
ja:"カモメ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"stercorariidae"
ja:"トウゾクカモメ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"alcidae"
ja:"ウミスズメ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"pandionidae"
ja:"ミサゴ科"
rank:"family"
upper:"accipitriformes"
}
{
sc:"accipitridae"
ja:"タカ科"
rank:"family"
upper:"accipitriformes"
}
{
sc:"tytonidae"
ja:"メンフクロウ科"
rank:"family"
upper:"strigiformes"
}
{
sc:"strigidae"
ja:"フクロウ科"
rank:"family"
upper:"strigiformes"
}
{
sc:"upupidae"
ja:"ヤツガシラ科"
rank:"family"
upper:"bucerotiformes"
}
{
sc:"alcedinidae"
ja:"カワセミ科"
rank:"family"
upper:"coraciiformes"
}
{
sc:"meropidae"
ja:"ハチクイ科"
rank:"family"
upper:"coraciiformes"
}
{
sc:"coraciidae"
ja:"ブッポウソウ科"
rank:"family"
upper:"coraciiformes"
}
{
sc:"picidae"
ja:"キツツキ科"
rank:"family"
upper:"piciformes"
}
{
sc:"falconidae"
ja:"ハヤブサ科"
rank:"family"
upper:"falconiformes"
}
{
sc:"pittidae"
ja:"ヤイロチョウ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"artamidae"
ja:"モリツバメ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"campephagidae"
ja:"サンショウクイ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"oriolidae"
ja:"コウライウグイス科"
rank:"family"
upper:"passeriformes"
}
{
sc:"dicruridae"
ja:"オウチュウ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"monarchidae"
ja:"カササギヒタキ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"laniidae"
ja:"モズ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"corvidae"
ja:"カラス科"
rank:"family"
upper:"passeriformes"
}
{
sc:"regulidae"
ja:"キクイタダキ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"remizidae"
ja:"ツリスガラ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"paridae"
ja:"シジュウカラ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"panuridae"
ja:"ヒゲガラ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"alaudidae"
ja:"ヒバリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"hirundinidae"
ja:"ツバメ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"pycnonotidae"
ja:"ヒヨドリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"cettiidae"
ja:"ウグイス科"
rank:"family"
upper:"passeriformes"
}
{
sc:"aegithalidae"
ja:"エナガ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"phylloscopidae"
ja:"ムシクイ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"sylviidae"
ja:"ズグロムシクイ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"zosteropidae"
ja:"メジロ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"locustellidae"
ja:"センニュウ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"acrocephalidae"
ja:"ヨシキリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"cisticolidae"
ja:"セッカ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"bombycillidae"
ja:"レンジャク科"
rank:"family"
upper:"passeriformes"
}
{
sc:"sittidae"
ja:"ゴジュウカラ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"certhiidae"
ja:"キバシリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"troglodytidae"
ja:"ミソサザイ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"sturnidae"
ja:"ムクドリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"cinclidae"
ja:"カワガラス科"
rank:"family"
upper:"passeriformes"
}
{
sc:"muscicapidae"
ja:"ヒタキ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"prunellidae"
ja:"イワヒバリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"passeridae"
ja:"スズメ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"motacillidae"
ja:"セキレイ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"fringillidae"
ja:"アトリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"calcariidae"
ja:"ツメナガホオジロ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"parulidae"
ja:"アメリカムシクイ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"emberizidae"
ja:"ホオジロ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"psittacidae"
ja:"インコ科"
rank:"family"
upper:"psittaciformes"
}
{
sc:"timaliidae"
ja:"チメドリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"ploceidae"
ja:"ハタオリドリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"estrildidae"
ja:"カエデチョウ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"viduidae"
ja:"テンニンチョウ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"thraupidae"
ja:"フウキンチョウ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"tetrastes"
ja:"エゾライチョウ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"lagopus"
ja:"ライチョウ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"coturnix"
ja:"ウズラ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"syrmaticus"
ja:"ヤマドリ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"phasianus"
ja:"キジ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"dendrocygna"
ja:"リュウキュウガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"anser"
ja:"マガン属"
rank:"genus"
upper:"anatidae"
}
{
sc:"branta"
ja:"コクガン属"
rank:"genus"
upper:"anatidae"
}
{
sc:"cygnus"
ja:"ハクチョウ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"tadorna"
ja:"ツクシガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"aix"
ja:"オシドリ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"nettapus"
ja:"ナンキンオシ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"anas"
ja:"マガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"netta"
ja:"アカハシハジロ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"aythya"
ja:"スズガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"polysticta"
ja:"コケワタガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"somateria"
ja:"ケワタガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"histrionicus"
ja:"シノリガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"melanitta"
ja:"ビロードキンクロ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"clangula"
ja:"コオリガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"bucephala"
ja:"ホオジロガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"mergellus"
ja:"ミコアイサ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"mergus"
ja:"ウミアイサ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"tachybaptus"
ja:"カイツブリ属"
rank:"genus"
upper:"podicipedidae"
}
{
sc:"podiceps"
ja:"カンムリカイツブリ属"
rank:"genus"
upper:"podicipedidae"
}
{
sc:"phaethon"
ja:"ネッタイチョウ属"
rank:"genus"
upper:"phaethontidae"
}
{
sc:"syrrhaptes"
ja:"サケイ属"
rank:"genus"
upper:"pteroclidae"
}
{
sc:"columba"
ja:"カワラバト属"
rank:"genus"
upper:"columbidae"
}
{
sc:"streptopelia"
ja:"キジバト属"
rank:"genus"
upper:"columbidae"
}
{
sc:"chalcophaps"
ja:"キンバト属"
rank:"genus"
upper:"columbidae"
}
{
sc:"treron"
ja:"アオバト属"
rank:"genus"
upper:"columbidae"
}
{
sc:"ptilinopus"
ja:"ヒメアオバト属"
rank:"genus"
upper:"columbidae"
}
{
sc:"gavia"
ja:"アビ属"
rank:"genus"
upper:"gaviidae"
}
{
sc:"phoebastria"
ja:"アホウドリ属"
rank:"genus"
upper:"diomedeidae"
}
{
sc:"fulmarus"
ja:"フルマカモメ属"
rank:"genus"
upper:"procellariidae"
}
{
sc:"pterodroma"
ja:"シロハラミズナギドリ属"
rank:"genus"
upper:"procellariidae"
}
{
sc:"calonectris"
ja:"オオミズナギドリ属"
rank:"genus"
upper:"procellariidae"
}
{
sc:"puffinus"
ja:"ハイイロミズナギドリ属"
rank:"genus"
upper:"procellariidae"
}
{
sc:"bulweria"
ja:"アナドリ属"
rank:"genus"
upper:"procellariidae"
}
{
sc:"oceanites"
ja:"アシナガウミツバメ属"
rank:"genus"
upper:"hydrobatidae"
}
{
sc:"oceanodroma"
ja:"オーストンウミツバメ属"
rank:"genus"
upper:"hydrobatidae"
}
{
sc:"ciconia"
ja:"コウノトリ属"
rank:"genus"
upper:"ciconiidae"
}
{
sc:"fregata"
ja:"グンカンドリ属"
rank:"genus"
upper:"fregatidae"
}
{
sc:"sula"
ja:"カツオドリ属"
rank:"genus"
upper:"sulidae"
}
{
sc:"phalacrocorax"
ja:"ウ属"
rank:"genus"
upper:"phalacrocoracidae"
}
{
sc:"pelecanus"
ja:"ペリカン属"
rank:"genus"
upper:"pelecanidae"
}
{
sc:"botaurus"
ja:"サンカノゴイ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"ixobrychus"
ja:"ヨシゴイ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"gorsachius"
ja:"ミゾゴイ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"nycticorax"
ja:"ゴイサギ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"butorides"
ja:"ササゴイ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"ardeola"
ja:"アカガシラサギ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"bubulcus"
ja:"アマサギ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"ardea"
ja:"アオサギ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"egretta"
ja:"コサギ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"threskiornis"
ja:"クロトキ属"
rank:"genus"
upper:"threskiornithidae"
}
{
sc:"nipponia"
ja:"トキ属"
rank:"genus"
upper:"threskiornithidae"
}
{
sc:"platalea"
ja:"ヘラサギ属"
rank:"genus"
upper:"threskiornithidae"
}
{
sc:"grus"
ja:"ツル属"
rank:"genus"
upper:"gruidae"
}
{
sc:"anthropoides"
ja:"アネハヅル属"
rank:"genus"
upper:"gruidae"
}
{
sc:"coturnicops"
ja:"シマクイナ属"
rank:"genus"
upper:"rallidae"
}
{
sc:"rallina"
ja:"オオクイナ属"
rank:"genus"
upper:"rallidae"
}
{
sc:"gallirallus"
ja:"ヤンバルクイナ属"
rank:"genus"
upper:"rallidae"
}
{
sc:"rallus"
ja:"クイナ属"
rank:"genus"
upper:"rallidae"
}
{
sc:"amaurornis"
ja:"シロハラクイナ属"
rank:"genus"
upper:"rallidae"
}
{
sc:"porzana"
ja:"ヒメクイナ属"
rank:"genus"
upper:"rallidae"
}
{
sc:"gallicrex"
ja:"ツルクイナ属"
rank:"genus"
upper:"rallidae"
}
{
sc:"gallinula"
ja:"バン属"
rank:"genus"
upper:"rallidae"
}
{
sc:"fulica"
ja:"オオバン属"
rank:"genus"
upper:"rallidae"
}
{
sc:"otis"
ja:"ノガン属"
rank:"genus"
upper:"otididae"
}
{
sc:"tetrax"
ja:"ヒメノガン属"
rank:"genus"
upper:"otididae"
}
{
sc:"centropus"
ja:"バンケン属"
rank:"genus"
upper:"cuculidae"
}
{
sc:"clamator"
ja:"カンムリカッコウ属"
rank:"genus"
upper:"cuculidae"
}
{
sc:"eudynamys"
ja:"オニカッコウ属"
rank:"genus"
upper:"cuculidae"
}
{
sc:"urodynamis"
ja:"キジカッコウ属"
rank:"genus"
upper:"cuculidae"
}
{
sc:"surniculus"
ja:"オウチュウカッコウ属"
rank:"genus"
upper:"cuculidae"
}
{
sc:"hierococcyx"
ja:"ジュウイチ属"
rank:"genus"
upper:"cuculidae"
}
{
sc:"cuculus"
ja:"カッコウ属"
rank:"genus"
upper:"cuculidae"
}
{
sc:"caprimulgus"
ja:"ヨタカ属"
rank:"genus"
upper:"caprimulgidae"
}
{
sc:"aerodramus"
ja:"ヒマラヤアナツバメ属"
rank:"genus"
upper:"apodidae"
}
{
sc:"hirundapus"
ja:"ハリオアマツバメ属"
rank:"genus"
upper:"apodidae"
}
{
sc:"apus"
ja:"アマツバメ属"
rank:"genus"
upper:"apodidae"
}
{
sc:"vanellus"
ja:"タゲリ属"
rank:"genus"
upper:"charadriidae"
}
{
sc:"pluvialis"
ja:"ムナグロ属"
rank:"genus"
upper:"charadriidae"
}
{
sc:"charadrius"
ja:"チドリ属"
rank:"genus"
upper:"charadriidae"
}
{
sc:"haematopus"
ja:"ミヤコドリ属"
rank:"genus"
upper:"haematopodidae"
}
{
sc:"himantopus"
ja:"セイタカシギ属"
rank:"genus"
upper:"recurvirostridae"
}
{
sc:"recurvirostra"
ja:"ソリハシセイタカシギ属"
rank:"genus"
upper:"recurvirostridae"
}
{
sc:"scolopax"
ja:"ヤマシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"lymnocryptes"
ja:"コシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"gallinago"
ja:"タシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"limnodromus"
ja:"オオハシシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"limosa"
ja:"オグロシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"numenius"
ja:"ダイシャクシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"tringa"
ja:"クサシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"heteroscelus"
ja:"キアシシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"xenus"
ja:"ソリハシシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"actitis"
ja:"イソシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"arenaria"
ja:"キョウジョシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"calidris"
ja:"オバシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"eurynorhynchus"
ja:"ヘラシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"limicola"
ja:"キリアイ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"tryngites"
ja:"コモンシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"philomachus"
ja:"エリマキシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"phalaropus"
ja:"ヒレアシシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"hydrophasianus"
ja:"レンカク属"
rank:"genus"
upper:"jacanidae"
}
{
sc:"rostratula"
ja:"タマシギ属"
rank:"genus"
upper:"rostratulidae"
}
{
sc:"turnix"
ja:"ミフウズラ属"
rank:"genus"
upper:"turnicidae"
}
{
sc:"glareola"
ja:"ツバメチドリ属"
rank:"genus"
upper:"glareolidae"
}
{
sc:"anous"
ja:"クロアジサシ属"
rank:"genus"
upper:"laridae"
}
{
sc:"procelsterna"
ja:"ハイイロアジサシ属"
rank:"genus"
upper:"laridae"
}
{
sc:"gygis"
ja:"シロアジサシ属"
rank:"genus"
upper:"laridae"
}
{
sc:"rissa"
ja:"ミツユビカモメ属"
rank:"genus"
upper:"laridae"
}
{
sc:"pagophila"
ja:"ゾウゲカモメ属"
rank:"genus"
upper:"laridae"
}
{
sc:"xema"
ja:"クビワカモメ属"
rank:"genus"
upper:"laridae"
}
{
sc:"rhodostethia"
ja:"ヒメクビワカモメ属"
rank:"genus"
upper:"laridae"
}
{
sc:"larus"
ja:"カモメ属"
rank:"genus"
upper:"laridae"
}
{
sc:"gelochelidon"
ja:"ハシブトアジサシ属"
rank:"genus"
upper:"laridae"
}
{
sc:"sterna"
ja:"アジサシ属"
rank:"genus"
upper:"laridae"
}
{
sc:"chlidonias"
ja:"クロハラアジサシ属"
rank:"genus"
upper:"laridae"
}
{
sc:"stercorarius"
ja:"トウゾクカモメ属"
rank:"genus"
upper:"stercorariidae"
}
{
sc:"alle"
ja:"ヒメウミスズメ属"
rank:"genus"
upper:"alcidae"
}
{
sc:"uria"
ja:"ウミガラス属"
rank:"genus"
upper:"alcidae"
}
{
sc:"alca"
ja:"オオハシウミガラス属"
rank:"genus"
upper:"alcidae"
}
{
sc:"cepphus"
ja:"ウミバト属"
rank:"genus"
upper:"alcidae"
}
{
sc:"brachyramphus"
ja:"マダラウミスズメ属"
rank:"genus"
upper:"alcidae"
}
{
sc:"synthliboramphus"
ja:"ウミスズメ属"
rank:"genus"
upper:"alcidae"
}
{
sc:"aethia"
ja:"エトロフウミスズメ属"
rank:"genus"
upper:"alcidae"
}
{
sc:"cerorhinca"
ja:"ウトウ属"
rank:"genus"
upper:"alcidae"
}
{
sc:"fratercula"
ja:"ツノメドリ属"
rank:"genus"
upper:"alcidae"
}
{
sc:"pandion"
ja:"ミサゴ属"
rank:"genus"
upper:"pandionidae"
}
{
sc:"pernis"
ja:"ハチクマ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"elanus"
ja:"カタグロトビ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"milvus"
ja:"トビ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"haliaeetus"
ja:"オジロワシ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"aegypius"
ja:"クロハゲワシ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"spilornis"
ja:"カンムリワシ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"circus"
ja:"チュウヒ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"accipiter"
ja:"ハイタカ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"butastur"
ja:"サシバ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"buteo"
ja:"ノスリ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"aquila"
ja:"イヌワシ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"nisaetus"
ja:"クマタカ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"tyto"
ja:"メンフクロウ属"
rank:"genus"
upper:"tytonidae"
}
{
sc:"otus"
ja:"コノハズク属"
rank:"genus"
upper:"strigidae"
}
{
sc:"bubo"
ja:"ワシミミズク属"
rank:"genus"
upper:"strigidae"
}
{
sc:"ketupa"
ja:"シマフクロウ属"
rank:"genus"
upper:"strigidae"
}
{
sc:"strix"
ja:"フクロウ属"
rank:"genus"
upper:"strigidae"
}
{
sc:"aegolius"
ja:"キンメフクロウ属"
rank:"genus"
upper:"strigidae"
}
{
sc:"ninox"
ja:"アオバズク属"
rank:"genus"
upper:"strigidae"
}
{
sc:"asio"
ja:"トラフズク属"
rank:"genus"
upper:"strigidae"
}
{
sc:"upupa"
ja:"ヤツガシラ属"
rank:"genus"
upper:"upupidae"
}
{
sc:"halcyon"
ja:"アカショウビン属"
rank:"genus"
upper:"alcedinidae"
}
{
sc:"todiramphus"
ja:"ナンヨウショウビン属"
rank:"genus"
upper:"alcedinidae"
}
{
sc:"alcedo"
ja:"カワセミ属"
rank:"genus"
upper:"alcedinidae"
}
{
sc:"ceyx"
ja:"ミツユビカワセミ属"
rank:"genus"
upper:"alcedinidae"
}
{
sc:"megaceryle"
ja:"ヤマセミ属"
rank:"genus"
upper:"alcedinidae"
}
{
sc:"merops"
ja:"ハチクイ属"
rank:"genus"
upper:"meropidae"
}
{
sc:"eurystomus"
ja:"ブッポウソウ属"
rank:"genus"
upper:"coraciidae"
}
{
sc:"jynx"
ja:"アリスイ属"
rank:"genus"
upper:"picidae"
}
{
sc:"dendrocopos"
ja:"アカゲラ属"
rank:"genus"
upper:"picidae"
}
{
sc:"picoides"
ja:"ミユビゲラ属"
rank:"genus"
upper:"picidae"
}
{
sc:"dryocopus"
ja:"クマゲラ属"
rank:"genus"
upper:"picidae"
}
{
sc:"picus"
ja:"アオゲラ属"
rank:"genus"
upper:"picidae"
}
{
sc:"sapheopipo"
ja:"ノグチゲラ属"
rank:"genus"
upper:"picidae"
}
{
sc:"falco"
ja:"ハヤブサ属"
rank:"genus"
upper:"falconidae"
}
{
sc:"pitta"
ja:"ヤイロチョウ属"
rank:"genus"
upper:"pittidae"
}
{
sc:"artamus"
ja:"モリツバメ属"
rank:"genus"
upper:"artamidae"
}
{
sc:"coracina"
ja:"アサクラサンショウクイ属"
rank:"genus"
upper:"campephagidae"
}
{
sc:"pericrocotus"
ja:"サンショウクイ属"
rank:"genus"
upper:"campephagidae"
}
{
sc:"oriolus"
ja:"コウライウグイス属"
rank:"genus"
upper:"oriolidae"
}
{
sc:"dicrurus"
ja:"オウチュウ属"
rank:"genus"
upper:"dicruridae"
}
{
sc:"hypothymis"
ja:"クロエリヒタキ属"
rank:"genus"
upper:"monarchidae"
}
{
sc:"terpsiphone"
ja:"サンコウチョウ属"
rank:"genus"
upper:"monarchidae"
}
{
sc:"lanius"
ja:"モズ属"
rank:"genus"
upper:"laniidae"
}
{
sc:"garrulus"
ja:"カケス属"
rank:"genus"
upper:"corvidae"
}
{
sc:"cyanopica"
ja:"オナガ属"
rank:"genus"
upper:"corvidae"
}
{
sc:"pica"
ja:"カササギ属"
rank:"genus"
upper:"corvidae"
}
{
sc:"nucifraga"
ja:"ホシガラス属"
rank:"genus"
upper:"corvidae"
}
{
sc:"corvus"
ja:"カラス属"
rank:"genus"
upper:"corvidae"
}
{
sc:"regulus"
ja:"キクイタダキ属"
rank:"genus"
upper:"regulidae"
}
{
sc:"remiz"
ja:"ツリスガラ属"
rank:"genus"
upper:"remizidae"
}
{
sc:"poecile"
ja:"コガラ属"
rank:"genus"
upper:"paridae"
}
{
sc:"periparus"
ja:"ヒガラ属"
rank:"genus"
upper:"paridae"
}
{
sc:"parus"
ja:"シジュウカラ属"
rank:"genus"
upper:"paridae"
}
{
sc:"cyanistes"
ja:"ルリガラ属"
rank:"genus"
upper:"paridae"
}
{
sc:"panurus"
ja:"ヒゲガラ属"
rank:"genus"
upper:"panuridae"
}
{
sc:"melanocorypha"
ja:"コウテンシ属"
rank:"genus"
upper:"alaudidae"
}
{
sc:"calandrella"
ja:"ヒメコウテンシ属"
rank:"genus"
upper:"alaudidae"
}
{
sc:"alauda"
ja:"ヒバリ属"
rank:"genus"
upper:"alaudidae"
}
{
sc:"eremophila"
ja:"ハマヒバリ属"
rank:"genus"
upper:"alaudidae"
}
{
sc:"riparia"
ja:"ショウドウツバメ属"
rank:"genus"
upper:"hirundinidae"
}
{
sc:"tachycineta"
ja:"ミドリツバメ属"
rank:"genus"
upper:"hirundinidae"
}
{
sc:"hirundo"
ja:"ツバメ属"
rank:"genus"
upper:"hirundinidae"
}
{
sc:"delichon"
ja:"イワツバメ属"
rank:"genus"
upper:"hirundinidae"
}
{
sc:"pycnonotus"
ja:"シロガシラ属"
rank:"genus"
upper:"pycnonotidae"
}
{
sc:"hypsipetes"
ja:"ヒヨドリ属"
rank:"genus"
upper:"pycnonotidae"
}
{
sc:"cettia"
ja:"ウグイス属"
rank:"genus"
upper:"cettiidae"
}
{
sc:"urosphena"
ja:"ヤブサメ属"
rank:"genus"
upper:"cettiidae"
}
{
sc:"aegithalos"
ja:"エナガ属"
rank:"genus"
upper:"aegithalidae"
}
{
sc:"phylloscopus"
ja:"ムシクイ属"
rank:"genus"
upper:"phylloscopidae"
}
{
sc:"sylvia"
ja:"ズグロムシクイ属"
rank:"genus"
upper:"sylviidae"
}
{
sc:"apalopteron"
ja:"メグロ属"
rank:"genus"
upper:"zosteropidae"
}
{
sc:"zosterops"
ja:"メジロ属"
rank:"genus"
upper:"zosteropidae"
}
{
sc:"locustella"
ja:"センニュウ属"
rank:"genus"
upper:"locustellidae"
}
{
sc:"acrocephalus"
ja:"ヨシキリ属"
rank:"genus"
upper:"acrocephalidae"
}
{
sc:"iduna"
ja:"ヒメウタイムシクイ属"
rank:"genus"
upper:"acrocephalidae"
}
{
sc:"cisticola"
ja:"セッカ属"
rank:"genus"
upper:"cisticolidae"
}
{
sc:"bombycilla"
ja:"レンジャク属"
rank:"genus"
upper:"bombycillidae"
}
{
sc:"sitta"
ja:"ゴジュウカラ属"
rank:"genus"
upper:"sittidae"
}
{
sc:"certhia"
ja:"キバシリ属"
rank:"genus"
upper:"certhiidae"
}
{
sc:"troglodytes"
ja:"ミソサザイ属"
rank:"genus"
upper:"troglodytidae"
}
{
sc:"spodiopsar"
ja:"ムクドリ属"
rank:"genus"
upper:"sturnidae"
}
{
sc:"agropsar"
ja:"コムクドリ属"
rank:"genus"
upper:"sturnidae"
}
{
sc:"sturnia"
ja:"カラムクドリ属"
rank:"genus"
upper:"sturnidae"
}
{
sc:"pastor"
ja:"バライロムクドリ属"
rank:"genus"
upper:"sturnidae"
}
{
sc:"sturnus"
ja:"ホシムクドリ属"
rank:"genus"
upper:"sturnidae"
}
{
sc:"cinclus"
ja:"カワガラス属"
rank:"genus"
upper:"cinclidae"
}
{
sc:"zoothera"
ja:"トラツグミ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"cichlopasser"
ja:"オガサワラガビチョウ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"catharus"
ja:"チャツグミ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"turdus"
ja:"ツグミ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"erithacus"
ja:"ヨーロッパコマドリ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"luscinia"
ja:"ノゴマ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"tarsiger"
ja:"ルリビタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"phoenicurus"
ja:"ジョウビタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"saxicola"
ja:"ノビタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"oenanthe"
ja:"サバクヒタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"monticola"
ja:"イソヒヨドリ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"muscicapa"
ja:"サメビタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"ficedula"
ja:"キビタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"cyanoptila"
ja:"オオルリ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"eumyias"
ja:"アイイロヒタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"niltava"
ja:"アオヒタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"prunella"
ja:"カヤクグリ属"
rank:"genus"
upper:"prunellidae"
}
{
sc:"passer"
ja:"スズメ属"
rank:"genus"
upper:"passeridae"
}
{
sc:"dendronanthus"
ja:"イワミセキレイ属"
rank:"genus"
upper:"motacillidae"
}
{
sc:"motacilla"
ja:"セキレイ属"
rank:"genus"
upper:"motacillidae"
}
{
sc:"anthus"
ja:"タヒバリ属"
rank:"genus"
upper:"motacillidae"
}
{
sc:"fringilla"
ja:"アトリ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"chloris"
ja:"カワラヒワ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"carduelis"
ja:"マヒワ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"leucosticte"
ja:"ハギマシコ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"uragus"
ja:"ベニマシコ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"chaunoproctus"
ja:"オガサワラマシコ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"carpodacus"
ja:"オオマシコ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"pinicola"
ja:"ギンザンマシコ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"loxia"
ja:"イスカ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"pyrrhula"
ja:"ウソ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"coccothraustes"
ja:"シメ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"eophona"
ja:"イカル属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"calcarius"
ja:"ツメナガホオジロ属"
rank:"genus"
upper:"calcariidae"
}
{
sc:"plectrophenax"
ja:"ユキホオジロ属"
rank:"genus"
upper:"calcariidae"
}
{
sc:"setophaga"
ja:"ハゴロモムシクイ属"
rank:"genus"
upper:"parulidae"
}
{
sc:"cardellina"
ja:"アカガオアメリカムシクイ属"
rank:"genus"
upper:"parulidae"
}
{
sc:"emberiza"
ja:"ホオジロ属"
rank:"genus"
upper:"emberizidae"
}
{
sc:"passerella"
ja:"ゴマフスズメ属"
rank:"genus"
upper:"emberizidae"
}
{
sc:"melospiza"
ja:"ウタスズメ属"
rank:"genus"
upper:"emberizidae"
}
{
sc:"zonotrichia"
ja:"ミヤマシトド属"
rank:"genus"
upper:"emberizidae"
}
{
sc:"passerculus"
ja:"サバンナシトド属"
rank:"genus"
upper:"emberizidae"
}
{
sc:"bambusicola"
ja:"コジュケイ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"syrmaticus"
ja:"ヤマドリ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"phasianus"
ja:"キジ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"pavo"
ja:"クジャク属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"branta"
ja:"コクガン属"
rank:"genus"
upper:"anatidae"
}
{
sc:"cygnus"
ja:"ハクチョウ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"columba"
ja:"カワラバト属"
rank:"genus"
upper:"columbidae"
}
{
sc:"streptopelia"
ja:"キジバト属"
rank:"genus"
upper:"columbidae"
}
{
sc:"ciconia"
ja:"コウノトリ属"
rank:"genus"
upper:"ciconiidae"
}
{
sc:"nipponia"
ja:"トキ属"
rank:"genus"
upper:"threskiornithidae"
}
{
sc:"himantopus"
ja:"セイタカシギ属"
rank:"genus"
upper:"recurvirostridae"
}
{
sc:"melopsittacus"
ja:"セキセイインコ属"
rank:"genus"
upper:"psittacidae"
}
{
sc:"psittacula"
ja:"ダルマインコ属"
rank:"genus"
upper:"psittacidae"
}
{
sc:"myiopsitta"
ja:"オキナインコ属"
rank:"genus"
upper:"psittacidae"
}
{
sc:"urocissa"
ja:"サンジャク属"
rank:"genus"
upper:"corvidae"
}
{
sc:"pica"
ja:"カササギ属"
rank:"genus"
upper:"corvidae"
}
{
sc:"zosterops"
ja:"メジロ属"
rank:"genus"
upper:"zosteropidae"
}
{
sc:"garrulax"
ja:"ガビチョウ属"
rank:"genus"
upper:"timaliidae"
}
{
sc:"leiothrix"
ja:"ソウシチョウ属"
rank:"genus"
upper:"timaliidae"
}
{
sc:"acridotheres"
ja:"ハッカチョウ属"
rank:"genus"
upper:"sturnidae"
}
{
sc:"gracupica"
ja:"クビワムクドリ属"
rank:"genus"
upper:"sturnidae"
}
{
sc:"ploceus"
ja:"キハタオリ属"
rank:"genus"
upper:"ploceidae"
}
{
sc:"euplectes"
ja:"キンランチョウ属"
rank:"genus"
upper:"ploceidae"
}
{
sc:"estrilda"
ja:"カエデチョウ属"
rank:"genus"
upper:"estrildidae"
}
{
sc:"amandava"
ja:"ベニスズメ属"
rank:"genus"
upper:"estrildidae"
}
{
sc:"lonchura"
ja:"キンパラ属"
rank:"genus"
upper:"estrildidae"
}
{
sc:"vidua"
ja:"テンニンチョウ属"
rank:"genus"
upper:"viduidae"
}
{
sc:"paroaria"
ja:"コウカンチョウ属"
rank:"genus"
upper:"thraupidae"
}
,{
"alien":false,
upper:"tetrastes"
rank:"species"
sc:"bonasia"
ja:"エゾライチョウ"
}
{
"alien":false,
upper:"lagopus"
rank:"species"
sc:"muta"
ja:"ライチョウ"
}
{
"alien":false,
upper:"coturnix"
rank:"species"
sc:"japonica"
ja:"ウズラ"
}
{
"alien":false,
upper:"syrmaticus"
rank:"species"
sc:"soemmerringii"
ja:"ヤマドリ"
}
{
"alien":false,
upper:"phasianus"
rank:"species"
sc:"colchicus"
ja:"キジ"
}
{
"alien":false,
upper:"dendrocygna"
rank:"species"
sc:"javanica"
ja:"リュウキュウガモ"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"cygnoides"
ja:"サカツラガン"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"fabalis"
ja:"ヒシクイ"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"anser"
ja:"ハイイロガン"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"albifrons"
ja:"マガン"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"erythropus"
ja:"カリガネ"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"indicus"
ja:"インドガン"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"caerulescens"
ja:"ハクガン"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"canagicus"
ja:"ミカドガン"
}
{
"alien":false,
upper:"branta"
rank:"species"
sc:"hutchinsii"
ja:"シジュウカラガン"
}
{
"alien":false,
upper:"branta"
rank:"species"
sc:"bernicla"
ja:"コクガン"
}
{
"alien":false,
upper:"cygnus"
rank:"species"
sc:"olor"
ja:"コブハクチョウ"
}
{
"alien":false,
upper:"cygnus"
rank:"species"
sc:"buccinator"
ja:"ナキハクチョウ"
}
{
"alien":false,
upper:"cygnus"
rank:"species"
sc:"columbianus"
ja:"コハクチョウ"
}
{
"alien":false,
upper:"cygnus"
rank:"species"
sc:"cygnus"
ja:"オオハクチョウ"
}
{
"alien":false,
upper:"tadorna"
rank:"species"
sc:"tadorna"
ja:"ツクシガモ"
}
{
"alien":false,
upper:"tadorna"
rank:"species"
sc:"ferruginea"
ja:"アカツクシガモ"
}
{
"alien":false,
upper:"tadorna"
rank:"species"
sc:"cristata"
ja:"カンムリツクシガモ"
}
{
"alien":false,
upper:"aix"
rank:"species"
sc:"galericulata"
ja:"オシドリ"
}
{
"alien":false,
upper:"nettapus"
rank:"species"
sc:"coromandelianus"
ja:"ナンキンオシ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"strepera"
ja:"オカヨシガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"falcata"
ja:"ヨシガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"penelope"
ja:"ヒドリガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"americana"
ja:"アメリカヒドリ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"platyrhynchos"
ja:"マガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"luzonica"
ja:"アカノドカルガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"zonorhyncha"
ja:"カルガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"discors"
ja:"ミカヅキシマアジ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"clypeata"
ja:"ハシビロガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"acuta"
ja:"オナガガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"querquedula"
ja:"シマアジ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"formosa"
ja:"トモエガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"crecca"
ja:"コガモ"
}
{
"alien":false,
upper:"netta"
rank:"species"
sc:"rufina"
ja:"アカハシハジロ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"valisineria"
ja:"オオホシハジロ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"americana"
ja:"アメリカホシハジロ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"ferina"
ja:"ホシハジロ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"baeri"
ja:"アカハジロ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"nyroca"
ja:"メジロガモ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"collaris"
ja:"クビワキンクロ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"fuligula"
ja:"キンクロハジロ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"marila"
ja:"スズガモ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"affinis"
ja:"コスズガモ"
}
{
"alien":false,
upper:"polysticta"
rank:"species"
sc:"stelleri"
ja:"コケワタガモ"
}
{
"alien":false,
upper:"somateria"
rank:"species"
sc:"spectabilis"
ja:"ケワタガモ"
}
{
"alien":false,
upper:"histrionicus"
rank:"species"
sc:"histrionicus"
ja:"シノリガモ"
}
{
"alien":false,
upper:"melanitta"
rank:"species"
sc:"perspicillata"
ja:"アラナミキンクロ"
}
{
"alien":false,
upper:"melanitta"
rank:"species"
sc:"fusca"
ja:"ビロードキンクロ"
}
{
"alien":false,
upper:"melanitta"
rank:"species"
sc:"americana"
ja:"クロガモ"
}
{
"alien":false,
upper:"clangula"
rank:"species"
sc:"hyemalis"
ja:"コオリガモ"
}
{
"alien":false,
upper:"bucephala"
rank:"species"
sc:"albeola"
ja:"ヒメハジロ"
}
{
"alien":false,
upper:"bucephala"
rank:"species"
sc:"clangula"
ja:"ホオジロガモ"
}
{
"alien":false,
upper:"mergellus"
rank:"species"
sc:"albellus"
ja:"ミコアイサ"
}
{
"alien":false,
upper:"mergus"
rank:"species"
sc:"merganser"
ja:"カワアイサ"
}
{
"alien":false,
upper:"mergus"
rank:"species"
sc:"serrator"
ja:"ウミアイサ"
}
{
"alien":false,
upper:"mergus"
rank:"species"
sc:"squamatus"
ja:"コウライアイサ"
}
{
"alien":false,
upper:"tachybaptus"
rank:"species"
sc:"ruficollis"
ja:"カイツブリ"
}
{
"alien":false,
upper:"podiceps"
rank:"species"
sc:"grisegena"
ja:"アカエリカイツブリ"
}
{
"alien":false,
upper:"podiceps"
rank:"species"
sc:"cristatus"
ja:"カンムリカイツブリ"
}
{
"alien":false,
upper:"podiceps"
rank:"species"
sc:"auritus"
ja:"ミミカイツブリ"
}
{
"alien":false,
upper:"podiceps"
rank:"species"
sc:"nigricollis"
ja:"ハジロカイツブリ"
}
{
"alien":false,
upper:"phaethon"
rank:"species"
sc:"rubricauda"
ja:"アカオネッタイチョウ"
}
{
"alien":false,
upper:"phaethon"
rank:"species"
sc:"lepturus"
ja:"シラオネッタイチョウ"
}
{
"alien":false,
upper:"syrrhaptes"
rank:"species"
sc:"paradoxus"
ja:"サケイ"
}
{
"alien":false,
upper:"columba"
rank:"species"
sc:"oenas"
ja:"ヒメモリバト"
}
{
"alien":false,
upper:"columba"
rank:"species"
sc:"janthina"
ja:"カラスバト"
}
{
"alien":false,
upper:"columba"
rank:"species"
sc:"versicolor"
ja:"オガサワラカラスバト"
}
{
"alien":false,
upper:"columba"
rank:"species"
sc:"jouyi"
ja:"リュウキュウカラスバト"
}
{
"alien":false,
upper:"streptopelia"
rank:"species"
sc:"orientalis"
ja:"キジバト"
}
{
"alien":false,
upper:"streptopelia"
rank:"species"
sc:"decaocto"
ja:"シラコバト"
}
{
"alien":false,
upper:"streptopelia"
rank:"species"
sc:"tranquebarica"
ja:"ベニバト"
}
{
"alien":false,
upper:"chalcophaps"
rank:"species"
sc:"indica"
ja:"キンバト"
}
{
"alien":false,
upper:"treron"
rank:"species"
sc:"sieboldii"
ja:"アオバト"
}
{
"alien":false,
upper:"treron"
rank:"species"
sc:"formosae"
ja:"ズアカアオバト"
}
{
"alien":false,
upper:"ptilinopus"
rank:"species"
sc:"leclancheri"
ja:"クロアゴヒメアオバト"
}
{
"alien":false,
upper:"gavia"
rank:"species"
sc:"stellata"
ja:"アビ"
}
{
"alien":false,
upper:"gavia"
rank:"species"
sc:"arctica"
ja:"オオハム"
}
{
"alien":false,
upper:"gavia"
rank:"species"
sc:"pacifica"
ja:"シロエリオオハム"
}
{
"alien":false,
upper:"gavia"
rank:"species"
sc:"immer"
ja:"ハシグロアビ"
}
{
"alien":false,
upper:"gavia"
rank:"species"
sc:"adamsii"
ja:"ハシジロアビ"
}
{
"alien":false,
upper:"phoebastria"
rank:"species"
sc:"immutabilis"
ja:"コアホウドリ"
}
{
"alien":false,
upper:"phoebastria"
rank:"species"
sc:"nigripes"
ja:"クロアシアホウドリ"
}
{
"alien":false,
upper:"phoebastria"
rank:"species"
sc:"albatrus"
ja:"アホウドリ"
}
{
"alien":false,
upper:"fulmarus"
rank:"species"
sc:"glacialis"
ja:"フルマカモメ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"solandri"
ja:"ハジロミズナギドリ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"externa"
ja:"オオシロハラミズナギドリ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"neglecta"
ja:"カワリシロハラミズナギドリ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"phaeopygia"
ja:"ハワイシロハラミズナギドリ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"inexpectata"
ja:"マダラシロハラミズナギドリ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"nigripennis"
ja:"ハグロシロハラミズナギドリ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"hypoleuca"
ja:"シロハラミズナギドリ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"longirostris"
ja:"ヒメシロハラミズナギドリ"
}
{
"alien":false,
upper:"calonectris"
rank:"species"
sc:"leucomelas"
ja:"オオミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"pacificus"
ja:"オナガミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"bulleri"
ja:"ミナミオナガミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"griseus"
ja:"ハイイロミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"tenuirostris"
ja:"ハシボソミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"creatopus"
ja:"シロハラアカアシミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"carneipes"
ja:"アカアシミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"nativitatis"
ja:"コミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"puffinus"
ja:"マンクスミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"newelli"
ja:"ハワイセグロミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"lherminieri"
ja:"セグロミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"bryani"
ja:"オガサワラヒメミズナギドリ"
}
{
"alien":false,
upper:"bulweria"
rank:"species"
sc:"bulwerii"
ja:"アナドリ"
}
{
"alien":false,
upper:"oceanites"
rank:"species"
sc:"oceanicus"
ja:"アシナガウミツバメ"
}
{
"alien":false,
upper:"oceanodroma"
rank:"species"
sc:"castro"
ja:"クロコシジロウミツバメ"
}
{
"alien":false,
upper:"oceanodroma"
rank:"species"
sc:"monorhis"
ja:"ヒメクロウミツバメ"
}
{
"alien":false,
upper:"oceanodroma"
rank:"species"
sc:"leucorhoa"
ja:"コシジロウミツバメ"
}
{
"alien":false,
upper:"oceanodroma"
rank:"species"
sc:"tristrami"
ja:"オーストンウミツバメ"
}
{
"alien":false,
upper:"oceanodroma"
rank:"species"
sc:"matsudairae"
ja:"クロウミツバメ"
}
{
"alien":false,
upper:"oceanodroma"
rank:"species"
sc:"furcata"
ja:"ハイイロウミツバメ"
}
{
"alien":false,
upper:"ciconia"
rank:"species"
sc:"nigra"
ja:"ナベコウ"
}
{
"alien":false,
upper:"ciconia"
rank:"species"
sc:"boyciana"
ja:"コウノトリ"
}
{
"alien":false,
upper:"fregata"
rank:"species"
sc:"minor"
ja:"オオグンカンドリ"
}
{
"alien":false,
upper:"fregata"
rank:"species"
sc:"ariel"
ja:"コグンカンドリ"
}
{
"alien":false,
upper:"sula"
rank:"species"
sc:"dactylatra"
ja:"アオツラカツオドリ"
}
{
"alien":false,
upper:"sula"
rank:"species"
sc:"sula"
ja:"アカアシカツオドリ"
}
{
"alien":false,
upper:"sula"
rank:"species"
sc:"leucogaster"
ja:"カツオドリ"
}
{
"alien":false,
upper:"phalacrocorax"
rank:"species"
sc:"pelagicus"
ja:"ヒメウ"
}
{
"alien":false,
upper:"phalacrocorax"
rank:"species"
sc:"urile"
ja:"チシマウガラス"
}
{
"alien":false,
upper:"phalacrocorax"
rank:"species"
sc:"carbo"
ja:"カワウ"
}
{
"alien":false,
upper:"phalacrocorax"
rank:"species"
sc:"capillatus"
ja:"ウミウ"
}
{
"alien":false,
upper:"pelecanus"
rank:"species"
sc:"onocrotalus"
ja:"モモイロペリカン"
}
{
"alien":false,
upper:"pelecanus"
rank:"species"
sc:"philippensis"
ja:"ホシバシペリカン"
}
{
"alien":false,
upper:"pelecanus"
rank:"species"
sc:"crispus"
ja:"ハイイロペリカン"
}
{
"alien":false,
upper:"botaurus"
rank:"species"
sc:"stellaris"
ja:"サンカノゴイ"
}
{
"alien":false,
upper:"ixobrychus"
rank:"species"
sc:"sinensis"
ja:"ヨシゴイ"
}
{
"alien":false,
upper:"ixobrychus"
rank:"species"
sc:"eurhythmus"
ja:"オオヨシゴイ"
}
{
"alien":false,
upper:"ixobrychus"
rank:"species"
sc:"cinnamomeus"
ja:"リュウキュウヨシゴイ"
}
{
"alien":false,
upper:"ixobrychus"
rank:"species"
sc:"flavicollis"
ja:"タカサゴクロサギ"
}
{
"alien":false,
upper:"gorsachius"
rank:"species"
sc:"goisagi"
ja:"ミゾゴイ"
}
{
"alien":false,
upper:"gorsachius"
rank:"species"
sc:"melanolophus"
ja:"ズグロミゾゴイ"
}
{
"alien":false,
upper:"nycticorax"
rank:"species"
sc:"nycticorax"
ja:"ゴイサギ"
}
{
"alien":false,
upper:"nycticorax"
rank:"species"
sc:"caledonicus"
ja:"ハシブトゴイ"
}
{
"alien":false,
upper:"butorides"
rank:"species"
sc:"striata"
ja:"ササゴイ"
}
{
"alien":false,
upper:"ardeola"
rank:"species"
sc:"bacchus"
ja:"アカガシラサギ"
}
{
"alien":false,
upper:"bubulcus"
rank:"species"
sc:"ibis"
ja:"アマサギ"
}
{
"alien":false,
upper:"ardea"
rank:"species"
sc:"cinerea"
ja:"アオサギ"
}
{
"alien":false,
upper:"ardea"
rank:"species"
sc:"purpurea"
ja:"ムラサキサギ"
}
{
"alien":false,
upper:"ardea"
rank:"species"
sc:"alba"
ja:"ダイサギ"
}
{
"alien":false,
upper:"egretta"
rank:"species"
sc:"intermedia"
ja:"チュウサギ"
}
{
"alien":false,
upper:"egretta"
rank:"species"
sc:"garzetta"
ja:"コサギ"
}
{
"alien":false,
upper:"egretta"
rank:"species"
sc:"sacra"
ja:"クロサギ"
}
{
"alien":false,
upper:"egretta"
rank:"species"
sc:"eulophotes"
ja:"カラシラサギ"
}
{
"alien":false,
upper:"threskiornis"
rank:"species"
sc:"melanocephalus"
ja:"クロトキ"
}
{
"alien":false,
upper:"nipponia"
rank:"species"
sc:"nippon"
ja:"トキ"
}
{
"alien":false,
upper:"platalea"
rank:"species"
sc:"leucorodia"
ja:"ヘラサギ"
}
{
"alien":false,
upper:"platalea"
rank:"species"
sc:"minor"
ja:"クロツラヘラサギ"
}
{
"alien":false,
upper:"grus"
rank:"species"
sc:"leucogeranus"
ja:"ソデグロヅル"
}
{
"alien":false,
upper:"grus"
rank:"species"
sc:"canadensis"
ja:"カナダヅル"
}
{
"alien":false,
upper:"grus"
rank:"species"
sc:"vipio"
ja:"マナヅル"
}
{
"alien":false,
upper:"grus"
rank:"species"
sc:"japonensis"
ja:"タンチョウ"
}
{
"alien":false,
upper:"grus"
rank:"species"
sc:"grus"
ja:"クロヅル"
}
{
"alien":false,
upper:"grus"
rank:"species"
sc:"monacha"
ja:"ナベヅル"
}
{
"alien":false,
upper:"anthropoides"
rank:"species"
sc:"virgo"
ja:"アネハヅル"
}
{
"alien":false,
upper:"coturnicops"
rank:"species"
sc:"exquisitus"
ja:"シマクイナ"
}
{
"alien":false,
upper:"rallina"
rank:"species"
sc:"eurizonoides"
ja:"オオクイナ"
}
{
"alien":false,
upper:"gallirallus"
rank:"species"
sc:"okinawae"
ja:"ヤンバルクイナ"
}
{
"alien":false,
upper:"gallirallus"
rank:"species"
sc:"striatus"
ja:"ミナミクイナ"
}
{
"alien":false,
upper:"rallus"
rank:"species"
sc:"aquaticus"
ja:"クイナ"
}
{
"alien":false,
upper:"amaurornis"
rank:"species"
sc:"phoenicurus"
ja:"シロハラクイナ"
}
{
"alien":false,
upper:"porzana"
rank:"species"
sc:"pusilla"
ja:"ヒメクイナ"
}
{
"alien":false,
upper:"porzana"
rank:"species"
sc:"porzana"
ja:"コモンクイナ"
}
{
"alien":false,
upper:"porzana"
rank:"species"
sc:"fusca"
ja:"ヒクイナ"
}
{
"alien":false,
upper:"porzana"
rank:"species"
sc:"paykullii"
ja:"コウライクイナ"
}
{
"alien":false,
upper:"porzana"
rank:"species"
sc:"cinerea"
ja:"マミジロクイナ"
}
{
"alien":false,
upper:"gallicrex"
rank:"species"
sc:"cinerea"
ja:"ツルクイナ"
}
{
"alien":false,
upper:"gallinula"
rank:"species"
sc:"chloropus"
ja:"バン"
}
{
"alien":false,
upper:"fulica"
rank:"species"
sc:"atra"
ja:"オオバン"
}
{
"alien":false,
upper:"otis"
rank:"species"
sc:"tarda"
ja:"ノガン"
}
{
"alien":false,
upper:"tetrax"
rank:"species"
sc:"tetrax"
ja:"ヒメノガン"
}
{
"alien":false,
upper:"centropus"
rank:"species"
sc:"bengalensis"
ja:"バンケン"
}
{
"alien":false,
upper:"clamator"
rank:"species"
sc:"coromandus"
ja:"カンムリカッコウ"
}
{
"alien":false,
upper:"eudynamys"
rank:"species"
sc:"scolopaceus"
ja:"オニカッコウ"
}
{
"alien":false,
upper:"urodynamis"
rank:"species"
sc:"taitensis"
ja:"キジカッコウ"
}
{
"alien":false,
upper:"surniculus"
rank:"species"
sc:"lugubris"
ja:"オウチュウカッコウ"
}
{
"alien":false,
upper:"hierococcyx"
rank:"species"
sc:"sparverioides"
ja:"オオジュウイチ"
}
{
"alien":false,
upper:"hierococcyx"
rank:"species"
sc:"hyperythrus"
ja:"ジュウイチ"
}
{
"alien":false,
upper:"cuculus"
rank:"species"
sc:"poliocephalus"
ja:"ホトトギス"
}
{
"alien":false,
upper:"cuculus"
rank:"species"
sc:"micropterus"
ja:"セグロカッコウ"
}
{
"alien":false,
upper:"cuculus"
rank:"species"
sc:"optatus"
ja:"ツツドリ"
}
{
"alien":false,
upper:"cuculus"
rank:"species"
sc:"canorus"
ja:"カッコウ"
}
{
"alien":false,
upper:"caprimulgus"
rank:"species"
sc:"indicus"
ja:"ヨタカ"
}
{
"alien":false,
upper:"aerodramus"
rank:"species"
sc:"brevirostris"
ja:"ヒマラヤアナツバメ"
}
{
"alien":false,
upper:"hirundapus"
rank:"species"
sc:"caudacutus"
ja:"ハリオアマツバメ"
}
{
"alien":false,
upper:"apus"
rank:"species"
sc:"pacificus"
ja:"アマツバメ"
}
{
"alien":false,
upper:"apus"
rank:"species"
sc:"nipalensis"
ja:"ヒメアマツバメ"
}
{
"alien":false,
upper:"vanellus"
rank:"species"
sc:"vanellus"
ja:"タゲリ"
}
{
"alien":false,
upper:"vanellus"
rank:"species"
sc:"cinereus"
ja:"ケリ"
}
{
"alien":false,
upper:"pluvialis"
rank:"species"
sc:"apricaria"
ja:"ヨーロッパムナグロ"
}
{
"alien":false,
upper:"pluvialis"
rank:"species"
sc:"fulva"
ja:"ムナグロ"
}
{
"alien":false,
upper:"pluvialis"
rank:"species"
sc:"dominica"
ja:"アメリカムナグロ"
}
{
"alien":false,
upper:"pluvialis"
rank:"species"
sc:"squatarola"
ja:"ダイゼン"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"hiaticula"
ja:"ハジロコチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"semipalmatus"
ja:"ミズカキチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"placidus"
ja:"イカルチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"dubius"
ja:"コチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"alexandrinus"
ja:"シロチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"mongolus"
ja:"メダイチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"leschenaultii"
ja:"オオメダイチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"veredus"
ja:"オオチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"morinellus"
ja:"コバシチドリ"
}
{
"alien":false,
upper:"haematopus"
rank:"species"
sc:"ostralegus"
ja:"ミヤコドリ"
}
{
"alien":false,
upper:"himantopus"
rank:"species"
sc:"himantopus"
ja:"セイタカシギ"
}
{
"alien":false,
upper:"recurvirostra"
rank:"species"
sc:"avosetta"
ja:"ソリハシセイタカシギ"
}
{
"alien":false,
upper:"scolopax"
rank:"species"
sc:"rusticola"
ja:"ヤマシギ"
}
{
"alien":false,
upper:"scolopax"
rank:"species"
sc:"mira"
ja:"アマミヤマシギ"
}
{
"alien":false,
upper:"lymnocryptes"
rank:"species"
sc:"minimus"
ja:"コシギ"
}
{
"alien":false,
upper:"gallinago"
rank:"species"
sc:"solitaria"
ja:"アオシギ"
}
{
"alien":false,
upper:"gallinago"
rank:"species"
sc:"hardwickii"
ja:"オオジシギ"
}
{
"alien":false,
upper:"gallinago"
rank:"species"
sc:"stenura"
ja:"ハリオシギ"
}
{
"alien":false,
upper:"gallinago"
rank:"species"
sc:"megala"
ja:"チュウジシギ"
}
{
"alien":false,
upper:"gallinago"
rank:"species"
sc:"gallinago"
ja:"タシギ"
}
{
"alien":false,
upper:"limnodromus"
rank:"species"
sc:"griseus"
ja:"アメリカオオハシシギ"
}
{
"alien":false,
upper:"limnodromus"
rank:"species"
sc:"scolopaceus"
ja:"オオハシシギ"
}
{
"alien":false,
upper:"limnodromus"
rank:"species"
sc:"semipalmatus"
ja:"シベリアオオハシシギ"
}
{
"alien":false,
upper:"limosa"
rank:"species"
sc:"limosa"
ja:"オグロシギ"
}
{
"alien":false,
upper:"limosa"
rank:"species"
sc:"haemastica"
ja:"アメリカオグロシギ"
}
{
"alien":false,
upper:"limosa"
rank:"species"
sc:"lapponica"
ja:"オオソリハシシギ"
}
{
"alien":false,
upper:"numenius"
rank:"species"
sc:"minutus"
ja:"コシャクシギ"
}
{
"alien":false,
upper:"numenius"
rank:"species"
sc:"phaeopus"
ja:"チュウシャクシギ"
}
{
"alien":false,
upper:"numenius"
rank:"species"
sc:"tahitiensis"
ja:"ハリモモチュウシャク"
}
{
"alien":false,
upper:"numenius"
rank:"species"
sc:"tenuirostris"
ja:"シロハラチュウシャクシギ"
}
{
"alien":false,
upper:"numenius"
rank:"species"
sc:"arquata"
ja:"ダイシャクシギ"
}
{
"alien":false,
upper:"numenius"
rank:"species"
sc:"madagascariensis"
ja:"ホウロクシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"erythropus"
ja:"ツルシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"totanus"
ja:"アカアシシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"stagnatilis"
ja:"コアオアシシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"nebularia"
ja:"アオアシシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"guttifer"
ja:"カラフトアオアシシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"melanoleuca"
ja:"オオキアシシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"flavipes"
ja:"コキアシシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"ochropus"
ja:"クサシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"glareola"
ja:"タカブシギ"
}
{
"alien":false,
upper:"heteroscelus"
rank:"species"
sc:"brevipes"
ja:"キアシシギ"
}
{
"alien":false,
upper:"heteroscelus"
rank:"species"
sc:"incanus"
ja:"メリケンキアシシギ"
}
{
"alien":false,
upper:"xenus"
rank:"species"
sc:"cinereus"
ja:"ソリハシシギ"
}
{
"alien":false,
upper:"actitis"
rank:"species"
sc:"hypoleucos"
ja:"イソシギ"
}
{
"alien":false,
upper:"actitis"
rank:"species"
sc:"macularius"
ja:"アメリカイソシギ"
}
{
"alien":false,
upper:"arenaria"
rank:"species"
sc:"interpres"
ja:"キョウジョシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"tenuirostris"
ja:"オバシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"canutus"
ja:"コオバシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"alba"
ja:"ミユビシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"mauri"
ja:"ヒメハマシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"ruficollis"
ja:"トウネン"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"minuta"
ja:"ヨーロッパトウネン"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"temminckii"
ja:"オジロトウネン"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"subminuta"
ja:"ヒバリシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"fuscicollis"
ja:"コシジロウズラシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"bairdii"
ja:"ヒメウズラシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"melanotos"
ja:"アメリカウズラシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"acuminata"
ja:"ウズラシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"ferruginea"
ja:"サルハマシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"ptilocnemis"
ja:"チシマシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"alpina"
ja:"ハマシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"himantopus"
ja:"アシナガシギ"
}
{
"alien":false,
upper:"eurynorhynchus"
rank:"species"
sc:"pygmeus"
ja:"ヘラシギ"
}
{
"alien":false,
upper:"limicola"
rank:"species"
sc:"falcinellus"
ja:"キリアイ"
}
{
"alien":false,
upper:"tryngites"
rank:"species"
sc:"subruficollis"
ja:"コモンシギ"
}
{
"alien":false,
upper:"philomachus"
rank:"species"
sc:"pugnax"
ja:"エリマキシギ"
}
{
"alien":false,
upper:"phalaropus"
rank:"species"
sc:"tricolor"
ja:"アメリカヒレアシシギ"
}
{
"alien":false,
upper:"phalaropus"
rank:"species"
sc:"lobatus"
ja:"アカエリヒレアシシギ"
}
{
"alien":false,
upper:"phalaropus"
rank:"species"
sc:"fulicarius"
ja:"ハイイロヒレアシシギ"
}
{
"alien":false,
upper:"hydrophasianus"
rank:"species"
sc:"chirurgus"
ja:"レンカク"
}
{
"alien":false,
upper:"rostratula"
rank:"species"
sc:"benghalensis"
ja:"タマシギ"
}
{
"alien":false,
upper:"turnix"
rank:"species"
sc:"suscitator"
ja:"ミフウズラ"
}
{
"alien":false,
upper:"glareola"
rank:"species"
sc:"maldivarum"
ja:"ツバメチドリ"
}
{
"alien":false,
upper:"anous"
rank:"species"
sc:"stolidus"
ja:"クロアジサシ"
}
{
"alien":false,
upper:"anous"
rank:"species"
sc:"minutus"
ja:"ヒメクロアジサシ"
}
{
"alien":false,
upper:"procelsterna"
rank:"species"
sc:"cerulea"
ja:"ハイイロアジサシ"
}
{
"alien":false,
upper:"gygis"
rank:"species"
sc:"alba"
ja:"シロアジサシ"
}
{
"alien":false,
upper:"rissa"
rank:"species"
sc:"tridactyla"
ja:"ミツユビカモメ"
}
{
"alien":false,
upper:"rissa"
rank:"species"
sc:"brevirostris"
ja:"アカアシミツユビカモメ"
}
{
"alien":false,
upper:"pagophila"
rank:"species"
sc:"eburnea"
ja:"ゾウゲカモメ"
}
{
"alien":false,
upper:"xema"
rank:"species"
sc:"sabini"
ja:"クビワカモメ"
}
{
"alien":false,
upper:"rhodostethia"
rank:"species"
sc:"rosea"
ja:"ヒメクビワカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"genei"
ja:"ハシボソカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"philadelphia"
ja:"ボナパルトカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"brunnicephalus"
ja:"チャガシラカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"ridibundus"
ja:"ユリカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"saundersi"
ja:"ズグロカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"minutus"
ja:"ヒメカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"atricilla"
ja:"ワライカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"pipixcan"
ja:"アメリカズグロカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"relictus"
ja:"ゴビズキンカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"ichthyaetus"
ja:"オオズグロカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"crassirostris"
ja:"ウミネコ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"canus"
ja:"カモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"glaucescens"
ja:"ワシカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"hyperboreus"
ja:"シロカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"glaucoides"
ja:"アイスランドカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"thayeri"
ja:"カナダカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"argentatus"
ja:"セグロカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"cachinnans"
ja:"キアシセグロカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"schistisagus"
ja:"オオセグロカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"fuscus"
ja:"ニシセグロカモメ"
}
{
"alien":false,
upper:"gelochelidon"
rank:"species"
sc:"nilotica"
ja:"ハシブトアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"caspia"
ja:"オニアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"bergii"
ja:"オオアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"bengalensis"
ja:"ベンガルアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"albifrons"
ja:"コアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"aleutica"
ja:"コシジロアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"lunata"
ja:"ナンヨウマミジロアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"anaethetus"
ja:"マミジロアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"fuscata"
ja:"セグロアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"dougallii"
ja:"ベニアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"sumatrana"
ja:"エリグロアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"hirundo"
ja:"アジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"paradisaea"
ja:"キョクアジサシ"
}
{
"alien":false,
upper:"chlidonias"
rank:"species"
sc:"hybrida"
ja:"クロハラアジサシ"
}
{
"alien":false,
upper:"chlidonias"
rank:"species"
sc:"leucopterus"
ja:"ハジロクロハラアジサシ"
}
{
"alien":false,
upper:"chlidonias"
rank:"species"
sc:"niger"
ja:"ハシグロクロハラアジサシ"
}
{
"alien":false,
upper:"stercorarius"
rank:"species"
sc:"maccormicki"
ja:"オオトウゾクカモメ"
}
{
"alien":false,
upper:"stercorarius"
rank:"species"
sc:"pomarinus"
ja:"トウゾクカモメ"
}
{
"alien":false,
upper:"stercorarius"
rank:"species"
sc:"parasiticus"
ja:"クロトウゾクカモメ"
}
{
"alien":false,
upper:"stercorarius"
rank:"species"
sc:"longicaudus"
ja:"シロハラトウゾクカモメ"
}
{
"alien":false,
upper:"alle"
rank:"species"
sc:"alle"
ja:"ヒメウミスズメ"
}
{
"alien":false,
upper:"uria"
rank:"species"
sc:"lomvia"
ja:"ハシブトウミガラス"
}
{
"alien":false,
upper:"uria"
rank:"species"
sc:"aalge"
ja:"ウミガラス"
}
{
"alien":false,
upper:"alca"
rank:"species"
sc:"torda"
ja:"オオハシウミガラス"
}
{
"alien":false,
upper:"cepphus"
rank:"species"
sc:"columba"
ja:"ウミバト"
}
{
"alien":false,
upper:"cepphus"
rank:"species"
sc:"carbo"
ja:"ケイマフリ"
}
{
"alien":false,
upper:"brachyramphus"
rank:"species"
sc:"perdix"
ja:"マダラウミスズメ"
}
{
"alien":false,
upper:"synthliboramphus"
rank:"species"
sc:"antiquus"
ja:"ウミスズメ"
}
{
"alien":false,
upper:"synthliboramphus"
rank:"species"
sc:"wumizusume"
ja:"カンムリウミスズメ"
}
{
"alien":false,
upper:"aethia"
rank:"species"
sc:"psittacula"
ja:"ウミオウム"
}
{
"alien":false,
upper:"aethia"
rank:"species"
sc:"pusilla"
ja:"コウミスズメ"
}
{
"alien":false,
upper:"aethia"
rank:"species"
sc:"pygmaea"
ja:"シラヒゲウミスズメ"
}
{
"alien":false,
upper:"aethia"
rank:"species"
sc:"cristatella"
ja:"エトロフウミスズメ"
}
{
"alien":false,
upper:"cerorhinca"
rank:"species"
sc:"monocerata"
ja:"ウトウ"
}
{
"alien":false,
upper:"fratercula"
rank:"species"
sc:"corniculata"
ja:"ツノメドリ"
}
{
"alien":false,
upper:"fratercula"
rank:"species"
sc:"cirrhata"
ja:"エトピリカ"
}
{
"alien":false,
upper:"pandion"
rank:"species"
sc:"haliaetus"
ja:"ミサゴ"
}
{
"alien":false,
upper:"pernis"
rank:"species"
sc:"ptilorhynchus"
ja:"ハチクマ"
}
{
"alien":false,
upper:"elanus"
rank:"species"
sc:"caeruleus"
ja:"カタグロトビ"
}
{
"alien":false,
upper:"milvus"
rank:"species"
sc:"migrans"
ja:"トビ"
}
{
"alien":false,
upper:"haliaeetus"
rank:"species"
sc:"albicilla"
ja:"オジロワシ"
}
{
"alien":false,
upper:"haliaeetus"
rank:"species"
sc:"leucocephalus"
ja:"ハクトウワシ"
}
{
"alien":false,
upper:"haliaeetus"
rank:"species"
sc:"pelagicus"
ja:"オオワシ"
}
{
"alien":false,
upper:"aegypius"
rank:"species"
sc:"monachus"
ja:"クロハゲワシ"
}
{
"alien":false,
upper:"spilornis"
rank:"species"
sc:"cheela"
ja:"カンムリワシ"
}
{
"alien":false,
upper:"circus"
rank:"species"
sc:"aeruginosus"
ja:"ヨーロッパチュウヒ"
}
{
"alien":false,
upper:"circus"
rank:"species"
sc:"spilonotus"
ja:"チュウヒ"
}
{
"alien":false,
upper:"circus"
rank:"species"
sc:"cyaneus"
ja:"ハイイロチュウヒ"
}
{
"alien":false,
upper:"circus"
rank:"species"
sc:"macrourus"
ja:"ウスハイイロチュウヒ"
}
{
"alien":false,
upper:"circus"
rank:"species"
sc:"melanoleucos"
ja:"マダラチュウヒ"
}
{
"alien":false,
upper:"accipiter"
rank:"species"
sc:"soloensis"
ja:"アカハラダカ"
}
{
"alien":false,
upper:"accipiter"
rank:"species"
sc:"gularis"
ja:"ツミ"
}
{
"alien":false,
upper:"accipiter"
rank:"species"
sc:"nisus"
ja:"ハイタカ"
}
{
"alien":false,
upper:"accipiter"
rank:"species"
sc:"gentilis"
ja:"オオタカ"
}
{
"alien":false,
upper:"butastur"
rank:"species"
sc:"indicus"
ja:"サシバ"
}
{
"alien":false,
upper:"buteo"
rank:"species"
sc:"buteo"
ja:"ノスリ"
}
{
"alien":false,
upper:"buteo"
rank:"species"
sc:"hemilasius"
ja:"オオノスリ"
}
{
"alien":false,
upper:"buteo"
rank:"species"
sc:"lagopus"
ja:"ケアシノスリ"
}
{
"alien":false,
upper:"aquila"
rank:"species"
sc:"clanga"
ja:"カラフトワシ"
}
{
"alien":false,
upper:"aquila"
rank:"species"
sc:"heliaca"
ja:"カタシロワシ"
}
{
"alien":false,
upper:"aquila"
rank:"species"
sc:"chrysaetos"
ja:"イヌワシ"
}
{
"alien":false,
upper:"nisaetus"
rank:"species"
sc:"nipalensis"
ja:"クマタカ"
}
{
"alien":false,
upper:"tyto"
rank:"species"
sc:"longimembris"
ja:"ヒガシメンフクロウ"
}
{
"alien":false,
upper:"otus"
rank:"species"
sc:"lempiji"
ja:"オオコノハズク"
}
{
"alien":false,
upper:"otus"
rank:"species"
sc:"sunia"
ja:"コノハズク"
}
{
"alien":false,
upper:"otus"
rank:"species"
sc:"elegans"
ja:"リュウキュウコノハズク"
}
{
"alien":false,
upper:"bubo"
rank:"species"
sc:"scandiacus"
ja:"シロフクロウ"
}
{
"alien":false,
upper:"bubo"
rank:"species"
sc:"bubo"
ja:"ワシミミズク"
}
{
"alien":false,
upper:"ketupa"
rank:"species"
sc:"blakistoni"
ja:"シマフクロウ"
}
{
"alien":false,
upper:"strix"
rank:"species"
sc:"uralensis"
ja:"フクロウ"
}
{
"alien":false,
upper:"aegolius"
rank:"species"
sc:"funereus"
ja:"キンメフクロウ"
}
{
"alien":false,
upper:"ninox"
rank:"species"
sc:"scutulata"
ja:"アオバズク"
}
{
"alien":false,
upper:"asio"
rank:"species"
sc:"otus"
ja:"トラフズク"
}
{
"alien":false,
upper:"asio"
rank:"species"
sc:"flammeus"
ja:"コミミズク"
}
{
"alien":false,
upper:"upupa"
rank:"species"
sc:"epops"
ja:"ヤツガシラ"
}
{
"alien":false,
upper:"halcyon"
rank:"species"
sc:"coromanda"
ja:"アカショウビン"
}
{
"alien":false,
upper:"halcyon"
rank:"species"
sc:"smyrnensis"
ja:"アオショウビン"
}
{
"alien":false,
upper:"halcyon"
rank:"species"
sc:"pileata"
ja:"ヤマショウビン"
}
{
"alien":false,
upper:"todiramphus"
rank:"species"
sc:"chloris"
ja:"ナンヨウショウビン"
}
{
"alien":false,
upper:"todiramphus"
rank:"species"
sc:"miyakoensis"
ja:"ミヤコショウビン"
}
{
"alien":false,
upper:"alcedo"
rank:"species"
sc:"atthis"
ja:"カワセミ"
}
{
"alien":false,
upper:"ceyx"
rank:"species"
sc:"erithaca"
ja:"ミツユビカワセミ"
}
{
"alien":false,
upper:"megaceryle"
rank:"species"
sc:"lugubris"
ja:"ヤマセミ"
}
{
"alien":false,
upper:"merops"
rank:"species"
sc:"ornatus"
ja:"ハチクイ"
}
{
"alien":false,
upper:"eurystomus"
rank:"species"
sc:"orientalis"
ja:"ブッポウソウ"
}
{
"alien":false,
upper:"jynx"
rank:"species"
sc:"torquilla"
ja:"アリスイ"
}
{
"alien":false,
upper:"dendrocopos"
rank:"species"
sc:"hyperythrus"
ja:"チャバラアカゲラ"
}
{
"alien":false,
upper:"dendrocopos"
rank:"species"
sc:"kizuki"
ja:"コゲラ"
}
{
"alien":false,
upper:"dendrocopos"
rank:"species"
sc:"minor"
ja:"コアカゲラ"
}
{
"alien":false,
upper:"dendrocopos"
rank:"species"
sc:"leucotos"
ja:"オオアカゲラ"
}
{
"alien":false,
upper:"dendrocopos"
rank:"species"
sc:"major"
ja:"アカゲラ"
}
{
"alien":false,
upper:"picoides"
rank:"species"
sc:"tridactylus"
ja:"ミユビゲラ"
}
{
"alien":false,
upper:"dryocopus"
rank:"species"
sc:"javensis"
ja:"キタタキ"
}
{
"alien":false,
upper:"dryocopus"
rank:"species"
sc:"martius"
ja:"クマゲラ"
}
{
"alien":false,
upper:"picus"
rank:"species"
sc:"awokera"
ja:"アオゲラ"
}
{
"alien":false,
upper:"picus"
rank:"species"
sc:"canus"
ja:"ヤマゲラ"
}
{
"alien":false,
upper:"sapheopipo"
rank:"species"
sc:"noguchii"
ja:"ノグチゲラ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"naumanni"
ja:"ヒメチョウゲンボウ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"tinnunculus"
ja:"チョウゲンボウ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"amurensis"
ja:"アカアシチョウゲンボウ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"columbarius"
ja:"コチョウゲンボウ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"subbuteo"
ja:"チゴハヤブサ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"cherrug"
ja:"ワキスジハヤブサ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"rusticolus"
ja:"シロハヤブサ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"peregrinus"
ja:"ハヤブサ"
}
{
"alien":false,
upper:"pitta"
rank:"species"
sc:"sordida"
ja:"ズグロヤイロチョウ"
}
{
"alien":false,
upper:"pitta"
rank:"species"
sc:"nympha"
ja:"ヤイロチョウ"
}
{
"alien":false,
upper:"artamus"
rank:"species"
sc:"leucorynchus"
ja:"モリツバメ"
}
{
"alien":false,
upper:"coracina"
rank:"species"
sc:"melaschistos"
ja:"アサクラサンショウクイ"
}
{
"alien":false,
upper:"pericrocotus"
rank:"species"
sc:"divaricatus"
ja:"サンショウクイ"
}
{
"alien":false,
upper:"oriolus"
rank:"species"
sc:"chinensis"
ja:"コウライウグイス"
}
{
"alien":false,
upper:"dicrurus"
rank:"species"
sc:"macrocercus"
ja:"オウチュウ"
}
{
"alien":false,
upper:"dicrurus"
rank:"species"
sc:"leucophaeus"
ja:"ハイイロオウチュウ"
}
{
"alien":false,
upper:"dicrurus"
rank:"species"
sc:"hottentottus"
ja:"カンムリオウチュウ"
}
{
"alien":false,
upper:"hypothymis"
rank:"species"
sc:"azurea"
ja:"クロエリヒタキ"
}
{
"alien":false,
upper:"terpsiphone"
rank:"species"
sc:"atrocaudata"
ja:"サンコウチョウ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"tigrinus"
ja:"チゴモズ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"bucephalus"
ja:"モズ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"cristatus"
ja:"アカモズ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"collurio"
ja:"セアカモズ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"isabellinus"
ja:"モウコアカモズ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"schach"
ja:"タカサゴモズ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"excubitor"
ja:"オオモズ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"sphenocercus"
ja:"オオカラモズ"
}
{
"alien":false,
upper:"garrulus"
rank:"species"
sc:"glandarius"
ja:"カケス"
}
{
"alien":false,
upper:"garrulus"
rank:"species"
sc:"lidthi"
ja:"ルリカケス"
}
{
"alien":false,
upper:"cyanopica"
rank:"species"
sc:"cyanus"
ja:"オナガ"
}
{
"alien":false,
upper:"pica"
rank:"species"
sc:"pica"
ja:"カササギ"
}
{
"alien":false,
upper:"nucifraga"
rank:"species"
sc:"caryocatactes"
ja:"ホシガラス"
}
{
"alien":false,
upper:"corvus"
rank:"species"
sc:"monedula"
ja:"ニシコクマルガラス"
}
{
"alien":false,
upper:"corvus"
rank:"species"
sc:"dauuricus"
ja:"コクマルガラス"
}
{
"alien":false,
upper:"corvus"
rank:"species"
sc:"frugilegus"
ja:"ミヤマガラス"
}
{
"alien":false,
upper:"corvus"
rank:"species"
sc:"corone"
ja:"ハシボソガラス"
}
{
"alien":false,
upper:"corvus"
rank:"species"
sc:"macrorhynchos"
ja:"ハシブトガラス"
}
{
"alien":false,
upper:"corvus"
rank:"species"
sc:"corax"
ja:"ワタリガラス"
}
{
"alien":false,
upper:"regulus"
rank:"species"
sc:"regulus"
ja:"キクイタダキ"
}
{
"alien":false,
upper:"remiz"
rank:"species"
sc:"pendulinus"
ja:"ツリスガラ"
}
{
"alien":false,
upper:"poecile"
rank:"species"
sc:"palustris"
ja:"ハシブトガラ"
}
{
"alien":false,
upper:"poecile"
rank:"species"
sc:"montanus"
ja:"コガラ"
}
{
"alien":false,
upper:"poecile"
rank:"species"
sc:"varius"
ja:"ヤマガラ"
}
{
"alien":false,
upper:"periparus"
rank:"species"
sc:"ater"
ja:"ヒガラ"
}
{
"alien":false,
upper:"periparus"
rank:"species"
sc:"venustulus"
ja:"キバラガラ"
}
{
"alien":false,
upper:"parus"
rank:"species"
sc:"minor"
ja:"シジュウカラ"
}
{
"alien":false,
upper:"cyanistes"
rank:"species"
sc:"cyanus"
ja:"ルリガラ"
}
{
"alien":false,
upper:"panurus"
rank:"species"
sc:"biarmicus"
ja:"ヒゲガラ"
}
{
"alien":false,
upper:"melanocorypha"
rank:"species"
sc:"bimaculata"
ja:"クビワコウテンシ"
}
{
"alien":false,
upper:"melanocorypha"
rank:"species"
sc:"mongolica"
ja:"コウテンシ"
}
{
"alien":false,
upper:"calandrella"
rank:"species"
sc:"brachydactyla"
ja:"ヒメコウテンシ"
}
{
"alien":false,
upper:"calandrella"
rank:"species"
sc:"cheleensis"
ja:"コヒバリ"
}
{
"alien":false,
upper:"alauda"
rank:"species"
sc:"arvensis"
ja:"ヒバリ"
}
{
"alien":false,
upper:"eremophila"
rank:"species"
sc:"alpestris"
ja:"ハマヒバリ"
}
{
"alien":false,
upper:"riparia"
rank:"species"
sc:"paludicola"
ja:"タイワンショウドウツバメ"
}
{
"alien":false,
upper:"riparia"
rank:"species"
sc:"riparia"
ja:"ショウドウツバメ"
}
{
"alien":false,
upper:"tachycineta"
rank:"species"
sc:"bicolor"
ja:"ミドリツバメ"
}
{
"alien":false,
upper:"hirundo"
rank:"species"
sc:"rustica"
ja:"ツバメ"
}
{
"alien":false,
upper:"hirundo"
rank:"species"
sc:"tahitica"
ja:"リュウキュウツバメ"
}
{
"alien":false,
upper:"hirundo"
rank:"species"
sc:"daurica"
ja:"コシアカツバメ"
}
{
"alien":false,
upper:"delichon"
rank:"species"
sc:"urbicum"
ja:"ニシイワツバメ"
}
{
"alien":false,
upper:"delichon"
rank:"species"
sc:"dasypus"
ja:"イワツバメ"
}
{
"alien":false,
upper:"pycnonotus"
rank:"species"
sc:"sinensis"
ja:"シロガシラ"
}
{
"alien":false,
upper:"hypsipetes"
rank:"species"
sc:"amaurotis"
ja:"ヒヨドリ"
}
{
"alien":false,
upper:"cettia"
rank:"species"
sc:"diphone"
ja:"ウグイス"
}
{
"alien":false,
upper:"urosphena"
rank:"species"
sc:"squameiceps"
ja:"ヤブサメ"
}
{
"alien":false,
upper:"aegithalos"
rank:"species"
sc:"caudatus"
ja:"エナガ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"trochilus"
ja:"キタヤナギムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"collybita"
ja:"チフチャフ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"sibilatrix"
ja:"モリムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"fuscatus"
ja:"ムジセッカ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"affinis"
ja:"キバラムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"schwarzi"
ja:"カラフトムジセッカ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"proregulus"
ja:"カラフトムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"inornatus"
ja:"キマユムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"borealis"
ja:"コムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"examinandus"
ja:"オオムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"xanthodryas"
ja:"メボソムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"plumbeitarsus"
ja:"ヤナギムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"borealoides"
ja:"エゾムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"coronatus"
ja:"センダイムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"ijimae"
ja:"イイジマムシクイ"
}
{
"alien":false,
upper:"sylvia"
rank:"species"
sc:"curruca"
ja:"コノドジロムシクイ"
}
{
"alien":false,
upper:"apalopteron"
rank:"species"
sc:"familiare"
ja:"メグロ"
}
{
"alien":false,
upper:"zosterops"
rank:"species"
sc:"erythropleurus"
ja:"チョウセンメジロ"
}
{
"alien":false,
upper:"zosterops"
rank:"species"
sc:"japonicus"
ja:"メジロ"
}
{
"alien":false,
upper:"locustella"
rank:"species"
sc:"lanceolata"
ja:"マキノセンニュウ"
}
{
"alien":false,
upper:"locustella"
rank:"species"
sc:"ochotensis"
ja:"シマセンニュウ"
}
{
"alien":false,
upper:"locustella"
rank:"species"
sc:"pleskei"
ja:"ウチヤマセンニュウ"
}
{
"alien":false,
upper:"locustella"
rank:"species"
sc:"certhiola"
ja:"シベリアセンニュウ"
}
{
"alien":false,
upper:"locustella"
rank:"species"
sc:"pryeri"
ja:"オオセッカ"
}
{
"alien":false,
upper:"locustella"
rank:"species"
sc:"fasciolata"
ja:"エゾセンニュウ"
}
{
"alien":false,
upper:"acrocephalus"
rank:"species"
sc:"orientalis"
ja:"オオヨシキリ"
}
{
"alien":false,
upper:"acrocephalus"
rank:"species"
sc:"bistrigiceps"
ja:"コヨシキリ"
}
{
"alien":false,
upper:"acrocephalus"
rank:"species"
sc:"sorghophilus"
ja:"セスジコヨシキリ"
}
{
"alien":false,
upper:"acrocephalus"
rank:"species"
sc:"agricola"
ja:"イナダヨシキリ"
}
{
"alien":false,
upper:"acrocephalus"
rank:"species"
sc:"dumetorum"
ja:"ヤブヨシキリ"
}
{
"alien":false,
upper:"acrocephalus"
rank:"species"
sc:"aedon"
ja:"ハシブトオオヨシキリ"
}
{
"alien":false,
upper:"iduna"
rank:"species"
sc:"caligata"
ja:"ヒメウタイムシクイ"
}
{
"alien":false,
upper:"cisticola"
rank:"species"
sc:"juncidis"
ja:"セッカ"
}
{
"alien":false,
upper:"bombycilla"
rank:"species"
sc:"garrulus"
ja:"キレンジャク"
}
{
"alien":false,
upper:"bombycilla"
rank:"species"
sc:"japonica"
ja:"ヒレンジャク"
}
{
"alien":false,
upper:"sitta"
rank:"species"
sc:"europaea"
ja:"ゴジュウカラ"
}
{
"alien":false,
upper:"certhia"
rank:"species"
sc:"familiaris"
ja:"キバシリ"
}
{
"alien":false,
upper:"troglodytes"
rank:"species"
sc:"troglodytes"
ja:"ミソサザイ"
}
{
"alien":false,
upper:"spodiopsar"
rank:"species"
sc:"sericeus"
ja:"ギンムクドリ"
}
{
"alien":false,
upper:"spodiopsar"
rank:"species"
sc:"cineraceus"
ja:"ムクドリ"
}
{
"alien":false,
upper:"agropsar"
rank:"species"
sc:"sturninus"
ja:"シベリアムクドリ"
}
{
"alien":false,
upper:"agropsar"
rank:"species"
sc:"philippensis"
ja:"コムクドリ"
}
{
"alien":false,
upper:"sturnia"
rank:"species"
sc:"sinensis"
ja:"カラムクドリ"
}
{
"alien":false,
upper:"pastor"
rank:"species"
sc:"roseus"
ja:"バライロムクドリ"
}
{
"alien":false,
upper:"sturnus"
rank:"species"
sc:"vulgaris"
ja:"ホシムクドリ"
}
{
"alien":false,
upper:"cinclus"
rank:"species"
sc:"pallasii"
ja:"カワガラス"
}
{
"alien":false,
upper:"zoothera"
rank:"species"
sc:"sibirica"
ja:"マミジロ"
}
{
"alien":false,
upper:"zoothera"
rank:"species"
sc:"dauma"
ja:"トラツグミ"
}
{
"alien":false,
upper:"cichlopasser"
rank:"species"
sc:"terrestris"
ja:"オガサワラガビチョウ"
}
{
"alien":false,
upper:"catharus"
rank:"species"
sc:"minimus"
ja:"ハイイロチャツグミ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"hortulorum"
ja:"カラアカハラ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"cardis"
ja:"クロツグミ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"merula"
ja:"クロウタドリ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"obscurus"
ja:"マミチャジナイ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"pallidus"
ja:"シロハラ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"chrysolaus"
ja:"アカハラ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"celaenops"
ja:"アカコッコ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"ruficollis"
ja:"ノドグロツグミ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"naumanni"
ja:"ツグミ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"pilaris"
ja:"ノハラツグミ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"iliacus"
ja:"ワキアカツグミ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"viscivorus"
ja:"ヤドリギツグミ"
}
{
"alien":false,
upper:"erithacus"
rank:"species"
sc:"rubecula"
ja:"ヨーロッパコマドリ"
}
{
"alien":false,
upper:"luscinia"
rank:"species"
sc:"akahige"
ja:"コマドリ"
}
{
"alien":false,
upper:"luscinia"
rank:"species"
sc:"komadori"
ja:"アカヒゲ"
}
{
"alien":false,
upper:"luscinia"
rank:"species"
sc:"svecica"
ja:"オガワコマドリ"
}
{
"alien":false,
upper:"luscinia"
rank:"species"
sc:"calliope"
ja:"ノゴマ"
}
{
"alien":false,
upper:"luscinia"
rank:"species"
sc:"cyane"
ja:"コルリ"
}
{
"alien":false,
upper:"luscinia"
rank:"species"
sc:"sibilans"
ja:"シマゴマ"
}
{
"alien":false,
upper:"tarsiger"
rank:"species"
sc:"cyanurus"
ja:"ルリビタキ"
}
{
"alien":false,
upper:"phoenicurus"
rank:"species"
sc:"erythronotus"
ja:"セアカジョウビタキ"
}
{
"alien":false,
upper:"phoenicurus"
rank:"species"
sc:"ochruros"
ja:"クロジョウビタキ"
}
{
"alien":false,
upper:"phoenicurus"
rank:"species"
sc:"phoenicurus"
ja:"シロビタイジョウビタキ"
}
{
"alien":false,
upper:"phoenicurus"
rank:"species"
sc:"auroreus"
ja:"ジョウビタキ"
}
{
"alien":false,
upper:"saxicola"
rank:"species"
sc:"rubetra"
ja:"マミジロノビタキ"
}
{
"alien":false,
upper:"saxicola"
rank:"species"
sc:"torquatus"
ja:"ノビタキ"
}
{
"alien":false,
upper:"saxicola"
rank:"species"
sc:"caprata"
ja:"クロノビタキ"
}
{
"alien":false,
upper:"saxicola"
rank:"species"
sc:"ferreus"
ja:"ヤマザキヒタキ"
}
{
"alien":false,
upper:"oenanthe"
rank:"species"
sc:"isabellina"
ja:"イナバヒタキ"
}
{
"alien":false,
upper:"oenanthe"
rank:"species"
sc:"oenanthe"
ja:"ハシグロヒタキ"
}
{
"alien":false,
upper:"oenanthe"
rank:"species"
sc:"pleschanka"
ja:"セグロサバクヒタキ"
}
{
"alien":false,
upper:"oenanthe"
rank:"species"
sc:"deserti"
ja:"サバクヒタキ"
}
{
"alien":false,
upper:"monticola"
rank:"species"
sc:"solitarius"
ja:"イソヒヨドリ"
}
{
"alien":false,
upper:"monticola"
rank:"species"
sc:"gularis"
ja:"ヒメイソヒヨ"
}
{
"alien":false,
upper:"muscicapa"
rank:"species"
sc:"striata"
ja:"ムナフヒタキ"
}
{
"alien":false,
upper:"muscicapa"
rank:"species"
sc:"griseisticta"
ja:"エゾビタキ"
}
{
"alien":false,
upper:"muscicapa"
rank:"species"
sc:"sibirica"
ja:"サメビタキ"
}
{
"alien":false,
upper:"muscicapa"
rank:"species"
sc:"dauurica"
ja:"コサメビタキ"
}
{
"alien":false,
upper:"muscicapa"
rank:"species"
sc:"ferruginea"
ja:"ミヤマヒタキ"
}
{
"alien":false,
upper:"ficedula"
rank:"species"
sc:"hypoleuca"
ja:"マダラヒタキ"
}
{
"alien":false,
upper:"ficedula"
rank:"species"
sc:"zanthopygia"
ja:"マミジロキビタキ"
}
{
"alien":false,
upper:"ficedula"
rank:"species"
sc:"narcissina"
ja:"キビタキ"
}
{
"alien":false,
upper:"ficedula"
rank:"species"
sc:"mugimaki"
ja:"ムギマキ"
}
{
"alien":false,
upper:"ficedula"
rank:"species"
sc:"albicilla"
ja:"オジロビタキ"
}
{
"alien":false,
upper:"cyanoptila"
rank:"species"
sc:"cyanomelana"
ja:"オオルリ"
}
{
"alien":false,
upper:"eumyias"
rank:"species"
sc:"thalassinus"
ja:"ロクショウヒタキ"
}
{
"alien":false,
upper:"niltava"
rank:"species"
sc:"vivida"
ja:"チャバラオオルリ"
}
{
"alien":false,
upper:"prunella"
rank:"species"
sc:"collaris"
ja:"イワヒバリ"
}
{
"alien":false,
upper:"prunella"
rank:"species"
sc:"montanella"
ja:"ヤマヒバリ"
}
{
"alien":false,
upper:"prunella"
rank:"species"
sc:"rubida"
ja:"カヤクグリ"
}
{
"alien":false,
upper:"passer"
rank:"species"
sc:"domesticus"
ja:"イエスズメ"
}
{
"alien":false,
upper:"passer"
rank:"species"
sc:"rutilans"
ja:"ニュウナイスズメ"
}
{
"alien":false,
upper:"passer"
rank:"species"
sc:"montanus"
ja:"スズメ"
}
{
"alien":false,
upper:"dendronanthus"
rank:"species"
sc:"indicus"
ja:"イワミセキレイ"
}
{
"alien":false,
upper:"motacilla"
rank:"species"
sc:"flava"
ja:"ツメナガセキレイ"
}
{
"alien":false,
upper:"motacilla"
rank:"species"
sc:"citreola"
ja:"キガシラセキレイ"
}
{
"alien":false,
upper:"motacilla"
rank:"species"
sc:"cinerea"
ja:"キセキレイ"
}
{
"alien":false,
upper:"motacilla"
rank:"species"
sc:"alba"
ja:"ハクセキレイ"
}
{
"alien":false,
upper:"motacilla"
rank:"species"
sc:"grandis"
ja:"セグロセキレイ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"richardi"
ja:"マミジロタヒバリ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"godlewskii"
ja:"コマミジロタヒバリ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"pratensis"
ja:"マキバタヒバリ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"trivialis"
ja:"ヨーロッパビンズイ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"hodgsoni"
ja:"ビンズイ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"gustavi"
ja:"セジロタヒバリ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"roseatus"
ja:"ウスベニタヒバリ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"cervinus"
ja:"ムネアカタヒバリ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"rubescens"
ja:"タヒバリ"
}
{
"alien":false,
upper:"fringilla"
rank:"species"
sc:"coelebs"
ja:"ズアオアトリ"
}
{
"alien":false,
upper:"fringilla"
rank:"species"
sc:"montifringilla"
ja:"アトリ"
}
{
"alien":false,
upper:"chloris"
rank:"species"
sc:"sinica"
ja:"カワラヒワ"
}
{
"alien":false,
upper:"carduelis"
rank:"species"
sc:"spinus"
ja:"マヒワ"
}
{
"alien":false,
upper:"carduelis"
rank:"species"
sc:"flammea"
ja:"ベニヒワ"
}
{
"alien":false,
upper:"carduelis"
rank:"species"
sc:"hornemanni"
ja:"コベニヒワ"
}
{
"alien":false,
upper:"leucosticte"
rank:"species"
sc:"arctoa"
ja:"ハギマシコ"
}
{
"alien":false,
upper:"uragus"
rank:"species"
sc:"sibiricus"
ja:"ベニマシコ"
}
{
"alien":false,
upper:"chaunoproctus"
rank:"species"
sc:"ferreorostris"
ja:"オガサワラマシコ"
}
{
"alien":false,
upper:"carpodacus"
rank:"species"
sc:"erythrinus"
ja:"アカマシコ"
}
{
"alien":false,
upper:"carpodacus"
rank:"species"
sc:"roseus"
ja:"オオマシコ"
}
{
"alien":false,
upper:"pinicola"
rank:"species"
sc:"enucleator"
ja:"ギンザンマシコ"
}
{
"alien":false,
upper:"loxia"
rank:"species"
sc:"curvirostra"
ja:"イスカ"
}
{
"alien":false,
upper:"loxia"
rank:"species"
sc:"leucoptera"
ja:"ナキイスカ"
}
{
"alien":false,
upper:"pyrrhula"
rank:"species"
sc:"pyrrhula"
ja:"ウソ"
}
{
"alien":false,
upper:"coccothraustes"
rank:"species"
sc:"coccothraustes"
ja:"シメ"
}
{
"alien":false,
upper:"eophona"
rank:"species"
sc:"migratoria"
ja:"コイカル"
}
{
"alien":false,
upper:"eophona"
rank:"species"
sc:"personata"
ja:"イカル"
}
{
"alien":false,
upper:"calcarius"
rank:"species"
sc:"lapponicus"
ja:"ツメナガホオジロ"
}
{
"alien":false,
upper:"plectrophenax"
rank:"species"
sc:"nivalis"
ja:"ユキホオジロ"
}
{
"alien":false,
upper:"setophaga"
rank:"species"
sc:"coronata"
ja:"キヅタアメリカムシクイ"
}
{
"alien":false,
upper:"cardellina"
rank:"species"
sc:"pusilla"
ja:"ウィルソンアメリカムシクイ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"lathami"
ja:"レンジャクノジコ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"citrinella"
ja:"キアオジ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"leucocephalos"
ja:"シラガホオジロ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"cioides"
ja:"ホオジロ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"buchanani"
ja:"イワバホオジロ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"hortulana"
ja:"ズアオホオジロ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"tristrami"
ja:"シロハラホオジロ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"fucata"
ja:"ホオアカ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"pusilla"
ja:"コホオアカ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"chrysophrys"
ja:"キマユホオジロ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"rustica"
ja:"カシラダカ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"elegans"
ja:"ミヤマホオジロ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"aureola"
ja:"シマアオジ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"rutila"
ja:"シマノジコ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"melanocephala"
ja:"ズグロチャキンチョウ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"bruniceps"
ja:"チャキンチョウ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"sulphurata"
ja:"ノジコ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"spodocephala"
ja:"アオジ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"variabilis"
ja:"クロジ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"pallasi"
ja:"シベリアジュリン"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"yessoensis"
ja:"コジュリン"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"schoeniclus"
ja:"オオジュリン"
}
{
"alien":false,
upper:"passerella"
rank:"species"
sc:"iliaca"
ja:"ゴマフスズメ"
}
{
"alien":false,
upper:"melospiza"
rank:"species"
sc:"melodia"
ja:"ウタスズメ"
}
{
"alien":false,
upper:"zonotrichia"
rank:"species"
sc:"leucophrys"
ja:"ミヤマシトド"
}
{
"alien":false,
upper:"zonotrichia"
rank:"species"
sc:"atricapilla"
ja:"キガシラシトド"
}
{
"alien":false,
upper:"passerculus"
rank:"species"
sc:"sandwichensis"
ja:"サバンナシトド"
}
{
"alien":true,
upper:"bambusicola"
rank:"species"
sc:"thoracicus"
ja:"コジュケイ"
}
{
"alien":true,
upper:"syrmaticus"
rank:"species"
sc:"soemmerringii"
ja:"ヤマドリ"
}
{
"alien":true,
upper:"phasianus"
rank:"species"
sc:"colchicus"
ja:"キジ"
}
{
"alien":true,
upper:"pavo"
rank:"species"
sc:"cristatus"
ja:"インドクジャク"
}
{
"alien":true,
upper:"branta"
rank:"species"
sc:"canadensis"
ja:"カナダガン"
}
{
"alien":true,
upper:"cygnus"
rank:"species"
sc:"atratus"
ja:"コクチョウ"
}
{
"alien":true,
upper:"cygnus"
rank:"species"
sc:"olor"
ja:"コブハクチョウ"
}
{
"alien":true,
upper:"columba"
rank:"species"
sc:"livia"
ja:"カワラバト(ドバト)"
}
{
"alien":true,
upper:"streptopelia"
rank:"species"
sc:"decaocto"
ja:"シラコバト"
}
{
"alien":true,
upper:"ciconia"
rank:"species"
sc:"boyciana"
ja:"コウノトリ"
}
{
"alien":true,
upper:"nipponia"
rank:"species"
sc:"nippon"
ja:"トキ"
}
{
"alien":true,
upper:"himantopus"
rank:"species"
sc:"himantopus"
ja:"セイタカシギ"
}
{
"alien":true,
upper:"melopsittacus"
rank:"species"
sc:"undulatus"
ja:"セキセイインコ"
}
{
"alien":true,
upper:"psittacula"
rank:"species"
sc:"eupatria"
ja:"オオホンセイインコ"
}
{
"alien":true,
upper:"psittacula"
rank:"species"
sc:"krameri"
ja:"ホンセイインコ"
}
{
"alien":true,
upper:"psittacula"
rank:"species"
sc:"alexandri"
ja:"ダルマインコ"
}
{
"alien":true,
upper:"myiopsitta"
rank:"species"
sc:"monachus"
ja:"オキナインコ"
}
{
"alien":true,
upper:"urocissa"
rank:"species"
sc:"caerulea"
ja:"ヤマムスメ"
}
{
"alien":true,
upper:"pica"
rank:"species"
sc:"pica"
ja:"カササギ"
}
{
"alien":true,
upper:"zosterops"
rank:"species"
sc:"japonicus"
ja:"メジロ"
}
{
"alien":true,
upper:"garrulax"
rank:"species"
sc:"canorus"
ja:"ガビチョウ"
}
{
"alien":true,
upper:"garrulax"
rank:"species"
sc:"cineraceus"
ja:"ヒゲガビチョウ"
}
{
"alien":true,
upper:"garrulax"
rank:"species"
sc:"perspicillatus"
ja:"カオグロガビチョウ"
}
{
"alien":true,
upper:"garrulax"
rank:"species"
sc:"sannio"
ja:"カオジロガビチョウ"
}
{
"alien":true,
upper:"leiothrix"
rank:"species"
sc:"lutea"
ja:"ソウシチョウ"
}
{
"alien":true,
upper:"acridotheres"
rank:"species"
sc:"cristatellus"
ja:"ハッカチョウ"
}
{
"alien":true,
upper:"acridotheres"
rank:"species"
sc:"javanicus"
ja:"モリハッカ"
}
{
"alien":true,
upper:"acridotheres"
rank:"species"
sc:"ginginianus"
ja:"ハイイロハッカ"
}
{
"alien":true,
upper:"acridotheres"
rank:"species"
sc:"tristis"
ja:"インドハッカ"
}
{
"alien":true,
upper:"gracupica"
rank:"species"
sc:"contra"
ja:"ホオジロムクドリ"
}
{
"alien":true,
upper:"ploceus"
rank:"species"
sc:"intermedius"
ja:"メンハタオリドリ"
}
{
"alien":true,
upper:"euplectes"
rank:"species"
sc:"orix"
ja:"オオキンランチョウ"
}
{
"alien":true,
upper:"estrilda"
rank:"species"
sc:"melpoda"
ja:"ホオアカカエデチョウ"
}
{
"alien":true,
upper:"estrilda"
rank:"species"
sc:"troglodytes"
ja:"カエデチョウ"
}
{
"alien":true,
upper:"amandava"
rank:"species"
sc:"amandava"
ja:"ベニスズメ"
}
{
"alien":true,
upper:"lonchura"
rank:"species"
sc:"striata"
ja:"コシジロキンパラ"
}
{
"alien":true,
upper:"lonchura"
rank:"species"
sc:"punctulata"
ja:"シマキンパラ"
}
{
"alien":true,
upper:"lonchura"
rank:"species"
sc:"malacca"
ja:"ギンパラ"
}
{
"alien":true,
upper:"lonchura"
rank:"species"
sc:"atricapilla"
ja:"キンパラ"
}
{
"alien":true,
upper:"lonchura"
rank:"species"
sc:"maja"
ja:"ヘキチョウ"
}
{
"alien":true,
upper:"lonchura"
rank:"species"
sc:"oryzivora"
ja:"ブンチョウ"
}
{
"alien":true,
upper:"vidua"
rank:"species"
sc:"paradisaea"
ja:"ホウオウジャク"
}
{
"alien":true,
upper:"paroaria"
rank:"species"
sc:"coronata"
ja:"コウカンチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"bonasia"
sc:"vicinitas"
ja:"エゾライチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"muta"
sc:"japonica"
ja:"ライチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"soemmerringii"
sc:"scintillans"
ja:"ヤマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"soemmerringii"
sc:"subrufus"
ja:"ウスアカヤマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"soemmerringii"
sc:"intermedius"
ja:"シコクヤマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"soemmerringii"
sc:"soemmerringii"
ja:"アカヤマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"soemmerringii"
sc:"ijimae"
ja:"コシジロヤマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"colchicus"
sc:"robustipes"
ja:"キジ"
}
{
"alien":false,
rank:"subspecies"
upper:"colchicus"
sc:"tohkaidi"
ja:"トウカイキジ"
}
{
"alien":false,
rank:"subspecies"
upper:"colchicus"
sc:"tanensis"
ja:"シマキジ"
}
{
"alien":false,
rank:"subspecies"
upper:"colchicus"
sc:"versicolor"
ja:"キュウシュウキジ"
}
{
"alien":false,
rank:"subspecies"
upper:"fabalis"
sc:"middendorffii"
ja:"オオヒシクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"fabalis"
sc:"curtus"
ja:"<NAME>"
}
{
"alien":false,
rank:"subspecies"
upper:"fabalis"
sc:"serrirostris"
ja:"ヒシクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"anser"
sc:"rubrirostris"
ja:"ハイイロガン"
}
{
"alien":false,
rank:"subspecies"
upper:"albifrons"
sc:"albifrons"
ja:"マガン"
}
{
"alien":false,
rank:"subspecies"
upper:"caerulescens"
sc:"caerulescens"
ja:"ハクガン"
}
{
"alien":false,
rank:"subspecies"
upper:"caerulescens"
sc:"atlanticus"
ja:"オオハクガン"
}
{
"alien":false,
rank:"subspecies"
upper:"hutchinsii"
sc:"leucopareia"
ja:"シジュウカラガン"
}
{
"alien":false,
rank:"subspecies"
upper:"hutchinsii"
sc:"minima"
ja:"ヒメシジュウカラガン"
}
{
"alien":false,
rank:"subspecies"
upper:"bernicla"
sc:"orientalis"
ja:"コクガン"
}
{
"alien":false,
rank:"subspecies"
upper:"columbianus"
sc:"jankowskyi"
ja:"コハクチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"columbianus"
sc:"columbianus"
ja:"アメリカコハクチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"coromandelianus"
sc:"coromandelianus"
ja:"ナンキンオシ"
}
{
"alien":false,
rank:"subspecies"
upper:"strepera"
sc:"strepera"
ja:"オカヨシガモ"
}
{
"alien":false,
rank:"subspecies"
upper:"platyrhynchos"
sc:"platyrhynchos"
ja:"マガモ"
}
{
"alien":false,
rank:"subspecies"
upper:"crecca"
sc:"crecca"
ja:"コガモ"
}
{
"alien":false,
rank:"subspecies"
upper:"crecca"
sc:"carolinensis"
ja:"アメリカコガモ"
}
{
"alien":false,
rank:"subspecies"
upper:"marila"
sc:"marila"
ja:"スズガモ"
}
{
"alien":false,
rank:"subspecies"
upper:"fusca"
sc:"stejnegeri"
ja:"ビロードキンクロ"
}
{
"alien":false,
rank:"subspecies"
upper:"clangula"
sc:"clangula"
ja:"ホオジロガモ"
}
{
"alien":false,
rank:"subspecies"
upper:"merganser"
sc:"merganser"
ja:"カワアイサ"
}
{
"alien":false,
rank:"subspecies"
upper:"merganser"
sc:"orientalis"
ja:"コカワアイサ"
}
{
"alien":false,
rank:"subspecies"
upper:"ruficollis"
sc:"poggei"
ja:"カイツブリ"
}
{
"alien":false,
rank:"subspecies"
upper:"ruficollis"
sc:"kunikyonis"
ja:"ダイトウカイツブリ"
}
{
"alien":false,
rank:"subspecies"
upper:"grisegena"
sc:"holbollii"
ja:"アカエリカイツブリ"
}
{
"alien":false,
rank:"subspecies"
upper:"cristatus"
sc:"cristatus"
ja:"カンムリカイツブリ"
}
{
"alien":false,
rank:"subspecies"
upper:"auritus"
sc:"auritus"
ja:"ミミカイツブリ"
}
{
"alien":false,
rank:"subspecies"
upper:"nigricollis"
sc:"nigricollis"
ja:"ハジロカイツブリ"
}
{
"alien":false,
rank:"subspecies"
upper:"rubricauda"
sc:"rothschildi"
ja:"アカオネッタイチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"lepturus"
sc:"dorotheae"
ja:"シラオネッタイチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"oenas"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"janthina"
sc:"janthina"
ja:"カラスバト"
}
{
"alien":false,
rank:"subspecies"
upper:"janthina"
sc:"nitens"
ja:"アカガシラカラスバト"
}
{
"alien":false,
rank:"subspecies"
upper:"janthina"
sc:"stejnegeri"
ja:"ヨナグニカラスバト"
}
{
"alien":false,
rank:"subspecies"
upper:"orientalis"
sc:"orientalis"
ja:"キジバト"
}
{
"alien":false,
rank:"subspecies"
upper:"orientalis"
sc:"stimpsoni"
ja:"リュウキュウキジバト"
}
{
"alien":false,
rank:"subspecies"
upper:"decaocto"
sc:"decaocto"
ja:"シラコバト"
}
{
"alien":false,
rank:"subspecies"
upper:"tranquebarica"
sc:"humilis"
ja:"ベニバト"
}
{
"alien":false,
rank:"subspecies"
upper:"indica"
sc:"yamashinai"
ja:"キンバト"
}
{
"alien":false,
rank:"subspecies"
upper:"sieboldii"
sc:"sieboldii"
ja:"アオバト"
}
{
"alien":false,
rank:"subspecies"
upper:"formosae"
sc:"permagnus"
ja:"ズアカアオバト"
}
{
"alien":false,
rank:"subspecies"
upper:"formosae"
sc:"medioximus"
ja:"チュウダイズアカアオバト"
}
{
"alien":false,
rank:"subspecies"
upper:"leclancheri"
sc:"taiwanus"
ja:"クロアゴヒメアオバト"
}
{
"alien":false,
rank:"subspecies"
upper:"arctica"
sc:"viridigularis"
ja:"オオハム"
}
{
"alien":false,
rank:"subspecies"
upper:"glacialis"
sc:"rodgersii"
ja:"フルマカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"phaeopygia"
sc:"sandwichensis"
ja:"ハワイシロハラミズナギドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"pacificus"
sc:"cuneatus"
ja:"オナガミズナギドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"lherminieri"
sc:"bannermani"
ja:"セグロミズナギドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"oceanicus"
sc:"exasperatus"
ja:"アシナガウミツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucorhoa"
sc:"leucorhoa"
ja:"コシジロウミツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"furcata"
sc:"furcata"
ja:"ハイイロウミツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"minor"
sc:"minor"
ja:"オオグンカンドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"ariel"
sc:"ariel"
ja:"コグンカンドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"dactylatra"
sc:"personata"
ja:"アオツラカツオドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"sula"
sc:"rubripes"
ja:"アカアシカツオドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucogaster"
sc:"plotus"
ja:"カツオドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucogaster"
sc:"brewsteri"
ja:"シロガシラカツオドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"pelagicus"
sc:"pelagicus"
ja:"ヒメウ"
}
{
"alien":false,
rank:"subspecies"
upper:"carbo"
sc:"hanedae"
ja:"カワウ"
}
{
"alien":false,
rank:"subspecies"
upper:"stellaris"
sc:"stellaris"
ja:"サンカノゴイ"
}
{
"alien":false,
rank:"subspecies"
upper:"sinensis"
sc:"sinensis"
ja:"ヨシゴイ"
}
{
"alien":false,
rank:"subspecies"
upper:"sinensis"
sc:"bryani"
ja:"マリアナヨシゴイ"
}
{
"alien":false,
rank:"subspecies"
upper:"flavicollis"
sc:"flavicollis"
ja:"タカサゴクロサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"nycticorax"
sc:"nycticorax"
ja:"ゴイサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"caledonicus"
sc:"crassirostris"
ja:"ハシブトゴイ"
}
{
"alien":false,
rank:"subspecies"
upper:"striata"
sc:"amurensis"
ja:"ササゴイ"
}
{
"alien":false,
rank:"subspecies"
upper:"ibis"
sc:"coromandus"
ja:"アマサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"cinerea"
sc:"jouyi"
ja:"アオサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"purpurea"
sc:"manilensis"
ja:"ムラサキサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"alba"
ja:"ダイサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"modesta"
ja:"チュウダイサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"intermedia"
sc:"intermedia"
ja:"チュウサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"garzetta"
sc:"garzetta"
ja:"コサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"sacra"
sc:"sacra"
ja:"クロサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucorodia"
sc:"leucorodia"
ja:"ヘラサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"canadensis"
sc:"canadensis"
ja:"カナダヅル"
}
{
"alien":false,
rank:"subspecies"
upper:"grus"
sc:"lilfordi"
ja:"クロヅル"
}
{
"alien":false,
rank:"subspecies"
upper:"eurizonoides"
sc:"sepiaria"
ja:"オオクイナ"
}
{
"alien":false,
rank:"subspecies"
upper:"striatus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"aquaticus"
sc:"indicus"
ja:"クイナ"
}
{
"alien":false,
rank:"subspecies"
upper:"phoenicurus"
sc:"phoenicurus"
ja:"シロハラクイナ"
}
{
"alien":false,
rank:"subspecies"
upper:"pusilla"
sc:"pusilla"
ja:"ヒメクイナ"
}
{
"alien":false,
rank:"subspecies"
upper:"fusca"
sc:"erythrothorax"
ja:"ヒクイナ"
}
{
"alien":false,
rank:"subspecies"
upper:"fusca"
sc:"phaeopyga"
ja:"リュウキュウヒクイナ"
}
{
"alien":false,
rank:"subspecies"
upper:"cinerea"
sc:"brevipes"
ja:"マミジロクイナ"
}
{
"alien":false,
rank:"subspecies"
upper:"chloropus"
sc:"chloropus"
ja:"バン"
}
{
"alien":false,
rank:"subspecies"
upper:"atra"
sc:"atra"
ja:"オオバン"
}
{
"alien":false,
rank:"subspecies"
upper:"tarda"
sc:"dybowskii"
ja:"ノガン"
}
{
"alien":false,
rank:"subspecies"
upper:"bengalensis"
sc:"lignator"
ja:"バンケン"
}
{
"alien":false,
rank:"subspecies"
upper:"scolopaceus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"lugubris"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"micropterus"
sc:"micropterus"
ja:"セグロカッコウ"
}
{
"alien":false,
rank:"subspecies"
upper:"canorus"
sc:"telephonus"
ja:"カッコウ"
}
{
"alien":false,
rank:"subspecies"
upper:"indicus"
sc:"jotaka"
ja:"ヨタカ"
}
{
"alien":false,
rank:"subspecies"
upper:"brevirostris"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"caudacutus"
sc:"caudacutus"
ja:"ハリオアマツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"pacificus"
sc:"pacificus"
ja:"キタアマツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"pacificus"
sc:"kurodae"
ja:"アマツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"nipalensis"
sc:"kuntzi"
ja:"ヒメアマツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"apricaria"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"hiaticula"
sc:"tundrae"
ja:"ハジロコチドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"dubius"
sc:"curonicus"
ja:"コチドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"alexandrinus"
sc:"alexandrinus"
ja:"ハシボソシロチドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"alexandrinus"
sc:"dealbatus"
ja:"シロチドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"mongolus"
sc:"mongolus"
ja:"モウコメダイチドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"mongolus"
sc:"stegmanni"
ja:"メダイチドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"ostralegus"
sc:"osculans"
ja:"ミヤコドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"himantopus"
sc:"himantopus"
ja:"セイタカシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"himantopus"
sc:"leucocephalus"
ja:"オーストラリアセイタカシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"solitaria"
sc:"japonica"
ja:"アオシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"gallinago"
sc:"gallinago"
ja:"タシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"griseus"
sc:"hendersoni"
ja:"アメリカオオハシシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"limosa"
sc:"melanuroides"
ja:"オグロシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"lapponica"
sc:"menzbieri"
ja:"コシジロオオソリハシシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"lapponica"
sc:"baueri"
ja:"オオソリハシシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"phaeopus"
sc:"variegatus"
ja:"チュウシャクシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"arquata"
sc:"orientalis"
ja:"ダイシャクシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"totanus"
sc:"ussuriensis"
ja:"アカアシシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"interpres"
sc:"interpres"
ja:"キョウジョシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"canutus"
sc:"rogersi"
ja:"コオバシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"ptilocnemis"
sc:"quarta"
ja:"チシマシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"alpina"
sc:"sakhalina"
ja:"ハマシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"alpina"
sc:"arcticola"
ja:"キタアラスカハマシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"falcinellus"
sc:"sibirica"
ja:"キリアイ"
}
{
"alien":false,
rank:"subspecies"
upper:"benghalensis"
sc:"benghalensis"
ja:"タマシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"suscitator"
sc:"okinavensis"
ja:"ミフウズラ"
}
{
"alien":false,
rank:"subspecies"
upper:"stolidus"
sc:"pileatus"
ja:"クロアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"stolidus"
sc:"pullus"
ja:"リュウキュウクロアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"minutus"
sc:"marcusi"
ja:"ヒメクロアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"cerulea"
sc:"saxatilis"
ja:"ハイイロアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"candida"
ja:"シロアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"tridactyla"
sc:"pollicaris"
ja:"ミツユビカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"atricilla"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"canus"
sc:"kamtschatschensis"
ja:"カモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"canus"
sc:"heinei"
ja:"ニシシベリアカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"canus"
sc:"brachyrhynchus"
ja:"コカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"hyperboreus"
sc:"pallidissimus"
ja:"シロカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"glaucoides"
sc:"glaucoides"
ja:"アイスランドカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"glaucoides"
sc:"kumlieni"
ja:"クムリーンアイスランドカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"argentatus"
sc:"vegae"
ja:"セグロカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"argentatus"
sc:"smithsonianus"
ja:"アメリカセグロカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"cachinnans"
sc:"mongolicus"
ja:"キアシセグロカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"fuscus"
sc:"heuglini"
ja:"ニシセグロカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"fuscus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"nilotica"
sc:"nilotica"
ja:"ハシブトアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"bergii"
sc:"cristata"
ja:"オオアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"bengalensis"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"albifrons"
sc:"sinensis"
ja:"コアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"anaethetus"
sc:"anaethetus"
ja:"マミジロアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"fuscata"
sc:"nubilosa"
ja:"セグロアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"dougallii"
sc:"bangsi"
ja:"ベニアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"hirundo"
sc:"minussensis"
ja:"アカアシアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"hirundo"
sc:"longipennis"
ja:"アジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"hybrida"
sc:"javanicus"
ja:"クロハラアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"niger"
sc:"niger"
ja:"ハシグロクロハラアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"niger"
sc:"surinamensis"
ja:"アメリカハシグロクロハラアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"alle"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"lomvia"
sc:"arra"
ja:"ハシブトウミガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"aalge"
sc:"inornata"
ja:"ウミガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"torda"
sc:"islandica"
ja:"オオハシウミガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"columba"
sc:"snowi"
ja:"ウミバト"
}
{
"alien":false,
rank:"subspecies"
upper:"columba"
sc:"kaiurka"
ja:"アリューシャンウミバト"
}
{
"alien":false,
rank:"subspecies"
upper:"haliaetus"
sc:"haliaetus"
ja:"ミサゴ"
}
{
"alien":false,
rank:"subspecies"
upper:"ptilorhynchus"
sc:"orientalis"
ja:"ハチクマ"
}
{
"alien":false,
rank:"subspecies"
upper:"caeruleus"
sc:"hypoleucus"
ja:"カタグロトビ"
}
{
"alien":false,
rank:"subspecies"
upper:"migrans"
sc:"lineatus"
ja:"トビ"
}
{
"alien":false,
rank:"subspecies"
upper:"albicilla"
sc:"albicilla"
ja:"オジロワシ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucocephalus"
sc:"washingtoniensis"
ja:"ハクトウワシ"
}
{
"alien":false,
rank:"subspecies"
upper:"cheela"
sc:"perplexus"
ja:"カンムリワシ"
}
{
"alien":false,
rank:"subspecies"
upper:"aeruginosus"
sc:"aeruginosus"
ja:"ヨーロッパチュウヒ"
}
{
"alien":false,
rank:"subspecies"
upper:"spilonotus"
sc:"spilonotus"
ja:"チュウヒ"
}
{
"alien":false,
rank:"subspecies"
upper:"cyaneus"
sc:"cyaneus"
ja:"ハイイロチュウヒ"
}
{
"alien":false,
rank:"subspecies"
upper:"gularis"
sc:"gularis"
ja:"ツミ"
}
{
"alien":false,
rank:"subspecies"
upper:"gularis"
sc:"iwasakii"
ja:"リュウキュウツミ"
}
{
"alien":false,
rank:"subspecies"
upper:"nisus"
sc:"nisosimilis"
ja:"ハイタカ"
}
{
"alien":false,
rank:"subspecies"
upper:"gentilis"
sc:"albidus"
ja:"シロオオタカ"
}
{
"alien":false,
rank:"subspecies"
upper:"gentilis"
sc:"fujiyamae"
ja:"オオタカ"
}
{
"alien":false,
rank:"subspecies"
upper:"buteo"
sc:"japonicus"
ja:"ノスリ"
}
{
"alien":false,
rank:"subspecies"
upper:"buteo"
sc:"toyoshimai"
ja:"オガサワラノスリ"
}
{
"alien":false,
rank:"subspecies"
upper:"buteo"
sc:"oshiroi"
ja:"ダイトウノスリ"
}
{
"alien":false,
rank:"subspecies"
upper:"lagopus"
sc:"menzbieri"
ja:"ケアシノスリ"
}
{
"alien":false,
rank:"subspecies"
upper:"chrysaetos"
sc:"japonica"
ja:"イヌワシ"
}
{
"alien":false,
rank:"subspecies"
upper:"nipalensis"
sc:"orientalis"
ja:"クマタカ"
}
{
"alien":false,
rank:"subspecies"
upper:"longimembris"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"lempiji"
sc:"ussuriensis"
ja:"サメイロオオコノハズク"
}
{
"alien":false,
rank:"subspecies"
upper:"lempiji"
sc:"semitorques"
ja:"オオコノハズク"
}
{
"alien":false,
rank:"subspecies"
upper:"lempiji"
sc:"pryeri"
ja:"リュウキュウオオコノハズク"
}
{
"alien":false,
rank:"subspecies"
upper:"sunia"
sc:"japonicus"
ja:"コノハズク"
}
{
"alien":false,
rank:"subspecies"
upper:"elegans"
sc:"interpositus"
ja:"ダイトウコノハズク"
}
{
"alien":false,
rank:"subspecies"
upper:"elegans"
sc:"elegans"
ja:"リュウキュウコノハズク"
}
{
"alien":false,
rank:"subspecies"
upper:"bubo"
sc:"kiautschensis"
ja:"タイリクワシミミズク"
}
{
"alien":false,
rank:"subspecies"
upper:"bubo"
sc:"borissowi"
ja:"ワシミミズク"
}
{
"alien":false,
rank:"subspecies"
upper:"blakistoni"
sc:"blakistoni"
ja:"シマフクロウ"
}
{
"alien":false,
rank:"subspecies"
upper:"uralensis"
sc:"japonica"
ja:"エゾフクロウ"
}
{
"alien":false,
rank:"subspecies"
upper:"uralensis"
sc:"hondoensis"
ja:"フクロウ"
}
{
"alien":false,
rank:"subspecies"
upper:"uralensis"
sc:"momiyamae"
ja:"モミヤマフクロウ"
}
{
"alien":false,
rank:"subspecies"
upper:"uralensis"
sc:"fuscescens"
ja:"キュウシュウフクロウ"
}
{
"alien":false,
rank:"subspecies"
upper:"uralensis"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"funereus"
sc:"magnus"
ja:"キンメフクロウ"
}
{
"alien":false,
rank:"subspecies"
upper:"scutulata"
sc:"macroptera"
ja:"チョウセンアオバズク"
}
{
"alien":false,
rank:"subspecies"
upper:"scutulata"
sc:"japonica"
ja:"アオバズク"
}
{
"alien":false,
rank:"subspecies"
upper:"scutulata"
sc:"totogo"
ja:"リュウキュウアオバズク"
}
{
"alien":false,
rank:"subspecies"
upper:"otus"
sc:"otus"
ja:"トラフズク"
}
{
"alien":false,
rank:"subspecies"
upper:"flammeus"
sc:"flammeus"
ja:"コミミズク"
}
{
"alien":false,
rank:"subspecies"
upper:"epops"
sc:"saturata"
ja:"ヤツガシラ"
}
{
"alien":false,
rank:"subspecies"
upper:"coromanda"
sc:"major"
ja:"アカショウビン"
}
{
"alien":false,
rank:"subspecies"
upper:"coromanda"
sc:"bangsi"
ja:"リュウキュウアカショウビン"
}
{
"alien":false,
rank:"subspecies"
upper:"smyrnensis"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"chloris"
sc:"collaris"
ja:"ナンヨウショウビン"
}
{
"alien":false,
rank:"subspecies"
upper:"atthis"
sc:"bengalensis"
ja:"カワセミ"
}
{
"alien":false,
rank:"subspecies"
upper:"erithaca"
sc:"erithaca"
ja:"ミツユビカワセミ"
}
{
"alien":false,
rank:"subspecies"
upper:"lugubris"
sc:"pallida"
ja:"エゾヤマセミ"
}
{
"alien":false,
rank:"subspecies"
upper:"lugubris"
sc:"lugubris"
ja:"ヤマセミ"
}
{
"alien":false,
rank:"subspecies"
upper:"orientalis"
sc:"calonyx"
ja:"ブッポウソウ"
}
{
"alien":false,
rank:"subspecies"
upper:"torquilla"
sc:"chinensis"
ja:"シベリアアリスイ"
}
{
"alien":false,
rank:"subspecies"
upper:"torquilla"
sc:"japonica"
ja:"アリスイ"
}
{
"alien":false,
rank:"subspecies"
upper:"hyperythrus"
sc:"subrufinus"
ja:"チャバラアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"seebohmi"
ja:"エゾコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"nippon"
ja:"コゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"matsudairai"
ja:"ミヤケコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"shikokuensis"
ja:"シコクコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"kotataki"
ja:"ツシマコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"kizuki"
ja:"キュウシュウコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"amamii"
ja:"アマミコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"nigrescens"
ja:"リュウキュウコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"orii"
ja:"オリイコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"minor"
sc:"amurensis"
ja:"コアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucotos"
sc:"subcirris"
ja:"エゾオオアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucotos"
sc:"stejnegeri"
ja:"オオアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucotos"
sc:"namiyei"
ja:"ナミエオオアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucotos"
sc:"owstoni"
ja:"オーストンオオアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucotos"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"major"
sc:"brevirostris"
ja:"ハシブトアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"major"
sc:"japonicus"
ja:"エゾアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"major"
sc:"hondoensis"
ja:"アカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"tridactylus"
sc:"inouyei"
ja:"ミユビゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"javensis"
sc:"richardsi"
ja:"キタタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"martius"
sc:"martius"
ja:"クマゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"awokera"
sc:"awokera"
ja:"アオゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"awokera"
sc:"horii"
ja:"カゴシマアオゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"awokera"
sc:"takatsukasae"
ja:"タネアオゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"canus"
sc:"jessoensis"
ja:"ヤマゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"tinnunculus"
sc:"interstinctus"
ja:"チョウゲンボウ"
}
{
"alien":false,
rank:"subspecies"
upper:"columbarius"
sc:"insignis"
ja:"コチョウゲンボウ"
}
{
"alien":false,
rank:"subspecies"
upper:"columbarius"
sc:"pacificus"
ja:"ヒガシコチョウゲンボウ"
}
{
"alien":false,
rank:"subspecies"
upper:"subbuteo"
sc:"subbuteo"
ja:"チゴハヤブサ"
}
{
"alien":false,
rank:"subspecies"
upper:"cherrug"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"peregrinus"
sc:"japonensis"
ja:"ハヤブサ"
}
{
"alien":false,
rank:"subspecies"
upper:"peregrinus"
sc:"pealei"
ja:"オオハヤブサ"
}
{
"alien":false,
rank:"subspecies"
upper:"peregrinus"
sc:"furuitii"
ja:"シマハヤブサ"
}
{
"alien":false,
rank:"subspecies"
upper:"peregrinus"
sc:"anatum"
ja:"アメリカハヤブサ"
}
{
"alien":false,
rank:"subspecies"
upper:"sordida"
sc:"cucullata"
ja:"ズグロヤイロチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucorynchus"
sc:"leucorynchus"
ja:"モリツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"melaschistos"
sc:"intermedia"
ja:"アサクラサンショウクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"divaricatus"
sc:"divaricatus"
ja:"サンショウクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"divaricatus"
sc:"tegimae"
ja:"リュウキュウサンショウクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"chinensis"
sc:"diffusus"
ja:"コウライウグイス"
}
{
"alien":false,
rank:"subspecies"
upper:"macrocercus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"leucophaeus"
sc:"leucogenis"
ja:"ハイイロオウチュウ"
}
{
"alien":false,
rank:"subspecies"
upper:"hottentottus"
sc:"brevirostris"
ja:"カンムリオウチュウ"
}
{
"alien":false,
rank:"subspecies"
upper:"azurea"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"atrocaudata"
sc:"atrocaudata"
ja:"サンコウチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"atrocaudata"
sc:"illex"
ja:"リュウキュウサンコウチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"bucephalus"
sc:"bucephalus"
ja:"モズ"
}
{
"alien":false,
rank:"subspecies"
upper:"cristatus"
sc:"lucionensis"
ja:"シマアカモズ"
}
{
"alien":false,
rank:"subspecies"
upper:"cristatus"
sc:"superciliosus"
ja:"アカモズ"
}
{
"alien":false,
rank:"subspecies"
upper:"collurio"
sc:"pallidifrons"
ja:"セアカモズ"
}
{
"alien":false,
rank:"subspecies"
upper:"isabellinus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"schach"
sc:"schach"
ja:"タカサゴモズ"
}
{
"alien":false,
rank:"subspecies"
upper:"excubitor"
sc:"mollis"
ja:"シベリアオオモズ"
}
{
"alien":false,
rank:"subspecies"
upper:"excubitor"
sc:"bianchii"
ja:"オオモズ"
}
{
"alien":false,
rank:"subspecies"
upper:"sphenocercus"
sc:"sphenocercus"
ja:"オオカラモズ"
}
{
"alien":false,
rank:"subspecies"
upper:"glandarius"
sc:"brandtii"
ja:"ミヤマカケス"
}
{
"alien":false,
rank:"subspecies"
upper:"glandarius"
sc:"japonicus"
ja:"カケス"
}
{
"alien":false,
rank:"subspecies"
upper:"glandarius"
sc:"tokugawae"
ja:"サドカケス"
}
{
"alien":false,
rank:"subspecies"
upper:"glandarius"
sc:"orii"
ja:"ヤクシマカケス"
}
{
"alien":false,
rank:"subspecies"
upper:"cyanus"
sc:"japonica"
ja:"オナガ"
}
{
"alien":false,
rank:"subspecies"
upper:"pica"
sc:"serica"
ja:"カササギ"
}
{
"alien":false,
rank:"subspecies"
upper:"caryocatactes"
sc:"macrorhynchos"
ja:"ハシナガホシガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"caryocatactes"
sc:"japonica"
ja:"ホシガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"monedula"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"frugilegus"
sc:"pastinator"
ja:"ミヤマガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"corone"
sc:"orientalis"
ja:"ハシボソガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"macrorhynchos"
sc:"mandshuricus"
ja:"チョウセンハシブトガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"macrorhynchos"
sc:"japonensis"
ja:"ハシブトガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"macrorhynchos"
sc:"connectens"
ja:"リュウキュウハシブトガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"macrorhynchos"
sc:"osai"
ja:"オサハシブトガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"corax"
sc:"kamtschaticus"
ja:"ワタリガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"regulus"
sc:"japonensis"
ja:"キクイタダキ"
}
{
"alien":false,
rank:"subspecies"
upper:"pendulinus"
sc:"consobrinus"
ja:"ツリスガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"palustris"
sc:"hensoni"
ja:"ハシブトガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"montanus"
sc:"sachalinensis"
ja:"カラフトコガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"montanus"
sc:"restrictus"
ja:"コガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"varius"
ja:"ヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"namiyei"
ja:"ナミエヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"owstoni"
ja:"オーストンヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"orii"
ja:"ダイトウヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"sunsunpi"
ja:"タネヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"yakushimensis"
ja:"ヤクシマヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"amamii"
ja:"アマミヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"olivaceus"
ja:"オリイヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"ater"
sc:"insularis"
ja:"ヒガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"minor"
sc:"minor"
ja:"シジュウカラ"
}
{
"alien":false,
rank:"subspecies"
upper:"minor"
sc:"amamiensis"
ja:"アマミシジュウカラ"
}
{
"alien":false,
rank:"subspecies"
upper:"minor"
sc:"okinawae"
ja:"オキナワシジュウカラ"
}
{
"alien":false,
rank:"subspecies"
upper:"minor"
sc:"nigriloris"
ja:"イシガキシジュウカラ"
}
{
"alien":false,
rank:"subspecies"
upper:"minor"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"cyanus"
sc:"tianschanicus"
ja:"ルリガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"biarmicus"
sc:"russicus"
ja:"ヒゲガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"bimaculata"
sc:"torquata"
ja:"クビワコウテンシ"
}
{
"alien":false,
rank:"subspecies"
upper:"brachydactyla"
sc:"longipennis"
ja:"ヒメコウテンシ"
}
{
"alien":false,
rank:"subspecies"
upper:"cheleensis"
sc:"cheleensis"
ja:"コヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"arvensis"
sc:"pekinensis"
ja:"オオヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"arvensis"
sc:"lonnbergi"
ja:"カラフトチュウヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"arvensis"
sc:"japonica"
ja:"ヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"alpestris"
sc:"flava"
ja:"ハマヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"paludicola"
sc:"chinensis"
ja:"タイワンショウドウツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"riparia"
sc:"ijimae"
ja:"ショウドウツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"rustica"
sc:"saturata"
ja:"アカハラツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"rustica"
sc:"gutturalis"
ja:"ツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"tahitica"
sc:"namiyei"
ja:"リュウキュウツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"daurica"
sc:"japonica"
ja:"コシアカツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"urbicum"
sc:"lagopodum"
ja:"ニシイワツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"dasypus"
sc:"dasypus"
ja:"イワツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"sinensis"
sc:"sinensis"
ja:"シロガシラ"
}
{
"alien":false,
rank:"subspecies"
upper:"sinensis"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"amaurotis"
ja:"ヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"squamiceps"
ja:"オガサワラヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"magnirostris"
ja:"ハシブトヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"borodinonis"
ja:"ダイトウヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"ogawae"
ja:"アマミヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"pryeri"
ja:"リュウキュウヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"stejnegeri"
ja:"イシガキヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"nagamichii"
ja:"タイワンヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"diphone"
sc:"sakhalinensis"
ja:"カラフトウグイス"
}
{
"alien":false,
rank:"subspecies"
upper:"diphone"
sc:"borealis"
ja:"チョウセンウグイス"
}
{
"alien":false,
rank:"subspecies"
upper:"diphone"
sc:"cantans"
ja:"ウグイス"
}
{
"alien":false,
rank:"subspecies"
upper:"diphone"
sc:"diphone"
ja:"ハシナガウグイス"
}
{
"alien":false,
rank:"subspecies"
upper:"diphone"
sc:"restricta"
ja:"ダイトウウグイス"
}
{
"alien":false,
rank:"subspecies"
upper:"diphone"
sc:"riukiuensis"
ja:"リュウキュウウグイス"
}
{
"alien":false,
rank:"subspecies"
upper:"caudatus"
sc:"japonicus"
ja:"シマエナガ"
}
{
"alien":false,
rank:"subspecies"
upper:"caudatus"
sc:"magnus"
ja:"チョウセンエナガ"
}
{
"alien":false,
rank:"subspecies"
upper:"caudatus"
sc:"trivirgatus"
ja:"エナガ"
}
{
"alien":false,
rank:"subspecies"
upper:"caudatus"
sc:"kiusiuensis"
ja:"キュウシュウエナガ"
}
{
"alien":false,
rank:"subspecies"
upper:"trochilus"
sc:"yakutensis"
ja:"キタヤナギムシクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"collybita"
sc:"tristis"
ja:"チフチャフ"
}
{
"alien":false,
rank:"subspecies"
upper:"fuscatus"
sc:"fuscatus"
ja:"ムジセッカ"
}
{
"alien":false,
rank:"subspecies"
upper:"affinis"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"borealis"
sc:"borealis"
ja:"コムシクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"borealis"
sc:"kennicotti"
ja:"アメリカコムシクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"curruca"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"familiare"
sc:"familiare"
ja:"メグロ"
}
{
"alien":false,
rank:"subspecies"
upper:"familiare"
sc:"hahasima"
ja:"ハハジマメグロ"
}
{
"alien":false,
rank:"subspecies"
upper:"japonicus"
sc:"japonicus"
ja:"メジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"japonicus"
sc:"stejnegeri"
ja:"シチトウメジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"japonicus"
sc:"alani"
ja:"イオウトウメジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"japonicus"
sc:"daitoensis"
ja:"ダイトウメジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"japonicus"
sc:"insularis"
ja:"シマメジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"japonicus"
sc:"loochooensis"
ja:"リュウキュウメジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"certhiola"
sc:"rubescens"
ja:"シベリアセンニュウ"
}
{
"alien":false,
rank:"subspecies"
upper:"pryeri"
sc:"pryeri"
ja:"オオセッカ"
}
{
"alien":false,
rank:"subspecies"
upper:"fasciolata"
sc:"amnicola"
ja:"エゾセンニュウ"
}
{
"alien":false,
rank:"subspecies"
upper:"bistrigiceps"
sc:"bistrigiceps"
ja:"コヨシキリ"
}
{
"alien":false,
rank:"subspecies"
upper:"agricola"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"aedon"
sc:"stegmanni"
ja:"ハシブトオオヨシキリ"
}
{
"alien":false,
rank:"subspecies"
upper:"juncidis"
sc:"brunniceps"
ja:"セッカ"
}
{
"alien":false,
rank:"subspecies"
upper:"garrulus"
sc:"centralasiae"
ja:"キレンジャク"
}
{
"alien":false,
rank:"subspecies"
upper:"europaea"
sc:"asiatica"
ja:"シロハラゴジュウカラ"
}
{
"alien":false,
rank:"subspecies"
upper:"europaea"
sc:"amurensis"
ja:"ゴジュウカラ"
}
{
"alien":false,
rank:"subspecies"
upper:"europaea"
sc:"roseilia"
ja:"キュウシュウゴジュウカラ"
}
{
"alien":false,
rank:"subspecies"
upper:"familiaris"
sc:"daurica"
ja:"キタキバシリ"
}
{
"alien":false,
rank:"subspecies"
upper:"familiaris"
sc:"japonica"
ja:"キバシリ"
}
{
"alien":false,
rank:"subspecies"
upper:"troglodytes"
sc:"dauricus"
ja:"チョウセンミソサザイ"
}
{
"alien":false,
rank:"subspecies"
upper:"troglodytes"
sc:"fumigatus"
ja:"ミソサザイ"
}
{
"alien":false,
rank:"subspecies"
upper:"troglodytes"
sc:"mosukei"
ja:"モスケミソサザイ"
}
{
"alien":false,
rank:"subspecies"
upper:"troglodytes"
sc:"orii"
ja:"ダイトウミソサザイ"
}
{
"alien":false,
rank:"subspecies"
upper:"troglodytes"
sc:"ogawae"
ja:"オガワミソサザイ"
}
{
"alien":false,
rank:"subspecies"
upper:"vulgaris"
sc:"poltaratskyi"
ja:"ホシムクドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"pallasii"
sc:"pallasii"
ja:"カワガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"sibirica"
sc:"davisoni"
ja:"マミジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"dauma"
sc:"aurea"
ja:"<NAME>ツグ<NAME>"
}
{
"alien":false,
rank:"subspecies"
upper:"dauma"
sc:"major"
ja:"<NAME>オトラツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"dauma"
sc:"iriomotensis"
ja:"コトラツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"minimus"
sc:"aliciae"
ja:"ハイイロチャツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"merula"
sc:"mandarinus"
ja:"クロウタドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"chrysolaus"
sc:"orii"
ja:"<NAME>ハラ"
}
{
"alien":false,
rank:"subspecies"
upper:"chrysolaus"
sc:"chrysolaus"
ja:"<NAME>"
}
{
"alien":false,
rank:"subspecies"
upper:"ruficollis"
sc:"atrogularis"
ja:"<NAME>ドグロツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"ruficollis"
sc:"ruficollis"
ja:"<NAME>ツ<NAME>"
}
{
"alien":false,
rank:"subspecies"
upper:"naumanni"
sc:"eunomus"
ja:"ツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"naumanni"
sc:"naumanni"
ja:"<NAME>チジョウツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"iliacus"
sc:"iliacus"
ja:"<NAME>キアカツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"viscivorus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"rubecula"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"akahige"
sc:"akahige"
ja:"コマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"akahige"
sc:"tanensis"
ja:"タネコマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"komadori"
sc:"komadori"
ja:"アカヒゲ"
}
{
"alien":false,
rank:"subspecies"
upper:"komadori"
sc:"namiyei"
ja:"ホントウアカヒゲ"
}
{
"alien":false,
rank:"subspecies"
upper:"komadori"
sc:"subrufus"
ja:"ウスアカヒゲ"
}
{
"alien":false,
rank:"subspecies"
upper:"svecica"
sc:"svecica"
ja:"オガワコマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"cyane"
sc:"bochaiensis"
ja:"コルリ"
}
{
"alien":false,
rank:"subspecies"
upper:"cyanurus"
sc:"cyanurus"
ja:"ルリビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"ochruros"
sc:"rufiventris"
ja:"クロジョウビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"phoenicurus"
sc:"phoenicurus"
ja:"シロビタイジョウビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"auroreus"
sc:"auroreus"
ja:"ジョウビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"torquatus"
sc:"stejnegeri"
ja:"ノビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"caprata"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"oenanthe"
sc:"oenanthe"
ja:"ハシグロヒタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"deserti"
sc:"oreophila"
ja:"サバクヒタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"solitarius"
sc:"pandoo"
ja:"アオハライソヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"solitarius"
sc:"philippensis"
ja:"イソヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"striata"
sc:"mongola"
ja:"ムナフヒタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"sibirica"
sc:"sibirica"
ja:"サメビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"dauurica"
sc:"dauurica"
ja:"コサメビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"hypoleuca"
sc:"sibirica"
ja:"マダラヒタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"narcissina"
sc:"narcissina"
ja:"キビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"narcissina"
sc:"owstoni"
ja:"リュウキュウキビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"cyanomelana"
sc:"cumatilis"
ja:"チョウセンオオルリ"
}
{
"alien":false,
rank:"subspecies"
upper:"cyanomelana"
sc:"cyanomelana"
ja:"オオルリ"
}
{
"alien":false,
rank:"subspecies"
upper:"thalassinus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"vivida"
sc:"vivida"
ja:"チャバラオオルリ"
}
{
"alien":false,
rank:"subspecies"
upper:"collaris"
sc:"erythropygia"
ja:"イワヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"montanella"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"domesticus"
sc:"domesticus"
ja:"イエスズメ"
}
{
"alien":false,
rank:"subspecies"
upper:"rutilans"
sc:"rutilans"
ja:"ニュウナイスズメ"
}
{
"alien":false,
rank:"subspecies"
upper:"montanus"
sc:"saturatus"
ja:"スズメ"
}
{
"alien":false,
rank:"subspecies"
upper:"flava"
sc:"plexa"
ja:"シベリアツメナガセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"flava"
sc:"leucocephala"
ja:"カオジロツメナガセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"flava"
sc:"macronyx"
ja:"キタツメナガセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"flava"
sc:"simillima"
ja:"マミジロツメナガセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"flava"
sc:"taivana"
ja:"ツメナガセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"citreola"
sc:"citreola"
ja:"キガシラセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"cinerea"
sc:"cinerea"
ja:"キセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"dukhunensis"
ja:"ニシシベリアハクセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"personata"
ja:"メンガタハクセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"alboides"
ja:"ネパールハクセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"baicalensis"
ja:"シベリアハクセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"ocularis"
ja:"タイワンハクセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"lugens"
ja:"ハクセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"leucopsis"
ja:"ホオジロハクセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"richardi"
sc:"richardi"
ja:"マミジロタヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"pratensis"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"trivialis"
sc:"trivialis"
ja:"ヨーロッパビンズイ"
}
{
"alien":false,
rank:"subspecies"
upper:"hodgsoni"
sc:"yunnanensis"
ja:"カラフトビンズイ"
}
{
"alien":false,
rank:"subspecies"
upper:"hodgsoni"
sc:"hodgsoni"
ja:"ビンズイ"
}
{
"alien":false,
rank:"subspecies"
upper:"gustavi"
sc:"gustavi"
ja:"セジロタヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"rubescens"
sc:"japonicus"
ja:"タヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"coelebs"
sc:"coelebs"
ja:"ズアオアトリ"
}
{
"alien":false,
rank:"subspecies"
upper:"sinica"
sc:"kawarahiba"
ja:"オオカワラヒワ"
}
{
"alien":false,
rank:"subspecies"
upper:"sinica"
sc:"minor"
ja:"カワラヒワ"
}
{
"alien":false,
rank:"subspecies"
upper:"sinica"
sc:"kittlitzi"
ja:"オガサワラカワラヒワ"
}
{
"alien":false,
rank:"subspecies"
upper:"flammea"
sc:"flammea"
ja:"ベニヒワ"
}
{
"alien":false,
rank:"subspecies"
upper:"hornemanni"
sc:"exilipes"
ja:"コベニヒワ"
}
{
"alien":false,
rank:"subspecies"
upper:"arctoa"
sc:"brunneonucha"
ja:"ハギマシコ"
}
{
"alien":false,
rank:"subspecies"
upper:"sibiricus"
sc:"sanguinolentus"
ja:"ベニマシコ"
}
{
"alien":false,
rank:"subspecies"
upper:"erythrinus"
sc:"grebnitskii"
ja:"アカマシコ"
}
{
"alien":false,
rank:"subspecies"
upper:"enucleator"
sc:"kamtschatkensis"
ja:"コバシギンザンマシコ"
}
{
"alien":false,
rank:"subspecies"
upper:"enucleator"
sc:"sakhalinensis"
ja:"ギンザンマシコ"
}
{
"alien":false,
rank:"subspecies"
upper:"curvirostra"
sc:"japonica"
ja:"イスカ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucoptera"
sc:"bifasciata"
ja:"ナキイスカ"
}
{
"alien":false,
rank:"subspecies"
upper:"pyrrhula"
sc:"cassinii"
ja:"ベニバラウソ"
}
{
"alien":false,
rank:"subspecies"
upper:"pyrrhula"
sc:"rosacea"
ja:"アカウソ"
}
{
"alien":false,
rank:"subspecies"
upper:"pyrrhula"
sc:"griseiventris"
ja:"ウソ"
}
{
"alien":false,
rank:"subspecies"
upper:"coccothraustes"
sc:"coccothraustes"
ja:"シベリアシメ"
}
{
"alien":false,
rank:"subspecies"
upper:"coccothraustes"
sc:"japonicus"
ja:"シメ"
}
{
"alien":false,
rank:"subspecies"
upper:"migratoria"
sc:"migratoria"
ja:"コイカル"
}
{
"alien":false,
rank:"subspecies"
upper:"personata"
sc:"personata"
ja:"イカル"
}
{
"alien":false,
rank:"subspecies"
upper:"lapponicus"
sc:"coloratus"
ja:"ツメナガホオジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"nivalis"
sc:"vlasowae"
ja:"ユキホオジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"nivalis"
sc:"townsendi"
ja:"オオユキホオジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"coronata"
sc:"coronata"
ja:"キヅタアメリカムシクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"pusilla"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"citrinella"
sc:"erythrogenys"
ja:"キアオジ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucocephalos"
sc:"leucocephalos"
ja:"シラガホオジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"cioides"
sc:"ciopsis"
ja:"ホオジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"buchanani"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"fucata"
sc:"fucata"
ja:"ホオアカ"
}
{
"alien":false,
rank:"subspecies"
upper:"rustica"
sc:"latifascia"
ja:"カシラダカ"
}
{
"alien":false,
rank:"subspecies"
upper:"elegans"
sc:"elegans"
ja:"ミヤマホオジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"aureola"
sc:"ornata"
ja:"シマアオジ"
}
{
"alien":false,
rank:"subspecies"
upper:"spodocephala"
sc:"spodocephala"
ja:"シベリアアオジ"
}
{
"alien":false,
rank:"subspecies"
upper:"spodocephala"
sc:"personata"
ja:"アオジ"
}
{
"alien":false,
rank:"subspecies"
upper:"pallasi"
sc:"polaris"
ja:"シベリアジュリン"
}
{
"alien":false,
rank:"subspecies"
upper:"pallasi"
sc:"pallasi"
ja:"オオシベリアジュリン"
}
{
"alien":false,
rank:"subspecies"
upper:"yessoensis"
sc:"yessoensis"
ja:"コジュリン"
}
{
"alien":false,
rank:"subspecies"
upper:"schoeniclus"
sc:"pyrrhulina"
ja:"オオジュリン"
}
{
"alien":false,
rank:"subspecies"
upper:"iliaca"
sc:"unalaschcensis"
ja:"ゴマフスズメ"
}
{
"alien":false,
rank:"subspecies"
upper:"melodia"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"leucophrys"
sc:"gambelii"
ja:"ミヤマシトド"
}
{
"alien":false,
rank:"subspecies"
upper:"sandwichensis"
sc:"sandwichensis"
ja:"サバンナシトド"
}
{
"alien":true,
rank:"subspecies"
upper:"thoracicus"
sc:"thoracicus"
ja:"コジュケイ"
}
{
"alien":true,
rank:"subspecies"
upper:"thoracicus"
sc:"sonorivox"
ja:"テッケイ"
}
{
"alien":true,
rank:"subspecies"
upper:"soemmerringii"
sc:"scintillans"
ja:"ヤマドリ"
}
{
"alien":true,
rank:"subspecies"
upper:"soemmerringii"
sc:"subrufus"
ja:"ウスアカヤマドリ"
}
{
"alien":true,
rank:"subspecies"
upper:"soemmerringii"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"colchicus"
sc:"karpowi"
ja:"コウライキジ"
}
{
"alien":true,
rank:"subspecies"
upper:"colchicus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"canadensis"
sc:"moffitti"
ja:"オオカナダガン"
}
{
"alien":true,
rank:"subspecies"
upper:"decaocto"
sc:"decaocto"
ja:"シラコバト"
}
{
"alien":true,
rank:"subspecies"
upper:"himantopus"
sc:"mexicanus"
ja:"クロエリセイタカシギ"
}
{
"alien":true,
rank:"subspecies"
upper:"krameri"
sc:"manillensis"
ja:"ワカケホンセイインコ"
}
{
"alien":true,
rank:"subspecies"
upper:"alexandri"
sc:"fasciata"
ja:"ダルマインコ"
}
{
"alien":true,
rank:"subspecies"
upper:"monachus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"pica"
sc:"serica"
ja:"カササギ"
}
{
"alien":true,
rank:"subspecies"
upper:"japonicus"
sc:"stejnegeri"
ja:"シチトウメジロ"
}
{
"alien":true,
rank:"subspecies"
upper:"japonicus"
sc:"alani"
ja:"イオウトウメジロ"
}
{
"alien":true,
rank:"subspecies"
upper:"canorus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"cineraceus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"sannio"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"lutea"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"cristatellus"
sc:"cristatellus"
ja:"ハッカチョウ"
}
{
"alien":true,
rank:"subspecies"
upper:"tristis"
sc:"tristis"
ja:"インドハッカ"
}
{
"alien":true,
rank:"subspecies"
upper:"contra"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"intermedius"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"orix"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"melpoda"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"amandava"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"striata"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"punctulata"
sc:"topela"
ja:"シマキンパラ"
}
{
"alien":true,
rank:"subspecies"
upper:"atricapilla"
sc:"ssp."
ja:"亜種不明"
}
]
# Populate the 'names' collection from data_names, linking each taxon to its
# parent via upper_id. Assumes higher ranks appear earlier in data_names so a
# parent is already inserted when its children are processed.
db.createCollection 'names'

# Parent rank for each rank; a record's `upper` field holds the parent's sc
# at that rank (orders have no `upper` and no parent link).
rank_relationships =
  family: "order"
  genus: "family"
  species: "genus"
  subspecies: "species"

for name in data_names
  # data_names contains repeated entries; insert each distinct record once.
  if db.names.find(name).count() is 0
    if name.upper
      upper_rank = rank_relationships[name.rank]
      # NOTE(review): epithets are not unique across genera, so matching on
      # sc alone may be ambiguous; [0] takes the first match — confirm this
      # is acceptable for the data set.
      parent = db.names.find(
        rank: upper_rank
        sc: name.upper
      )[0]
      if parent?
        # The actual link is upper_id; `upper` is rewritten to the parent's
        # rank name (the parent's sc remains reachable through upper_id).
        name.upper = upper_rank
        name.upper_id = parent._id
      else
        # Guard against a missing parent: the original expression
        # find(...)[0]._id would throw TypeError on undefined. Report and
        # insert the record without a parent link instead of aborting.
        print "missing parent for #{name.rank} #{name.sc}: #{upper_rank} #{name.upper}"
    db.names.insert name
| true | # mongo birdAPI
# Database import script: loads the bird name data into the birdAPI database.
db.dropDatabase()
data_names = [
{
sc:"galliformes"
ja:"キジ目"
rank:"order"
}
{
sc:"anseriformes"
ja:"カモ目"
rank:"order"
}
{
sc:"podicipediformes"
ja:"カイツブリ目"
rank:"order"
}
{
sc:"phaethontiformes"
ja:"ネッタイチョウ目"
rank:"order"
}
{
sc:"pterocliformes"
ja:"サケイ目"
rank:"order"
}
{
sc:"columbiformes"
ja:"ハト目"
rank:"order"
}
{
sc:"gaviiformes"
ja:"アビ目"
rank:"order"
}
{
sc:"procellariiformes"
ja:"ミズナギドリ目"
rank:"order"
}
{
sc:"ciconiiformes"
ja:"コウノトリ目"
rank:"order"
}
{
sc:"suliformes"
ja:"カツオドリ目"
rank:"order"
}
{
sc:"pelecaniformes"
ja:"ペリカン目"
rank:"order"
}
{
sc:"gruiformes"
ja:"ツル目"
rank:"order"
}
{
sc:"otidiformes"
ja:"ノガン目"
rank:"order"
}
{
sc:"cuculiformes"
ja:"カッコウ目"
rank:"order"
}
{
sc:"caprimulgiformes"
ja:"ヨタカ目"
rank:"order"
}
{
sc:"apodiformes"
ja:"アマツバメ目"
rank:"order"
}
{
sc:"charadriiformes"
ja:"チドリ目"
rank:"order"
}
{
sc:"accipitriformes"
ja:"タカ目"
rank:"order"
}
{
sc:"strigiformes"
ja:"フクロウ目"
rank:"order"
}
{
sc:"bucerotiformes"
ja:"サイチョウ目"
rank:"order"
}
{
sc:"coraciiformes"
ja:"ブッポウソウ目"
rank:"order"
}
{
sc:"piciformes"
ja:"キツツキ目"
rank:"order"
}
{
sc:"falconiformes"
ja:"ハヤブサ目"
rank:"order"
}
{
sc:"passeriformes"
ja:"スズメ目"
rank:"order"
}
{
sc:"galliformes"
ja:"キジ目"
rank:"order"
}
{
sc:"anseriformes"
ja:"カモ目"
rank:"order"
}
{
sc:"columbiformes"
ja:"ハト目"
rank:"order"
}
{
sc:"ciconiiformes"
ja:"コウノトリ目"
rank:"order"
}
{
sc:"pelecaniformes"
ja:"ペリカン目"
rank:"order"
}
{
sc:"charadriiformes"
ja:"チドリ目"
rank:"order"
}
{
sc:"psittaciformes"
ja:"インコ目"
rank:"order"
}
{
sc:"passeriformes"
ja:"スズメ目"
rank:"order"
}
{
sc:"phasianidae"
ja:"キジ科"
rank:"family"
upper:"galliformes"
}
{
sc:"anatidae"
ja:"カモ科"
rank:"family"
upper:"anseriformes"
}
{
sc:"podicipedidae"
ja:"カイツブリ科"
rank:"family"
upper:"podicipediformes"
}
{
sc:"phaethontidae"
ja:"ネッタイチョウ科"
rank:"family"
upper:"phaethontiformes"
}
{
sc:"pteroclidae"
ja:"サケイ科"
rank:"family"
upper:"pterocliformes"
}
{
sc:"columbidae"
ja:"ハト科"
rank:"family"
upper:"columbiformes"
}
{
sc:"gaviidae"
ja:"アビ科"
rank:"family"
upper:"gaviiformes"
}
{
sc:"diomedeidae"
ja:"アホウドリ科"
rank:"family"
upper:"procellariiformes"
}
{
sc:"procellariidae"
ja:"ミズナギドリ科"
rank:"family"
upper:"procellariiformes"
}
{
sc:"hydrobatidae"
ja:"ウミツバメ科"
rank:"family"
upper:"procellariiformes"
}
{
sc:"ciconiidae"
ja:"コウノトリ科"
rank:"family"
upper:"ciconiiformes"
}
{
sc:"fregatidae"
ja:"グンカンドリ科"
rank:"family"
upper:"suliformes"
}
{
sc:"sulidae"
ja:"カツオドリ科"
rank:"family"
upper:"suliformes"
}
{
sc:"phalacrocoracidae"
ja:"ウ科"
rank:"family"
upper:"suliformes"
}
{
sc:"pelecanidae"
ja:"ペリカン科"
rank:"family"
upper:"pelecaniformes"
}
{
sc:"ardeidae"
ja:"サギ科"
rank:"family"
upper:"pelecaniformes"
}
{
sc:"threskiornithidae"
ja:"トキ科"
rank:"family"
upper:"pelecaniformes"
}
{
sc:"gruidae"
ja:"ツル科"
rank:"family"
upper:"gruiformes"
}
{
sc:"rallidae"
ja:"クイナ科"
rank:"family"
upper:"gruiformes"
}
{
sc:"otididae"
ja:"ノガン科"
rank:"family"
upper:"otidiformes"
}
{
sc:"cuculidae"
ja:"カッコウ科"
rank:"family"
upper:"cuculiformes"
}
{
sc:"caprimulgidae"
ja:"ヨタカ科"
rank:"family"
upper:"caprimulgiformes"
}
{
sc:"apodidae"
ja:"アマツバメ科"
rank:"family"
upper:"apodiformes"
}
{
sc:"charadriidae"
ja:"チドリ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"haematopodidae"
ja:"ミヤコドリ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"recurvirostridae"
ja:"セイタカシギ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"scolopacidae"
ja:"シギ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"jacanidae"
ja:"レンカク科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"rostratulidae"
ja:"タマシギ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"turnicidae"
ja:"ミフウズラ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"glareolidae"
ja:"ツバメチドリ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"laridae"
ja:"カモメ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"stercorariidae"
ja:"トウゾクカモメ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"alcidae"
ja:"ウミスズメ科"
rank:"family"
upper:"charadriiformes"
}
{
sc:"pandionidae"
ja:"ミサゴ科"
rank:"family"
upper:"accipitriformes"
}
{
sc:"accipitridae"
ja:"タカ科"
rank:"family"
upper:"accipitriformes"
}
{
sc:"tytonidae"
ja:"メンフクロウ科"
rank:"family"
upper:"strigiformes"
}
{
sc:"strigidae"
ja:"フクロウ科"
rank:"family"
upper:"strigiformes"
}
{
sc:"upupidae"
ja:"ヤツガシラ科"
rank:"family"
upper:"bucerotiformes"
}
{
sc:"alcedinidae"
ja:"カワセミ科"
rank:"family"
upper:"coraciiformes"
}
{
sc:"meropidae"
ja:"ハチクイ科"
rank:"family"
upper:"coraciiformes"
}
{
sc:"coraciidae"
ja:"ブッポウソウ科"
rank:"family"
upper:"coraciiformes"
}
{
sc:"picidae"
ja:"キツツキ科"
rank:"family"
upper:"piciformes"
}
{
sc:"falconidae"
ja:"ハヤブサ科"
rank:"family"
upper:"falconiformes"
}
{
sc:"pittidae"
ja:"ヤイロチョウ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"artamidae"
ja:"モリツバメ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"campephagidae"
ja:"サンショウクイ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"oriolidae"
ja:"コウライウグイス科"
rank:"family"
upper:"passeriformes"
}
{
sc:"dicruridae"
ja:"オウチュウ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"monarchidae"
ja:"カササギヒタキ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"laniidae"
ja:"モズ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"corvidae"
ja:"カラス科"
rank:"family"
upper:"passeriformes"
}
{
sc:"regulidae"
ja:"キクイタダキ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"remizidae"
ja:"ツリスガラ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"paridae"
ja:"シジュウカラ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"panuridae"
ja:"ヒゲガラ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"alaudidae"
ja:"ヒバリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"hirundinidae"
ja:"ツバメ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"pycnonotidae"
ja:"ヒヨドリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"cettiidae"
ja:"ウグイス科"
rank:"family"
upper:"passeriformes"
}
{
sc:"aegithalidae"
ja:"エナガ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"phylloscopidae"
ja:"ムシクイ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"sylviidae"
ja:"ズグロムシクイ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"zosteropidae"
ja:"メジロ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"locustellidae"
ja:"センニュウ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"acrocephalidae"
ja:"ヨシキリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"cisticolidae"
ja:"セッカ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"bombycillidae"
ja:"レンジャク科"
rank:"family"
upper:"passeriformes"
}
{
sc:"sittidae"
ja:"ゴジュウカラ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"certhiidae"
ja:"キバシリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"troglodytidae"
ja:"ミソサザイ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"sturnidae"
ja:"ムクドリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"cinclidae"
ja:"カワガラス科"
rank:"family"
upper:"passeriformes"
}
{
sc:"muscicapidae"
ja:"ヒタキ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"prunellidae"
ja:"イワヒバリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"passeridae"
ja:"スズメ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"motacillidae"
ja:"セキレイ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"fringillidae"
ja:"アトリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"calcariidae"
ja:"ツメナガホオジロ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"parulidae"
ja:"アメリカムシクイ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"emberizidae"
ja:"ホオジロ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"psittacidae"
ja:"インコ科"
rank:"family"
upper:"psittaciformes"
}
{
sc:"timaliidae"
ja:"チメドリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"ploceidae"
ja:"ハタオリドリ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"estrildidae"
ja:"カエデチョウ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"viduidae"
ja:"テンニンチョウ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"thraupidae"
ja:"フウキンチョウ科"
rank:"family"
upper:"passeriformes"
}
{
sc:"tetrastes"
ja:"エゾライチョウ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"lagopus"
ja:"ライチョウ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"coturnix"
ja:"ウズラ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"syrmaticus"
ja:"ヤマドリ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"phasianus"
ja:"キジ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"dendrocygna"
ja:"リュウキュウガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"anser"
ja:"マガン属"
rank:"genus"
upper:"anatidae"
}
{
sc:"branta"
ja:"コクガン属"
rank:"genus"
upper:"anatidae"
}
{
sc:"cygnus"
ja:"ハクチョウ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"tadorna"
ja:"ツクシガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"aix"
ja:"オシドリ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"nettapus"
ja:"ナンキンオシ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"anas"
ja:"マガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"netta"
ja:"アカハシハジロ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"aythya"
ja:"スズガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"polysticta"
ja:"コケワタガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"somateria"
ja:"ケワタガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"histrionicus"
ja:"シノリガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"melanitta"
ja:"ビロードキンクロ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"clangula"
ja:"コオリガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"bucephala"
ja:"ホオジロガモ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"mergellus"
ja:"ミコアイサ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"mergus"
ja:"ウミアイサ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"tachybaptus"
ja:"カイツブリ属"
rank:"genus"
upper:"podicipedidae"
}
{
sc:"podiceps"
ja:"カンムリカイツブリ属"
rank:"genus"
upper:"podicipedidae"
}
{
sc:"phaethon"
ja:"ネッタイチョウ属"
rank:"genus"
upper:"phaethontidae"
}
{
sc:"syrrhaptes"
ja:"サケイ属"
rank:"genus"
upper:"pteroclidae"
}
{
sc:"columba"
ja:"カワラバト属"
rank:"genus"
upper:"columbidae"
}
{
sc:"streptopelia"
ja:"キジバト属"
rank:"genus"
upper:"columbidae"
}
{
sc:"chalcophaps"
ja:"キンバト属"
rank:"genus"
upper:"columbidae"
}
{
sc:"treron"
ja:"アオバト属"
rank:"genus"
upper:"columbidae"
}
{
sc:"ptilinopus"
ja:"ヒメアオバト属"
rank:"genus"
upper:"columbidae"
}
{
sc:"gavia"
ja:"アビ属"
rank:"genus"
upper:"gaviidae"
}
{
sc:"phoebastria"
ja:"アホウドリ属"
rank:"genus"
upper:"diomedeidae"
}
{
sc:"fulmarus"
ja:"フルマカモメ属"
rank:"genus"
upper:"procellariidae"
}
{
sc:"pterodroma"
ja:"シロハラミズナギドリ属"
rank:"genus"
upper:"procellariidae"
}
{
sc:"calonectris"
ja:"オオミズナギドリ属"
rank:"genus"
upper:"procellariidae"
}
{
sc:"puffinus"
ja:"ハイイロミズナギドリ属"
rank:"genus"
upper:"procellariidae"
}
{
sc:"bulweria"
ja:"アナドリ属"
rank:"genus"
upper:"procellariidae"
}
{
sc:"oceanites"
ja:"アシナガウミツバメ属"
rank:"genus"
upper:"hydrobatidae"
}
{
sc:"oceanodroma"
ja:"オーストンウミツバメ属"
rank:"genus"
upper:"hydrobatidae"
}
{
sc:"ciconia"
ja:"コウノトリ属"
rank:"genus"
upper:"ciconiidae"
}
{
sc:"fregata"
ja:"グンカンドリ属"
rank:"genus"
upper:"fregatidae"
}
{
sc:"sula"
ja:"カツオドリ属"
rank:"genus"
upper:"sulidae"
}
{
sc:"phalacrocorax"
ja:"ウ属"
rank:"genus"
upper:"phalacrocoracidae"
}
{
sc:"pelecanus"
ja:"ペリカン属"
rank:"genus"
upper:"pelecanidae"
}
{
sc:"botaurus"
ja:"サンカノゴイ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"ixobrychus"
ja:"ヨシゴイ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"gorsachius"
ja:"ミゾゴイ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"nycticorax"
ja:"ゴイサギ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"butorides"
ja:"ササゴイ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"ardeola"
ja:"アカガシラサギ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"bubulcus"
ja:"アマサギ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"ardea"
ja:"アオサギ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"egretta"
ja:"コサギ属"
rank:"genus"
upper:"ardeidae"
}
{
sc:"threskiornis"
ja:"クロトキ属"
rank:"genus"
upper:"threskiornithidae"
}
{
sc:"nipponia"
ja:"トキ属"
rank:"genus"
upper:"threskiornithidae"
}
{
sc:"platalea"
ja:"ヘラサギ属"
rank:"genus"
upper:"threskiornithidae"
}
{
sc:"grus"
ja:"ツル属"
rank:"genus"
upper:"gruidae"
}
{
sc:"anthropoides"
ja:"アネハヅル属"
rank:"genus"
upper:"gruidae"
}
{
sc:"coturnicops"
ja:"シマクイナ属"
rank:"genus"
upper:"rallidae"
}
{
sc:"rallina"
ja:"オオクイナ属"
rank:"genus"
upper:"rallidae"
}
{
sc:"gallirallus"
ja:"ヤンバルクイナ属"
rank:"genus"
upper:"rallidae"
}
{
sc:"rallus"
ja:"クイナ属"
rank:"genus"
upper:"rallidae"
}
{
sc:"amaurornis"
ja:"シロハラクイナ属"
rank:"genus"
upper:"rallidae"
}
{
sc:"porzana"
ja:"ヒメクイナ属"
rank:"genus"
upper:"rallidae"
}
{
sc:"gallicrex"
ja:"ツルクイナ属"
rank:"genus"
upper:"rallidae"
}
{
sc:"gallinula"
ja:"バン属"
rank:"genus"
upper:"rallidae"
}
{
sc:"fulica"
ja:"オオバン属"
rank:"genus"
upper:"rallidae"
}
{
sc:"otis"
ja:"ノガン属"
rank:"genus"
upper:"otididae"
}
{
sc:"tetrax"
ja:"ヒメノガン属"
rank:"genus"
upper:"otididae"
}
{
sc:"centropus"
ja:"バンケン属"
rank:"genus"
upper:"cuculidae"
}
{
sc:"clamator"
ja:"カンムリカッコウ属"
rank:"genus"
upper:"cuculidae"
}
{
sc:"eudynamys"
ja:"オニカッコウ属"
rank:"genus"
upper:"cuculidae"
}
{
sc:"urodynamis"
ja:"キジカッコウ属"
rank:"genus"
upper:"cuculidae"
}
{
sc:"surniculus"
ja:"オウチュウカッコウ属"
rank:"genus"
upper:"cuculidae"
}
{
sc:"hierococcyx"
ja:"ジュウイチ属"
rank:"genus"
upper:"cuculidae"
}
{
sc:"cuculus"
ja:"カッコウ属"
rank:"genus"
upper:"cuculidae"
}
{
sc:"caprimulgus"
ja:"ヨタカ属"
rank:"genus"
upper:"caprimulgidae"
}
{
sc:"aerodramus"
ja:"ヒマラヤアナツバメ属"
rank:"genus"
upper:"apodidae"
}
{
sc:"hirundapus"
ja:"ハリオアマツバメ属"
rank:"genus"
upper:"apodidae"
}
{
sc:"apus"
ja:"アマツバメ属"
rank:"genus"
upper:"apodidae"
}
{
sc:"vanellus"
ja:"タゲリ属"
rank:"genus"
upper:"charadriidae"
}
{
sc:"pluvialis"
ja:"ムナグロ属"
rank:"genus"
upper:"charadriidae"
}
{
sc:"charadrius"
ja:"チドリ属"
rank:"genus"
upper:"charadriidae"
}
{
sc:"haematopus"
ja:"ミヤコドリ属"
rank:"genus"
upper:"haematopodidae"
}
{
sc:"himantopus"
ja:"セイタカシギ属"
rank:"genus"
upper:"recurvirostridae"
}
{
sc:"recurvirostra"
ja:"ソリハシセイタカシギ属"
rank:"genus"
upper:"recurvirostridae"
}
{
sc:"scolopax"
ja:"ヤマシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"lymnocryptes"
ja:"コシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"gallinago"
ja:"タシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"limnodromus"
ja:"オオハシシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"limosa"
ja:"オグロシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"numenius"
ja:"ダイシャクシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"tringa"
ja:"クサシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"heteroscelus"
ja:"キアシシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"xenus"
ja:"ソリハシシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"actitis"
ja:"イソシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"arenaria"
ja:"キョウジョシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"calidris"
ja:"オバシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"eurynorhynchus"
ja:"ヘラシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"limicola"
ja:"キリアイ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"tryngites"
ja:"コモンシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"philomachus"
ja:"エリマキシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"phalaropus"
ja:"ヒレアシシギ属"
rank:"genus"
upper:"scolopacidae"
}
{
sc:"hydrophasianus"
ja:"レンカク属"
rank:"genus"
upper:"jacanidae"
}
{
sc:"rostratula"
ja:"タマシギ属"
rank:"genus"
upper:"rostratulidae"
}
{
sc:"turnix"
ja:"ミフウズラ属"
rank:"genus"
upper:"turnicidae"
}
{
sc:"glareola"
ja:"ツバメチドリ属"
rank:"genus"
upper:"glareolidae"
}
{
sc:"anous"
ja:"クロアジサシ属"
rank:"genus"
upper:"laridae"
}
{
sc:"procelsterna"
ja:"ハイイロアジサシ属"
rank:"genus"
upper:"laridae"
}
{
sc:"gygis"
ja:"シロアジサシ属"
rank:"genus"
upper:"laridae"
}
{
sc:"rissa"
ja:"ミツユビカモメ属"
rank:"genus"
upper:"laridae"
}
{
sc:"pagophila"
ja:"ゾウゲカモメ属"
rank:"genus"
upper:"laridae"
}
{
sc:"xema"
ja:"クビワカモメ属"
rank:"genus"
upper:"laridae"
}
{
sc:"rhodostethia"
ja:"ヒメクビワカモメ属"
rank:"genus"
upper:"laridae"
}
{
sc:"larus"
ja:"カモメ属"
rank:"genus"
upper:"laridae"
}
{
sc:"gelochelidon"
ja:"ハシブトアジサシ属"
rank:"genus"
upper:"laridae"
}
{
sc:"sterna"
ja:"アジサシ属"
rank:"genus"
upper:"laridae"
}
{
sc:"chlidonias"
ja:"クロハラアジサシ属"
rank:"genus"
upper:"laridae"
}
{
sc:"stercorarius"
ja:"トウゾクカモメ属"
rank:"genus"
upper:"stercorariidae"
}
{
sc:"alle"
ja:"ヒメウミスズメ属"
rank:"genus"
upper:"alcidae"
}
{
sc:"uria"
ja:"ウミガラス属"
rank:"genus"
upper:"alcidae"
}
{
sc:"alca"
ja:"オオハシウミガラス属"
rank:"genus"
upper:"alcidae"
}
{
sc:"cepphus"
ja:"ウミバト属"
rank:"genus"
upper:"alcidae"
}
{
sc:"brachyramphus"
ja:"マダラウミスズメ属"
rank:"genus"
upper:"alcidae"
}
{
sc:"synthliboramphus"
ja:"ウミスズメ属"
rank:"genus"
upper:"alcidae"
}
{
sc:"aethia"
ja:"エトロフウミスズメ属"
rank:"genus"
upper:"alcidae"
}
{
sc:"cerorhinca"
ja:"ウトウ属"
rank:"genus"
upper:"alcidae"
}
{
sc:"fratercula"
ja:"ツノメドリ属"
rank:"genus"
upper:"alcidae"
}
{
sc:"pandion"
ja:"ミサゴ属"
rank:"genus"
upper:"pandionidae"
}
{
sc:"pernis"
ja:"ハチクマ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"elanus"
ja:"カタグロトビ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"milvus"
ja:"トビ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"haliaeetus"
ja:"オジロワシ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"aegypius"
ja:"クロハゲワシ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"spilornis"
ja:"カンムリワシ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"circus"
ja:"チュウヒ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"accipiter"
ja:"ハイタカ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"butastur"
ja:"サシバ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"buteo"
ja:"ノスリ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"aquila"
ja:"イヌワシ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"nisaetus"
ja:"クマタカ属"
rank:"genus"
upper:"accipitridae"
}
{
sc:"tyto"
ja:"メンフクロウ属"
rank:"genus"
upper:"tytonidae"
}
{
sc:"otus"
ja:"コノハズク属"
rank:"genus"
upper:"strigidae"
}
{
sc:"bubo"
ja:"ワシミミズク属"
rank:"genus"
upper:"strigidae"
}
{
sc:"ketupa"
ja:"シマフクロウ属"
rank:"genus"
upper:"strigidae"
}
{
sc:"strix"
ja:"フクロウ属"
rank:"genus"
upper:"strigidae"
}
{
sc:"aegolius"
ja:"キンメフクロウ属"
rank:"genus"
upper:"strigidae"
}
{
sc:"ninox"
ja:"アオバズク属"
rank:"genus"
upper:"strigidae"
}
{
sc:"asio"
ja:"トラフズク属"
rank:"genus"
upper:"strigidae"
}
{
sc:"upupa"
ja:"ヤツガシラ属"
rank:"genus"
upper:"upupidae"
}
{
sc:"halcyon"
ja:"アカショウビン属"
rank:"genus"
upper:"alcedinidae"
}
{
sc:"todiramphus"
ja:"ナンヨウショウビン属"
rank:"genus"
upper:"alcedinidae"
}
{
sc:"alcedo"
ja:"カワセミ属"
rank:"genus"
upper:"alcedinidae"
}
{
sc:"ceyx"
ja:"ミツユビカワセミ属"
rank:"genus"
upper:"alcedinidae"
}
{
sc:"megaceryle"
ja:"ヤマセミ属"
rank:"genus"
upper:"alcedinidae"
}
{
sc:"merops"
ja:"ハチクイ属"
rank:"genus"
upper:"meropidae"
}
{
sc:"eurystomus"
ja:"ブッポウソウ属"
rank:"genus"
upper:"coraciidae"
}
{
sc:"jynx"
ja:"アリスイ属"
rank:"genus"
upper:"picidae"
}
{
sc:"dendrocopos"
ja:"アカゲラ属"
rank:"genus"
upper:"picidae"
}
{
sc:"picoides"
ja:"ミユビゲラ属"
rank:"genus"
upper:"picidae"
}
{
sc:"dryocopus"
ja:"クマゲラ属"
rank:"genus"
upper:"picidae"
}
{
sc:"picus"
ja:"アオゲラ属"
rank:"genus"
upper:"picidae"
}
{
sc:"sapheopipo"
ja:"ノグチゲラ属"
rank:"genus"
upper:"picidae"
}
{
sc:"falco"
ja:"ハヤブサ属"
rank:"genus"
upper:"falconidae"
}
{
sc:"pitta"
ja:"ヤイロチョウ属"
rank:"genus"
upper:"pittidae"
}
{
sc:"artamus"
ja:"モリツバメ属"
rank:"genus"
upper:"artamidae"
}
{
sc:"coracina"
ja:"アサクラサンショウクイ属"
rank:"genus"
upper:"campephagidae"
}
{
sc:"pericrocotus"
ja:"サンショウクイ属"
rank:"genus"
upper:"campephagidae"
}
{
sc:"oriolus"
ja:"コウライウグイス属"
rank:"genus"
upper:"oriolidae"
}
{
sc:"dicrurus"
ja:"オウチュウ属"
rank:"genus"
upper:"dicruridae"
}
{
sc:"hypothymis"
ja:"クロエリヒタキ属"
rank:"genus"
upper:"monarchidae"
}
{
sc:"terpsiphone"
ja:"サンコウチョウ属"
rank:"genus"
upper:"monarchidae"
}
{
sc:"lanius"
ja:"モズ属"
rank:"genus"
upper:"laniidae"
}
{
sc:"garrulus"
ja:"カケス属"
rank:"genus"
upper:"corvidae"
}
{
sc:"cyanopica"
ja:"オナガ属"
rank:"genus"
upper:"corvidae"
}
{
sc:"pica"
ja:"カササギ属"
rank:"genus"
upper:"corvidae"
}
{
sc:"nucifraga"
ja:"ホシガラス属"
rank:"genus"
upper:"corvidae"
}
{
sc:"corvus"
ja:"カラス属"
rank:"genus"
upper:"corvidae"
}
{
sc:"regulus"
ja:"キクイタダキ属"
rank:"genus"
upper:"regulidae"
}
{
sc:"remiz"
ja:"ツリスガラ属"
rank:"genus"
upper:"remizidae"
}
{
sc:"poecile"
ja:"コガラ属"
rank:"genus"
upper:"paridae"
}
{
sc:"periparus"
ja:"ヒガラ属"
rank:"genus"
upper:"paridae"
}
{
sc:"parus"
ja:"シジュウカラ属"
rank:"genus"
upper:"paridae"
}
{
sc:"cyanistes"
ja:"ルリガラ属"
rank:"genus"
upper:"paridae"
}
{
sc:"panurus"
ja:"ヒゲガラ属"
rank:"genus"
upper:"panuridae"
}
{
sc:"melanocorypha"
ja:"コウテンシ属"
rank:"genus"
upper:"alaudidae"
}
{
sc:"calandrella"
ja:"ヒメコウテンシ属"
rank:"genus"
upper:"alaudidae"
}
{
sc:"alauda"
ja:"ヒバリ属"
rank:"genus"
upper:"alaudidae"
}
{
sc:"eremophila"
ja:"ハマヒバリ属"
rank:"genus"
upper:"alaudidae"
}
{
sc:"riparia"
ja:"ショウドウツバメ属"
rank:"genus"
upper:"hirundinidae"
}
{
sc:"tachycineta"
ja:"ミドリツバメ属"
rank:"genus"
upper:"hirundinidae"
}
{
sc:"hirundo"
ja:"ツバメ属"
rank:"genus"
upper:"hirundinidae"
}
{
sc:"delichon"
ja:"イワツバメ属"
rank:"genus"
upper:"hirundinidae"
}
{
sc:"pycnonotus"
ja:"シロガシラ属"
rank:"genus"
upper:"pycnonotidae"
}
{
sc:"hypsipetes"
ja:"ヒヨドリ属"
rank:"genus"
upper:"pycnonotidae"
}
{
sc:"cettia"
ja:"ウグイス属"
rank:"genus"
upper:"cettiidae"
}
{
sc:"urosphena"
ja:"ヤブサメ属"
rank:"genus"
upper:"cettiidae"
}
{
sc:"aegithalos"
ja:"エナガ属"
rank:"genus"
upper:"aegithalidae"
}
{
sc:"phylloscopus"
ja:"ムシクイ属"
rank:"genus"
upper:"phylloscopidae"
}
{
sc:"sylvia"
ja:"ズグロムシクイ属"
rank:"genus"
upper:"sylviidae"
}
{
sc:"apalopteron"
ja:"メグロ属"
rank:"genus"
upper:"zosteropidae"
}
{
sc:"zosterops"
ja:"メジロ属"
rank:"genus"
upper:"zosteropidae"
}
{
sc:"locustella"
ja:"センニュウ属"
rank:"genus"
upper:"locustellidae"
}
{
sc:"acrocephalus"
ja:"ヨシキリ属"
rank:"genus"
upper:"acrocephalidae"
}
{
sc:"iduna"
ja:"ヒメウタイムシクイ属"
rank:"genus"
upper:"acrocephalidae"
}
{
sc:"cisticola"
ja:"セッカ属"
rank:"genus"
upper:"cisticolidae"
}
{
sc:"bombycilla"
ja:"レンジャク属"
rank:"genus"
upper:"bombycillidae"
}
{
sc:"sitta"
ja:"ゴジュウカラ属"
rank:"genus"
upper:"sittidae"
}
{
sc:"certhia"
ja:"キバシリ属"
rank:"genus"
upper:"certhiidae"
}
{
sc:"troglodytes"
ja:"ミソサザイ属"
rank:"genus"
upper:"troglodytidae"
}
{
sc:"spodiopsar"
ja:"ムクドリ属"
rank:"genus"
upper:"sturnidae"
}
{
sc:"agropsar"
ja:"コムクドリ属"
rank:"genus"
upper:"sturnidae"
}
{
sc:"sturnia"
ja:"カラムクドリ属"
rank:"genus"
upper:"sturnidae"
}
{
sc:"pastor"
ja:"バライロムクドリ属"
rank:"genus"
upper:"sturnidae"
}
{
sc:"sturnus"
ja:"ホシムクドリ属"
rank:"genus"
upper:"sturnidae"
}
{
sc:"cinclus"
ja:"カワガラス属"
rank:"genus"
upper:"cinclidae"
}
{
sc:"zoothera"
ja:"トラツグミ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"cichlopasser"
ja:"オガサワラガビチョウ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"catharus"
ja:"チャツグミ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"turdus"
ja:"ツグミ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"erithacus"
ja:"ヨーロッパコマドリ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"luscinia"
ja:"ノゴマ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"tarsiger"
ja:"ルリビタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"phoenicurus"
ja:"ジョウビタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"saxicola"
ja:"ノビタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"oenanthe"
ja:"サバクヒタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"monticola"
ja:"イソヒヨドリ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"muscicapa"
ja:"サメビタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"ficedula"
ja:"キビタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"cyanoptila"
ja:"オオルリ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"eumyias"
ja:"アイイロヒタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"niltava"
ja:"アオヒタキ属"
rank:"genus"
upper:"muscicapidae"
}
{
sc:"prunella"
ja:"カヤクグリ属"
rank:"genus"
upper:"prunellidae"
}
{
sc:"passer"
ja:"スズメ属"
rank:"genus"
upper:"passeridae"
}
{
sc:"dendronanthus"
ja:"イワミセキレイ属"
rank:"genus"
upper:"motacillidae"
}
{
sc:"motacilla"
ja:"セキレイ属"
rank:"genus"
upper:"motacillidae"
}
{
sc:"anthus"
ja:"タヒバリ属"
rank:"genus"
upper:"motacillidae"
}
{
sc:"fringilla"
ja:"アトリ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"chloris"
ja:"カワラヒワ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"carduelis"
ja:"マヒワ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"leucosticte"
ja:"ハギマシコ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"uragus"
ja:"ベニマシコ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"chaunoproctus"
ja:"オガサワラマシコ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"carpodacus"
ja:"オオマシコ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"pinicola"
ja:"ギンザンマシコ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"loxia"
ja:"イスカ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"pyrrhula"
ja:"ウソ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"coccothraustes"
ja:"シメ属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"eophona"
ja:"イカル属"
rank:"genus"
upper:"fringillidae"
}
{
sc:"calcarius"
ja:"ツメナガホオジロ属"
rank:"genus"
upper:"calcariidae"
}
{
sc:"plectrophenax"
ja:"ユキホオジロ属"
rank:"genus"
upper:"calcariidae"
}
{
sc:"setophaga"
ja:"ハゴロモムシクイ属"
rank:"genus"
upper:"parulidae"
}
{
sc:"cardellina"
ja:"アカガオアメリカムシクイ属"
rank:"genus"
upper:"parulidae"
}
{
sc:"emberiza"
ja:"ホオジロ属"
rank:"genus"
upper:"emberizidae"
}
{
sc:"passerella"
ja:"ゴマフスズメ属"
rank:"genus"
upper:"emberizidae"
}
{
sc:"melospiza"
ja:"ウタスズメ属"
rank:"genus"
upper:"emberizidae"
}
{
sc:"zonotrichia"
ja:"ミヤマシトド属"
rank:"genus"
upper:"emberizidae"
}
{
sc:"passerculus"
ja:"サバンナシトド属"
rank:"genus"
upper:"emberizidae"
}
{
sc:"bambusicola"
ja:"コジュケイ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"syrmaticus"
ja:"ヤマドリ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"phasianus"
ja:"キジ属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"pavo"
ja:"クジャク属"
rank:"genus"
upper:"phasianidae"
}
{
sc:"branta"
ja:"コクガン属"
rank:"genus"
upper:"anatidae"
}
{
sc:"cygnus"
ja:"ハクチョウ属"
rank:"genus"
upper:"anatidae"
}
{
sc:"columba"
ja:"カワラバト属"
rank:"genus"
upper:"columbidae"
}
{
sc:"streptopelia"
ja:"キジバト属"
rank:"genus"
upper:"columbidae"
}
{
sc:"ciconia"
ja:"コウノトリ属"
rank:"genus"
upper:"ciconiidae"
}
{
sc:"nipponia"
ja:"トキ属"
rank:"genus"
upper:"threskiornithidae"
}
{
sc:"himantopus"
ja:"セイタカシギ属"
rank:"genus"
upper:"recurvirostridae"
}
{
sc:"melopsittacus"
ja:"セキセイインコ属"
rank:"genus"
upper:"psittacidae"
}
{
sc:"psittacula"
ja:"ダルマインコ属"
rank:"genus"
upper:"psittacidae"
}
{
sc:"myiopsitta"
ja:"オキナインコ属"
rank:"genus"
upper:"psittacidae"
}
{
sc:"urocissa"
ja:"サンジャク属"
rank:"genus"
upper:"corvidae"
}
{
sc:"pica"
ja:"カササギ属"
rank:"genus"
upper:"corvidae"
}
{
sc:"zosterops"
ja:"メジロ属"
rank:"genus"
upper:"zosteropidae"
}
{
sc:"garrulax"
ja:"ガビチョウ属"
rank:"genus"
upper:"timaliidae"
}
{
sc:"leiothrix"
ja:"ソウシチョウ属"
rank:"genus"
upper:"timaliidae"
}
{
sc:"acridotheres"
ja:"ハッカチョウ属"
rank:"genus"
upper:"sturnidae"
}
{
sc:"gracupica"
ja:"クビワムクドリ属"
rank:"genus"
upper:"sturnidae"
}
{
sc:"ploceus"
ja:"キハタオリ属"
rank:"genus"
upper:"ploceidae"
}
{
sc:"euplectes"
ja:"キンランチョウ属"
rank:"genus"
upper:"ploceidae"
}
{
sc:"estrilda"
ja:"カエデチョウ属"
rank:"genus"
upper:"estrildidae"
}
{
sc:"amandava"
ja:"ベニスズメ属"
rank:"genus"
upper:"estrildidae"
}
{
sc:"lonchura"
ja:"キンパラ属"
rank:"genus"
upper:"estrildidae"
}
{
sc:"vidua"
ja:"テンニンチョウ属"
rank:"genus"
upper:"viduidae"
}
{
sc:"paroaria"
ja:"コウカンチョウ属"
rank:"genus"
upper:"thraupidae"
}
,{
"alien":false,
upper:"tetrastes"
rank:"species"
sc:"bonasia"
ja:"エゾライチョウ"
}
{
"alien":false,
upper:"lagopus"
rank:"species"
sc:"muta"
ja:"ライチョウ"
}
{
"alien":false,
upper:"coturnix"
rank:"species"
sc:"japonica"
ja:"ウズラ"
}
{
"alien":false,
upper:"syrmaticus"
rank:"species"
sc:"soemmerringii"
ja:"ヤマドリ"
}
{
"alien":false,
upper:"phasianus"
rank:"species"
sc:"colchicus"
ja:"キジ"
}
{
"alien":false,
upper:"dendrocygna"
rank:"species"
sc:"javanica"
ja:"リュウキュウガモ"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"cygnoides"
ja:"サカツラガン"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"fabalis"
ja:"ヒシクイ"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"anser"
ja:"ハイイロガン"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"albifrons"
ja:"マガン"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"erythropus"
ja:"カリガネ"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"indicus"
ja:"インドガン"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"caerulescens"
ja:"ハクガン"
}
{
"alien":false,
upper:"anser"
rank:"species"
sc:"canagicus"
ja:"ミカドガン"
}
{
"alien":false,
upper:"branta"
rank:"species"
sc:"hutchinsii"
ja:"シジュウカラガン"
}
{
"alien":false,
upper:"branta"
rank:"species"
sc:"bernicla"
ja:"コクガン"
}
{
"alien":false,
upper:"cygnus"
rank:"species"
sc:"olor"
ja:"コブハクチョウ"
}
{
"alien":false,
upper:"cygnus"
rank:"species"
sc:"buccinator"
ja:"ナキハクチョウ"
}
{
"alien":false,
upper:"cygnus"
rank:"species"
sc:"columbianus"
ja:"コハクチョウ"
}
{
"alien":false,
upper:"cygnus"
rank:"species"
sc:"cygnus"
ja:"オオハクチョウ"
}
{
"alien":false,
upper:"tadorna"
rank:"species"
sc:"tadorna"
ja:"ツクシガモ"
}
{
"alien":false,
upper:"tadorna"
rank:"species"
sc:"ferruginea"
ja:"アカツクシガモ"
}
{
"alien":false,
upper:"tadorna"
rank:"species"
sc:"cristata"
ja:"カンムリツクシガモ"
}
{
"alien":false,
upper:"aix"
rank:"species"
sc:"galericulata"
ja:"オシドリ"
}
{
"alien":false,
upper:"nettapus"
rank:"species"
sc:"coromandelianus"
ja:"ナンキンオシ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"strepera"
ja:"オカヨシガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"falcata"
ja:"ヨシガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"penelope"
ja:"ヒドリガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"americana"
ja:"アメリカヒドリ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"platyrhynchos"
ja:"マガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"luzonica"
ja:"アカノドカルガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"zonorhyncha"
ja:"カルガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"discors"
ja:"ミカヅキシマアジ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"clypeata"
ja:"ハシビロガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"acuta"
ja:"オナガガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"querquedula"
ja:"シマアジ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"formosa"
ja:"トモエガモ"
}
{
"alien":false,
upper:"anas"
rank:"species"
sc:"crecca"
ja:"コガモ"
}
{
"alien":false,
upper:"netta"
rank:"species"
sc:"rufina"
ja:"アカハシハジロ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"valisineria"
ja:"オオホシハジロ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"americana"
ja:"アメリカホシハジロ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"ferina"
ja:"ホシハジロ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"baeri"
ja:"アカハジロ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"nyroca"
ja:"メジロガモ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"collaris"
ja:"クビワキンクロ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"fuligula"
ja:"キンクロハジロ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"marila"
ja:"スズガモ"
}
{
"alien":false,
upper:"aythya"
rank:"species"
sc:"affinis"
ja:"コスズガモ"
}
{
"alien":false,
upper:"polysticta"
rank:"species"
sc:"stelleri"
ja:"コケワタガモ"
}
{
"alien":false,
upper:"somateria"
rank:"species"
sc:"spectabilis"
ja:"ケワタガモ"
}
{
"alien":false,
upper:"histrionicus"
rank:"species"
sc:"histrionicus"
ja:"シノリガモ"
}
{
"alien":false,
upper:"melanitta"
rank:"species"
sc:"perspicillata"
ja:"アラナミキンクロ"
}
{
"alien":false,
upper:"melanitta"
rank:"species"
sc:"fusca"
ja:"ビロードキンクロ"
}
{
"alien":false,
upper:"melanitta"
rank:"species"
sc:"americana"
ja:"クロガモ"
}
{
"alien":false,
upper:"clangula"
rank:"species"
sc:"hyemalis"
ja:"コオリガモ"
}
{
"alien":false,
upper:"bucephala"
rank:"species"
sc:"albeola"
ja:"ヒメハジロ"
}
{
"alien":false,
upper:"bucephala"
rank:"species"
sc:"clangula"
ja:"ホオジロガモ"
}
{
"alien":false,
upper:"mergellus"
rank:"species"
sc:"albellus"
ja:"ミコアイサ"
}
{
"alien":false,
upper:"mergus"
rank:"species"
sc:"merganser"
ja:"カワアイサ"
}
{
"alien":false,
upper:"mergus"
rank:"species"
sc:"serrator"
ja:"ウミアイサ"
}
{
"alien":false,
upper:"mergus"
rank:"species"
sc:"squamatus"
ja:"コウライアイサ"
}
{
"alien":false,
upper:"tachybaptus"
rank:"species"
sc:"ruficollis"
ja:"カイツブリ"
}
{
"alien":false,
upper:"podiceps"
rank:"species"
sc:"grisegena"
ja:"アカエリカイツブリ"
}
{
"alien":false,
upper:"podiceps"
rank:"species"
sc:"cristatus"
ja:"カンムリカイツブリ"
}
{
"alien":false,
upper:"podiceps"
rank:"species"
sc:"auritus"
ja:"ミミカイツブリ"
}
{
"alien":false,
upper:"podiceps"
rank:"species"
sc:"nigricollis"
ja:"ハジロカイツブリ"
}
{
"alien":false,
upper:"phaethon"
rank:"species"
sc:"rubricauda"
ja:"アカオネッタイチョウ"
}
{
"alien":false,
upper:"phaethon"
rank:"species"
sc:"lepturus"
ja:"シラオネッタイチョウ"
}
{
"alien":false,
upper:"syrrhaptes"
rank:"species"
sc:"paradoxus"
ja:"サケイ"
}
{
"alien":false,
upper:"columba"
rank:"species"
sc:"oenas"
ja:"ヒメモリバト"
}
{
"alien":false,
upper:"columba"
rank:"species"
sc:"janthina"
ja:"カラスバト"
}
{
"alien":false,
upper:"columba"
rank:"species"
sc:"versicolor"
ja:"オガサワラカラスバト"
}
{
"alien":false,
upper:"columba"
rank:"species"
sc:"jouyi"
ja:"リュウキュウカラスバト"
}
{
"alien":false,
upper:"streptopelia"
rank:"species"
sc:"orientalis"
ja:"キジバト"
}
{
"alien":false,
upper:"streptopelia"
rank:"species"
sc:"decaocto"
ja:"シラコバト"
}
{
"alien":false,
upper:"streptopelia"
rank:"species"
sc:"tranquebarica"
ja:"ベニバト"
}
{
"alien":false,
upper:"chalcophaps"
rank:"species"
sc:"indica"
ja:"キンバト"
}
{
"alien":false,
upper:"treron"
rank:"species"
sc:"sieboldii"
ja:"アオバト"
}
{
"alien":false,
upper:"treron"
rank:"species"
sc:"formosae"
ja:"ズアカアオバト"
}
{
"alien":false,
upper:"ptilinopus"
rank:"species"
sc:"leclancheri"
ja:"クロアゴヒメアオバト"
}
{
"alien":false,
upper:"gavia"
rank:"species"
sc:"stellata"
ja:"アビ"
}
{
"alien":false,
upper:"gavia"
rank:"species"
sc:"arctica"
ja:"オオハム"
}
{
"alien":false,
upper:"gavia"
rank:"species"
sc:"pacifica"
ja:"シロエリオオハム"
}
{
"alien":false,
upper:"gavia"
rank:"species"
sc:"immer"
ja:"ハシグロアビ"
}
{
"alien":false,
upper:"gavia"
rank:"species"
sc:"adamsii"
ja:"ハシジロアビ"
}
{
"alien":false,
upper:"phoebastria"
rank:"species"
sc:"immutabilis"
ja:"コアホウドリ"
}
{
"alien":false,
upper:"phoebastria"
rank:"species"
sc:"nigripes"
ja:"クロアシアホウドリ"
}
{
"alien":false,
upper:"phoebastria"
rank:"species"
sc:"albatrus"
ja:"アホウドリ"
}
{
"alien":false,
upper:"fulmarus"
rank:"species"
sc:"glacialis"
ja:"フルマカモメ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"solandri"
ja:"ハジロミズナギドリ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"externa"
ja:"オオシロハラミズナギドリ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"neglecta"
ja:"カワリシロハラミズナギドリ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"phaeopygia"
ja:"ハワイシロハラミズナギドリ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"inexpectata"
ja:"マダラシロハラミズナギドリ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"nigripennis"
ja:"ハグロシロハラミズナギドリ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"hypoleuca"
ja:"シロハラミズナギドリ"
}
{
"alien":false,
upper:"pterodroma"
rank:"species"
sc:"longirostris"
ja:"ヒメシロハラミズナギドリ"
}
{
"alien":false,
upper:"calonectris"
rank:"species"
sc:"leucomelas"
ja:"オオミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"pacificus"
ja:"オナガミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"bulleri"
ja:"ミナミオナガミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"griseus"
ja:"ハイイロミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"tenuirostris"
ja:"ハシボソミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"creatopus"
ja:"シロハラアカアシミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"carneipes"
ja:"アカアシミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"nativitatis"
ja:"コミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"puffinus"
ja:"マンクスミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"newelli"
ja:"ハワイセグロミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"lherminieri"
ja:"セグロミズナギドリ"
}
{
"alien":false,
upper:"puffinus"
rank:"species"
sc:"bryani"
ja:"オガサワラヒメミズナギドリ"
}
{
"alien":false,
upper:"bulweria"
rank:"species"
sc:"bulwerii"
ja:"アナドリ"
}
{
"alien":false,
upper:"oceanites"
rank:"species"
sc:"oceanicus"
ja:"アシナガウミツバメ"
}
{
"alien":false,
upper:"oceanodroma"
rank:"species"
sc:"castro"
ja:"クロコシジロウミツバメ"
}
{
"alien":false,
upper:"oceanodroma"
rank:"species"
sc:"monorhis"
ja:"ヒメクロウミツバメ"
}
{
"alien":false,
upper:"oceanodroma"
rank:"species"
sc:"leucorhoa"
ja:"コシジロウミツバメ"
}
{
"alien":false,
upper:"oceanodroma"
rank:"species"
sc:"tristrami"
ja:"オーストンウミツバメ"
}
{
"alien":false,
upper:"oceanodroma"
rank:"species"
sc:"matsudairae"
ja:"クロウミツバメ"
}
{
"alien":false,
upper:"oceanodroma"
rank:"species"
sc:"furcata"
ja:"ハイイロウミツバメ"
}
{
"alien":false,
upper:"ciconia"
rank:"species"
sc:"nigra"
ja:"ナベコウ"
}
{
"alien":false,
upper:"ciconia"
rank:"species"
sc:"boyciana"
ja:"コウノトリ"
}
{
"alien":false,
upper:"fregata"
rank:"species"
sc:"minor"
ja:"オオグンカンドリ"
}
{
"alien":false,
upper:"fregata"
rank:"species"
sc:"ariel"
ja:"コグンカンドリ"
}
{
"alien":false,
upper:"sula"
rank:"species"
sc:"dactylatra"
ja:"アオツラカツオドリ"
}
{
"alien":false,
upper:"sula"
rank:"species"
sc:"sula"
ja:"アカアシカツオドリ"
}
{
"alien":false,
upper:"sula"
rank:"species"
sc:"leucogaster"
ja:"カツオドリ"
}
{
"alien":false,
upper:"phalacrocorax"
rank:"species"
sc:"pelagicus"
ja:"ヒメウ"
}
{
"alien":false,
upper:"phalacrocorax"
rank:"species"
sc:"urile"
ja:"チシマウガラス"
}
{
"alien":false,
upper:"phalacrocorax"
rank:"species"
sc:"carbo"
ja:"カワウ"
}
{
"alien":false,
upper:"phalacrocorax"
rank:"species"
sc:"capillatus"
ja:"ウミウ"
}
{
"alien":false,
upper:"pelecanus"
rank:"species"
sc:"onocrotalus"
ja:"モモイロペリカン"
}
{
"alien":false,
upper:"pelecanus"
rank:"species"
sc:"philippensis"
ja:"ホシバシペリカン"
}
{
"alien":false,
upper:"pelecanus"
rank:"species"
sc:"crispus"
ja:"ハイイロペリカン"
}
{
"alien":false,
upper:"botaurus"
rank:"species"
sc:"stellaris"
ja:"サンカノゴイ"
}
{
"alien":false,
upper:"ixobrychus"
rank:"species"
sc:"sinensis"
ja:"ヨシゴイ"
}
{
"alien":false,
upper:"ixobrychus"
rank:"species"
sc:"eurhythmus"
ja:"オオヨシゴイ"
}
{
"alien":false,
upper:"ixobrychus"
rank:"species"
sc:"cinnamomeus"
ja:"リュウキュウヨシゴイ"
}
{
"alien":false,
upper:"ixobrychus"
rank:"species"
sc:"flavicollis"
ja:"タカサゴクロサギ"
}
{
"alien":false,
upper:"gorsachius"
rank:"species"
sc:"goisagi"
ja:"ミゾゴイ"
}
{
"alien":false,
upper:"gorsachius"
rank:"species"
sc:"melanolophus"
ja:"ズグロミゾゴイ"
}
{
"alien":false,
upper:"nycticorax"
rank:"species"
sc:"nycticorax"
ja:"ゴイサギ"
}
{
"alien":false,
upper:"nycticorax"
rank:"species"
sc:"caledonicus"
ja:"ハシブトゴイ"
}
{
"alien":false,
upper:"butorides"
rank:"species"
sc:"striata"
ja:"ササゴイ"
}
{
"alien":false,
upper:"ardeola"
rank:"species"
sc:"bacchus"
ja:"アカガシラサギ"
}
{
"alien":false,
upper:"bubulcus"
rank:"species"
sc:"ibis"
ja:"アマサギ"
}
{
"alien":false,
upper:"ardea"
rank:"species"
sc:"cinerea"
ja:"アオサギ"
}
{
"alien":false,
upper:"ardea"
rank:"species"
sc:"purpurea"
ja:"ムラサキサギ"
}
{
"alien":false,
upper:"ardea"
rank:"species"
sc:"alba"
ja:"ダイサギ"
}
{
"alien":false,
upper:"egretta"
rank:"species"
sc:"intermedia"
ja:"チュウサギ"
}
{
"alien":false,
upper:"egretta"
rank:"species"
sc:"garzetta"
ja:"コサギ"
}
{
"alien":false,
upper:"egretta"
rank:"species"
sc:"sacra"
ja:"クロサギ"
}
{
"alien":false,
upper:"egretta"
rank:"species"
sc:"eulophotes"
ja:"カラシラサギ"
}
{
"alien":false,
upper:"threskiornis"
rank:"species"
sc:"melanocephalus"
ja:"クロトキ"
}
{
"alien":false,
upper:"nipponia"
rank:"species"
sc:"nippon"
ja:"トキ"
}
{
"alien":false,
upper:"platalea"
rank:"species"
sc:"leucorodia"
ja:"ヘラサギ"
}
{
"alien":false,
upper:"platalea"
rank:"species"
sc:"minor"
ja:"クロツラヘラサギ"
}
{
"alien":false,
upper:"grus"
rank:"species"
sc:"leucogeranus"
ja:"ソデグロヅル"
}
{
"alien":false,
upper:"grus"
rank:"species"
sc:"canadensis"
ja:"カナダヅル"
}
{
"alien":false,
upper:"grus"
rank:"species"
sc:"vipio"
ja:"マナヅル"
}
{
"alien":false,
upper:"grus"
rank:"species"
sc:"japonensis"
ja:"タンチョウ"
}
{
"alien":false,
upper:"grus"
rank:"species"
sc:"grus"
ja:"クロヅル"
}
{
"alien":false,
upper:"grus"
rank:"species"
sc:"monacha"
ja:"ナベヅル"
}
{
"alien":false,
upper:"anthropoides"
rank:"species"
sc:"virgo"
ja:"アネハヅル"
}
{
"alien":false,
upper:"coturnicops"
rank:"species"
sc:"exquisitus"
ja:"シマクイナ"
}
{
"alien":false,
upper:"rallina"
rank:"species"
sc:"eurizonoides"
ja:"オオクイナ"
}
{
"alien":false,
upper:"gallirallus"
rank:"species"
sc:"okinawae"
ja:"ヤンバルクイナ"
}
{
"alien":false,
upper:"gallirallus"
rank:"species"
sc:"striatus"
ja:"ミナミクイナ"
}
{
"alien":false,
upper:"rallus"
rank:"species"
sc:"aquaticus"
ja:"クイナ"
}
{
"alien":false,
upper:"amaurornis"
rank:"species"
sc:"phoenicurus"
ja:"シロハラクイナ"
}
{
"alien":false,
upper:"porzana"
rank:"species"
sc:"pusilla"
ja:"ヒメクイナ"
}
{
"alien":false,
upper:"porzana"
rank:"species"
sc:"porzana"
ja:"コモンクイナ"
}
{
"alien":false,
upper:"porzana"
rank:"species"
sc:"fusca"
ja:"ヒクイナ"
}
{
"alien":false,
upper:"porzana"
rank:"species"
sc:"paykullii"
ja:"コウライクイナ"
}
{
"alien":false,
upper:"porzana"
rank:"species"
sc:"cinerea"
ja:"マミジロクイナ"
}
{
"alien":false,
upper:"gallicrex"
rank:"species"
sc:"cinerea"
ja:"ツルクイナ"
}
{
"alien":false,
upper:"gallinula"
rank:"species"
sc:"chloropus"
ja:"バン"
}
{
"alien":false,
upper:"fulica"
rank:"species"
sc:"atra"
ja:"オオバン"
}
{
"alien":false,
upper:"otis"
rank:"species"
sc:"tarda"
ja:"ノガン"
}
{
"alien":false,
upper:"tetrax"
rank:"species"
sc:"tetrax"
ja:"ヒメノガン"
}
{
"alien":false,
upper:"centropus"
rank:"species"
sc:"bengalensis"
ja:"バンケン"
}
{
"alien":false,
upper:"clamator"
rank:"species"
sc:"coromandus"
ja:"カンムリカッコウ"
}
{
"alien":false,
upper:"eudynamys"
rank:"species"
sc:"scolopaceus"
ja:"オニカッコウ"
}
{
"alien":false,
upper:"urodynamis"
rank:"species"
sc:"taitensis"
ja:"キジカッコウ"
}
{
"alien":false,
upper:"surniculus"
rank:"species"
sc:"lugubris"
ja:"オウチュウカッコウ"
}
{
"alien":false,
upper:"hierococcyx"
rank:"species"
sc:"sparverioides"
ja:"オオジュウイチ"
}
{
"alien":false,
upper:"hierococcyx"
rank:"species"
sc:"hyperythrus"
ja:"ジュウイチ"
}
{
"alien":false,
upper:"cuculus"
rank:"species"
sc:"poliocephalus"
ja:"ホトトギス"
}
{
"alien":false,
upper:"cuculus"
rank:"species"
sc:"micropterus"
ja:"セグロカッコウ"
}
{
"alien":false,
upper:"cuculus"
rank:"species"
sc:"optatus"
ja:"ツツドリ"
}
{
"alien":false,
upper:"cuculus"
rank:"species"
sc:"canorus"
ja:"カッコウ"
}
{
"alien":false,
upper:"caprimulgus"
rank:"species"
sc:"indicus"
ja:"ヨタカ"
}
{
"alien":false,
upper:"aerodramus"
rank:"species"
sc:"brevirostris"
ja:"ヒマラヤアナツバメ"
}
{
"alien":false,
upper:"hirundapus"
rank:"species"
sc:"caudacutus"
ja:"ハリオアマツバメ"
}
{
"alien":false,
upper:"apus"
rank:"species"
sc:"pacificus"
ja:"アマツバメ"
}
{
"alien":false,
upper:"apus"
rank:"species"
sc:"nipalensis"
ja:"ヒメアマツバメ"
}
{
"alien":false,
upper:"vanellus"
rank:"species"
sc:"vanellus"
ja:"タゲリ"
}
{
"alien":false,
upper:"vanellus"
rank:"species"
sc:"cinereus"
ja:"ケリ"
}
{
"alien":false,
upper:"pluvialis"
rank:"species"
sc:"apricaria"
ja:"ヨーロッパムナグロ"
}
{
"alien":false,
upper:"pluvialis"
rank:"species"
sc:"fulva"
ja:"ムナグロ"
}
{
"alien":false,
upper:"pluvialis"
rank:"species"
sc:"dominica"
ja:"アメリカムナグロ"
}
{
"alien":false,
upper:"pluvialis"
rank:"species"
sc:"squatarola"
ja:"ダイゼン"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"hiaticula"
ja:"ハジロコチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"semipalmatus"
ja:"ミズカキチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"placidus"
ja:"イカルチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"dubius"
ja:"コチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"alexandrinus"
ja:"シロチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"mongolus"
ja:"メダイチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"leschenaultii"
ja:"オオメダイチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"veredus"
ja:"オオチドリ"
}
{
"alien":false,
upper:"charadrius"
rank:"species"
sc:"morinellus"
ja:"コバシチドリ"
}
{
"alien":false,
upper:"haematopus"
rank:"species"
sc:"ostralegus"
ja:"ミヤコドリ"
}
{
"alien":false,
upper:"himantopus"
rank:"species"
sc:"himantopus"
ja:"セイタカシギ"
}
{
"alien":false,
upper:"recurvirostra"
rank:"species"
sc:"avosetta"
ja:"ソリハシセイタカシギ"
}
{
"alien":false,
upper:"scolopax"
rank:"species"
sc:"rusticola"
ja:"ヤマシギ"
}
{
"alien":false,
upper:"scolopax"
rank:"species"
sc:"mira"
ja:"アマミヤマシギ"
}
{
"alien":false,
upper:"lymnocryptes"
rank:"species"
sc:"minimus"
ja:"コシギ"
}
{
"alien":false,
upper:"gallinago"
rank:"species"
sc:"solitaria"
ja:"アオシギ"
}
{
"alien":false,
upper:"gallinago"
rank:"species"
sc:"hardwickii"
ja:"オオジシギ"
}
{
"alien":false,
upper:"gallinago"
rank:"species"
sc:"stenura"
ja:"ハリオシギ"
}
{
"alien":false,
upper:"gallinago"
rank:"species"
sc:"megala"
ja:"チュウジシギ"
}
{
"alien":false,
upper:"gallinago"
rank:"species"
sc:"gallinago"
ja:"タシギ"
}
{
"alien":false,
upper:"limnodromus"
rank:"species"
sc:"griseus"
ja:"アメリカオオハシシギ"
}
{
"alien":false,
upper:"limnodromus"
rank:"species"
sc:"scolopaceus"
ja:"オオハシシギ"
}
{
"alien":false,
upper:"limnodromus"
rank:"species"
sc:"semipalmatus"
ja:"シベリアオオハシシギ"
}
{
"alien":false,
upper:"limosa"
rank:"species"
sc:"limosa"
ja:"オグロシギ"
}
{
"alien":false,
upper:"limosa"
rank:"species"
sc:"haemastica"
ja:"アメリカオグロシギ"
}
{
"alien":false,
upper:"limosa"
rank:"species"
sc:"lapponica"
ja:"オオソリハシシギ"
}
{
"alien":false,
upper:"numenius"
rank:"species"
sc:"minutus"
ja:"コシャクシギ"
}
{
"alien":false,
upper:"numenius"
rank:"species"
sc:"phaeopus"
ja:"チュウシャクシギ"
}
{
"alien":false,
upper:"numenius"
rank:"species"
sc:"tahitiensis"
ja:"ハリモモチュウシャク"
}
{
"alien":false,
upper:"numenius"
rank:"species"
sc:"tenuirostris"
ja:"シロハラチュウシャクシギ"
}
{
"alien":false,
upper:"numenius"
rank:"species"
sc:"arquata"
ja:"ダイシャクシギ"
}
{
"alien":false,
upper:"numenius"
rank:"species"
sc:"madagascariensis"
ja:"ホウロクシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"erythropus"
ja:"ツルシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"totanus"
ja:"アカアシシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"stagnatilis"
ja:"コアオアシシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"nebularia"
ja:"アオアシシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"guttifer"
ja:"カラフトアオアシシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"melanoleuca"
ja:"オオキアシシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"flavipes"
ja:"コキアシシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"ochropus"
ja:"クサシギ"
}
{
"alien":false,
upper:"tringa"
rank:"species"
sc:"glareola"
ja:"タカブシギ"
}
{
"alien":false,
upper:"heteroscelus"
rank:"species"
sc:"brevipes"
ja:"キアシシギ"
}
{
"alien":false,
upper:"heteroscelus"
rank:"species"
sc:"incanus"
ja:"メリケンキアシシギ"
}
{
"alien":false,
upper:"xenus"
rank:"species"
sc:"cinereus"
ja:"ソリハシシギ"
}
{
"alien":false,
upper:"actitis"
rank:"species"
sc:"hypoleucos"
ja:"イソシギ"
}
{
"alien":false,
upper:"actitis"
rank:"species"
sc:"macularius"
ja:"アメリカイソシギ"
}
{
"alien":false,
upper:"arenaria"
rank:"species"
sc:"interpres"
ja:"キョウジョシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"tenuirostris"
ja:"オバシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"canutus"
ja:"コオバシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"alba"
ja:"ミユビシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"mauri"
ja:"ヒメハマシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"ruficollis"
ja:"トウネン"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"minuta"
ja:"ヨーロッパトウネン"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"temminckii"
ja:"オジロトウネン"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"subminuta"
ja:"ヒバリシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"fuscicollis"
ja:"コシジロウズラシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"bairdii"
ja:"ヒメウズラシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"melanotos"
ja:"アメリカウズラシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"acuminata"
ja:"ウズラシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"ferruginea"
ja:"サルハマシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"ptilocnemis"
ja:"チシマシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"alpina"
ja:"ハマシギ"
}
{
"alien":false,
upper:"calidris"
rank:"species"
sc:"himantopus"
ja:"アシナガシギ"
}
{
"alien":false,
upper:"eurynorhynchus"
rank:"species"
sc:"pygmeus"
ja:"ヘラシギ"
}
{
"alien":false,
upper:"limicola"
rank:"species"
sc:"falcinellus"
ja:"キリアイ"
}
{
"alien":false,
upper:"tryngites"
rank:"species"
sc:"subruficollis"
ja:"コモンシギ"
}
{
"alien":false,
upper:"philomachus"
rank:"species"
sc:"pugnax"
ja:"エリマキシギ"
}
{
"alien":false,
upper:"phalaropus"
rank:"species"
sc:"tricolor"
ja:"アメリカヒレアシシギ"
}
{
"alien":false,
upper:"phalaropus"
rank:"species"
sc:"lobatus"
ja:"アカエリヒレアシシギ"
}
{
"alien":false,
upper:"phalaropus"
rank:"species"
sc:"fulicarius"
ja:"ハイイロヒレアシシギ"
}
{
"alien":false,
upper:"hydrophasianus"
rank:"species"
sc:"chirurgus"
ja:"レンカク"
}
{
"alien":false,
upper:"rostratula"
rank:"species"
sc:"benghalensis"
ja:"タマシギ"
}
{
"alien":false,
upper:"turnix"
rank:"species"
sc:"suscitator"
ja:"ミフウズラ"
}
{
"alien":false,
upper:"glareola"
rank:"species"
sc:"maldivarum"
ja:"ツバメチドリ"
}
{
"alien":false,
upper:"anous"
rank:"species"
sc:"stolidus"
ja:"クロアジサシ"
}
{
"alien":false,
upper:"anous"
rank:"species"
sc:"minutus"
ja:"ヒメクロアジサシ"
}
{
"alien":false,
upper:"procelsterna"
rank:"species"
sc:"cerulea"
ja:"ハイイロアジサシ"
}
{
"alien":false,
upper:"gygis"
rank:"species"
sc:"alba"
ja:"シロアジサシ"
}
{
"alien":false,
upper:"rissa"
rank:"species"
sc:"tridactyla"
ja:"ミツユビカモメ"
}
{
"alien":false,
upper:"rissa"
rank:"species"
sc:"brevirostris"
ja:"アカアシミツユビカモメ"
}
{
"alien":false,
upper:"pagophila"
rank:"species"
sc:"eburnea"
ja:"ゾウゲカモメ"
}
{
"alien":false,
upper:"xema"
rank:"species"
sc:"sabini"
ja:"クビワカモメ"
}
{
"alien":false,
upper:"rhodostethia"
rank:"species"
sc:"rosea"
ja:"ヒメクビワカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"genei"
ja:"ハシボソカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"philadelphia"
ja:"ボナパルトカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"brunnicephalus"
ja:"チャガシラカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"ridibundus"
ja:"ユリカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"saundersi"
ja:"ズグロカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"minutus"
ja:"ヒメカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"atricilla"
ja:"ワライカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"pipixcan"
ja:"アメリカズグロカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"relictus"
ja:"ゴビズキンカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"ichthyaetus"
ja:"オオズグロカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"crassirostris"
ja:"ウミネコ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"canus"
ja:"カモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"glaucescens"
ja:"ワシカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"hyperboreus"
ja:"シロカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"glaucoides"
ja:"アイスランドカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"thayeri"
ja:"カナダカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"argentatus"
ja:"セグロカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"cachinnans"
ja:"キアシセグロカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"schistisagus"
ja:"オオセグロカモメ"
}
{
"alien":false,
upper:"larus"
rank:"species"
sc:"fuscus"
ja:"ニシセグロカモメ"
}
{
"alien":false,
upper:"gelochelidon"
rank:"species"
sc:"nilotica"
ja:"ハシブトアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"caspia"
ja:"オニアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"bergii"
ja:"オオアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"bengalensis"
ja:"ベンガルアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"albifrons"
ja:"コアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"aleutica"
ja:"コシジロアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"lunata"
ja:"ナンヨウマミジロアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"anaethetus"
ja:"マミジロアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"fuscata"
ja:"セグロアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"dougallii"
ja:"ベニアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"sumatrana"
ja:"エリグロアジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"hirundo"
ja:"アジサシ"
}
{
"alien":false,
upper:"sterna"
rank:"species"
sc:"paradisaea"
ja:"キョクアジサシ"
}
{
"alien":false,
upper:"chlidonias"
rank:"species"
sc:"hybrida"
ja:"クロハラアジサシ"
}
{
"alien":false,
upper:"chlidonias"
rank:"species"
sc:"leucopterus"
ja:"ハジロクロハラアジサシ"
}
{
"alien":false,
upper:"chlidonias"
rank:"species"
sc:"niger"
ja:"ハシグロクロハラアジサシ"
}
{
"alien":false,
upper:"stercorarius"
rank:"species"
sc:"maccormicki"
ja:"オオトウゾクカモメ"
}
{
"alien":false,
upper:"stercorarius"
rank:"species"
sc:"pomarinus"
ja:"トウゾクカモメ"
}
{
"alien":false,
upper:"stercorarius"
rank:"species"
sc:"parasiticus"
ja:"クロトウゾクカモメ"
}
{
"alien":false,
upper:"stercorarius"
rank:"species"
sc:"longicaudus"
ja:"シロハラトウゾクカモメ"
}
{
"alien":false,
upper:"alle"
rank:"species"
sc:"alle"
ja:"ヒメウミスズメ"
}
{
"alien":false,
upper:"uria"
rank:"species"
sc:"lomvia"
ja:"ハシブトウミガラス"
}
{
"alien":false,
upper:"uria"
rank:"species"
sc:"aalge"
ja:"ウミガラス"
}
{
"alien":false,
upper:"alca"
rank:"species"
sc:"torda"
ja:"オオハシウミガラス"
}
{
"alien":false,
upper:"cepphus"
rank:"species"
sc:"columba"
ja:"ウミバト"
}
{
"alien":false,
upper:"cepphus"
rank:"species"
sc:"carbo"
ja:"ケイマフリ"
}
{
"alien":false,
upper:"brachyramphus"
rank:"species"
sc:"perdix"
ja:"マダラウミスズメ"
}
{
"alien":false,
upper:"synthliboramphus"
rank:"species"
sc:"antiquus"
ja:"ウミスズメ"
}
{
"alien":false,
upper:"synthliboramphus"
rank:"species"
sc:"wumizusume"
ja:"カンムリウミスズメ"
}
{
"alien":false,
upper:"aethia"
rank:"species"
sc:"psittacula"
ja:"ウミオウム"
}
{
"alien":false,
upper:"aethia"
rank:"species"
sc:"pusilla"
ja:"コウミスズメ"
}
{
"alien":false,
upper:"aethia"
rank:"species"
sc:"pygmaea"
ja:"シラヒゲウミスズメ"
}
{
"alien":false,
upper:"aethia"
rank:"species"
sc:"cristatella"
ja:"エトロフウミスズメ"
}
{
"alien":false,
upper:"cerorhinca"
rank:"species"
sc:"monocerata"
ja:"ウトウ"
}
{
"alien":false,
upper:"fratercula"
rank:"species"
sc:"corniculata"
ja:"ツノメドリ"
}
{
"alien":false,
upper:"fratercula"
rank:"species"
sc:"cirrhata"
ja:"エトピリカ"
}
{
"alien":false,
upper:"pandion"
rank:"species"
sc:"haliaetus"
ja:"ミサゴ"
}
{
"alien":false,
upper:"pernis"
rank:"species"
sc:"ptilorhynchus"
ja:"ハチクマ"
}
{
"alien":false,
upper:"elanus"
rank:"species"
sc:"caeruleus"
ja:"カタグロトビ"
}
{
"alien":false,
upper:"milvus"
rank:"species"
sc:"migrans"
ja:"トビ"
}
{
"alien":false,
upper:"haliaeetus"
rank:"species"
sc:"albicilla"
ja:"オジロワシ"
}
{
"alien":false,
upper:"haliaeetus"
rank:"species"
sc:"leucocephalus"
ja:"ハクトウワシ"
}
{
"alien":false,
upper:"haliaeetus"
rank:"species"
sc:"pelagicus"
ja:"オオワシ"
}
{
"alien":false,
upper:"aegypius"
rank:"species"
sc:"monachus"
ja:"クロハゲワシ"
}
{
"alien":false,
upper:"spilornis"
rank:"species"
sc:"cheela"
ja:"カンムリワシ"
}
{
"alien":false,
upper:"circus"
rank:"species"
sc:"aeruginosus"
ja:"ヨーロッパチュウヒ"
}
{
"alien":false,
upper:"circus"
rank:"species"
sc:"spilonotus"
ja:"チュウヒ"
}
{
"alien":false,
upper:"circus"
rank:"species"
sc:"cyaneus"
ja:"ハイイロチュウヒ"
}
{
"alien":false,
upper:"circus"
rank:"species"
sc:"macrourus"
ja:"ウスハイイロチュウヒ"
}
{
"alien":false,
upper:"circus"
rank:"species"
sc:"melanoleucos"
ja:"マダラチュウヒ"
}
{
"alien":false,
upper:"accipiter"
rank:"species"
sc:"soloensis"
ja:"アカハラダカ"
}
{
"alien":false,
upper:"accipiter"
rank:"species"
sc:"gularis"
ja:"ツミ"
}
{
"alien":false,
upper:"accipiter"
rank:"species"
sc:"nisus"
ja:"ハイタカ"
}
{
"alien":false,
upper:"accipiter"
rank:"species"
sc:"gentilis"
ja:"オオタカ"
}
{
"alien":false,
upper:"butastur"
rank:"species"
sc:"indicus"
ja:"サシバ"
}
{
"alien":false,
upper:"buteo"
rank:"species"
sc:"buteo"
ja:"ノスリ"
}
{
"alien":false,
upper:"buteo"
rank:"species"
sc:"hemilasius"
ja:"オオノスリ"
}
{
"alien":false,
upper:"buteo"
rank:"species"
sc:"lagopus"
ja:"ケアシノスリ"
}
{
"alien":false,
upper:"aquila"
rank:"species"
sc:"clanga"
ja:"カラフトワシ"
}
{
"alien":false,
upper:"aquila"
rank:"species"
sc:"heliaca"
ja:"カタシロワシ"
}
{
"alien":false,
upper:"aquila"
rank:"species"
sc:"chrysaetos"
ja:"イヌワシ"
}
{
"alien":false,
upper:"nisaetus"
rank:"species"
sc:"nipalensis"
ja:"クマタカ"
}
{
"alien":false,
upper:"tyto"
rank:"species"
sc:"longimembris"
ja:"ヒガシメンフクロウ"
}
{
"alien":false,
upper:"otus"
rank:"species"
sc:"lempiji"
ja:"オオコノハズク"
}
{
"alien":false,
upper:"otus"
rank:"species"
sc:"sunia"
ja:"コノハズク"
}
{
"alien":false,
upper:"otus"
rank:"species"
sc:"elegans"
ja:"リュウキュウコノハズク"
}
{
"alien":false,
upper:"bubo"
rank:"species"
sc:"scandiacus"
ja:"シロフクロウ"
}
{
"alien":false,
upper:"bubo"
rank:"species"
sc:"bubo"
ja:"ワシミミズク"
}
{
"alien":false,
upper:"ketupa"
rank:"species"
sc:"blakistoni"
ja:"シマフクロウ"
}
{
"alien":false,
upper:"strix"
rank:"species"
sc:"uralensis"
ja:"フクロウ"
}
{
"alien":false,
upper:"aegolius"
rank:"species"
sc:"funereus"
ja:"キンメフクロウ"
}
{
"alien":false,
upper:"ninox"
rank:"species"
sc:"scutulata"
ja:"アオバズク"
}
{
"alien":false,
upper:"asio"
rank:"species"
sc:"otus"
ja:"トラフズク"
}
{
"alien":false,
upper:"asio"
rank:"species"
sc:"flammeus"
ja:"コミミズク"
}
{
"alien":false,
upper:"upupa"
rank:"species"
sc:"epops"
ja:"ヤツガシラ"
}
{
"alien":false,
upper:"halcyon"
rank:"species"
sc:"coromanda"
ja:"アカショウビン"
}
{
"alien":false,
upper:"halcyon"
rank:"species"
sc:"smyrnensis"
ja:"アオショウビン"
}
{
"alien":false,
upper:"halcyon"
rank:"species"
sc:"pileata"
ja:"ヤマショウビン"
}
{
"alien":false,
upper:"todiramphus"
rank:"species"
sc:"chloris"
ja:"ナンヨウショウビン"
}
{
"alien":false,
upper:"todiramphus"
rank:"species"
sc:"miyakoensis"
ja:"ミヤコショウビン"
}
{
"alien":false,
upper:"alcedo"
rank:"species"
sc:"atthis"
ja:"カワセミ"
}
{
"alien":false,
upper:"ceyx"
rank:"species"
sc:"erithaca"
ja:"ミツユビカワセミ"
}
{
"alien":false,
upper:"megaceryle"
rank:"species"
sc:"lugubris"
ja:"ヤマセミ"
}
{
"alien":false,
upper:"merops"
rank:"species"
sc:"ornatus"
ja:"ハチクイ"
}
{
"alien":false,
upper:"eurystomus"
rank:"species"
sc:"orientalis"
ja:"ブッポウソウ"
}
{
"alien":false,
upper:"jynx"
rank:"species"
sc:"torquilla"
ja:"アリスイ"
}
{
"alien":false,
upper:"dendrocopos"
rank:"species"
sc:"hyperythrus"
ja:"チャバラアカゲラ"
}
{
"alien":false,
upper:"dendrocopos"
rank:"species"
sc:"kizuki"
ja:"コゲラ"
}
{
"alien":false,
upper:"dendrocopos"
rank:"species"
sc:"minor"
ja:"コアカゲラ"
}
{
"alien":false,
upper:"dendrocopos"
rank:"species"
sc:"leucotos"
ja:"オオアカゲラ"
}
{
"alien":false,
upper:"dendrocopos"
rank:"species"
sc:"major"
ja:"アカゲラ"
}
{
"alien":false,
upper:"picoides"
rank:"species"
sc:"tridactylus"
ja:"ミユビゲラ"
}
{
"alien":false,
upper:"dryocopus"
rank:"species"
sc:"javensis"
ja:"キタタキ"
}
{
"alien":false,
upper:"dryocopus"
rank:"species"
sc:"martius"
ja:"クマゲラ"
}
{
"alien":false,
upper:"picus"
rank:"species"
sc:"awokera"
ja:"アオゲラ"
}
{
"alien":false,
upper:"picus"
rank:"species"
sc:"canus"
ja:"ヤマゲラ"
}
{
"alien":false,
upper:"sapheopipo"
rank:"species"
sc:"noguchii"
ja:"ノグチゲラ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"naumanni"
ja:"ヒメチョウゲンボウ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"tinnunculus"
ja:"チョウゲンボウ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"amurensis"
ja:"アカアシチョウゲンボウ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"columbarius"
ja:"コチョウゲンボウ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"subbuteo"
ja:"チゴハヤブサ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"cherrug"
ja:"ワキスジハヤブサ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"rusticolus"
ja:"シロハヤブサ"
}
{
"alien":false,
upper:"falco"
rank:"species"
sc:"peregrinus"
ja:"ハヤブサ"
}
{
"alien":false,
upper:"pitta"
rank:"species"
sc:"sordida"
ja:"ズグロヤイロチョウ"
}
{
"alien":false,
upper:"pitta"
rank:"species"
sc:"nympha"
ja:"ヤイロチョウ"
}
{
"alien":false,
upper:"artamus"
rank:"species"
sc:"leucorynchus"
ja:"モリツバメ"
}
{
"alien":false,
upper:"coracina"
rank:"species"
sc:"melaschistos"
ja:"アサクラサンショウクイ"
}
{
"alien":false,
upper:"pericrocotus"
rank:"species"
sc:"divaricatus"
ja:"サンショウクイ"
}
{
"alien":false,
upper:"oriolus"
rank:"species"
sc:"chinensis"
ja:"コウライウグイス"
}
{
"alien":false,
upper:"dicrurus"
rank:"species"
sc:"macrocercus"
ja:"オウチュウ"
}
{
"alien":false,
upper:"dicrurus"
rank:"species"
sc:"leucophaeus"
ja:"ハイイロオウチュウ"
}
{
"alien":false,
upper:"dicrurus"
rank:"species"
sc:"hottentottus"
ja:"カンムリオウチュウ"
}
{
"alien":false,
upper:"hypothymis"
rank:"species"
sc:"azurea"
ja:"クロエリヒタキ"
}
{
"alien":false,
upper:"terpsiphone"
rank:"species"
sc:"atrocaudata"
ja:"サンコウチョウ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"tigrinus"
ja:"チゴモズ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"bucephalus"
ja:"モズ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"cristatus"
ja:"アカモズ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"collurio"
ja:"セアカモズ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"isabellinus"
ja:"モウコアカモズ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"schach"
ja:"タカサゴモズ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"excubitor"
ja:"オオモズ"
}
{
"alien":false,
upper:"lanius"
rank:"species"
sc:"sphenocercus"
ja:"オオカラモズ"
}
{
"alien":false,
upper:"garrulus"
rank:"species"
sc:"glandarius"
ja:"カケス"
}
{
"alien":false,
upper:"garrulus"
rank:"species"
sc:"lidthi"
ja:"ルリカケス"
}
{
"alien":false,
upper:"cyanopica"
rank:"species"
sc:"cyanus"
ja:"オナガ"
}
{
"alien":false,
upper:"pica"
rank:"species"
sc:"pica"
ja:"カササギ"
}
{
"alien":false,
upper:"nucifraga"
rank:"species"
sc:"caryocatactes"
ja:"ホシガラス"
}
{
"alien":false,
upper:"corvus"
rank:"species"
sc:"monedula"
ja:"ニシコクマルガラス"
}
{
"alien":false,
upper:"corvus"
rank:"species"
sc:"dauuricus"
ja:"コクマルガラス"
}
{
"alien":false,
upper:"corvus"
rank:"species"
sc:"frugilegus"
ja:"ミヤマガラス"
}
{
"alien":false,
upper:"corvus"
rank:"species"
sc:"corone"
ja:"ハシボソガラス"
}
{
"alien":false,
upper:"corvus"
rank:"species"
sc:"macrorhynchos"
ja:"ハシブトガラス"
}
{
"alien":false,
upper:"corvus"
rank:"species"
sc:"corax"
ja:"ワタリガラス"
}
{
"alien":false,
upper:"regulus"
rank:"species"
sc:"regulus"
ja:"キクイタダキ"
}
{
"alien":false,
upper:"remiz"
rank:"species"
sc:"pendulinus"
ja:"ツリスガラ"
}
{
"alien":false,
upper:"poecile"
rank:"species"
sc:"palustris"
ja:"ハシブトガラ"
}
{
"alien":false,
upper:"poecile"
rank:"species"
sc:"montanus"
ja:"コガラ"
}
{
"alien":false,
upper:"poecile"
rank:"species"
sc:"varius"
ja:"ヤマガラ"
}
{
"alien":false,
upper:"periparus"
rank:"species"
sc:"ater"
ja:"ヒガラ"
}
{
"alien":false,
upper:"periparus"
rank:"species"
sc:"venustulus"
ja:"キバラガラ"
}
{
"alien":false,
upper:"parus"
rank:"species"
sc:"minor"
ja:"シジュウカラ"
}
{
"alien":false,
upper:"cyanistes"
rank:"species"
sc:"cyanus"
ja:"ルリガラ"
}
{
"alien":false,
upper:"panurus"
rank:"species"
sc:"biarmicus"
ja:"ヒゲガラ"
}
{
"alien":false,
upper:"melanocorypha"
rank:"species"
sc:"bimaculata"
ja:"クビワコウテンシ"
}
{
"alien":false,
upper:"melanocorypha"
rank:"species"
sc:"mongolica"
ja:"コウテンシ"
}
{
"alien":false,
upper:"calandrella"
rank:"species"
sc:"brachydactyla"
ja:"ヒメコウテンシ"
}
{
"alien":false,
upper:"calandrella"
rank:"species"
sc:"cheleensis"
ja:"コヒバリ"
}
{
"alien":false,
upper:"alauda"
rank:"species"
sc:"arvensis"
ja:"ヒバリ"
}
{
"alien":false,
upper:"eremophila"
rank:"species"
sc:"alpestris"
ja:"ハマヒバリ"
}
{
"alien":false,
upper:"riparia"
rank:"species"
sc:"paludicola"
ja:"タイワンショウドウツバメ"
}
{
"alien":false,
upper:"riparia"
rank:"species"
sc:"riparia"
ja:"ショウドウツバメ"
}
{
"alien":false,
upper:"tachycineta"
rank:"species"
sc:"bicolor"
ja:"ミドリツバメ"
}
{
"alien":false,
upper:"hirundo"
rank:"species"
sc:"rustica"
ja:"ツバメ"
}
{
"alien":false,
upper:"hirundo"
rank:"species"
sc:"tahitica"
ja:"リュウキュウツバメ"
}
{
"alien":false,
upper:"hirundo"
rank:"species"
sc:"daurica"
ja:"コシアカツバメ"
}
{
"alien":false,
upper:"delichon"
rank:"species"
sc:"urbicum"
ja:"ニシイワツバメ"
}
{
"alien":false,
upper:"delichon"
rank:"species"
sc:"dasypus"
ja:"イワツバメ"
}
{
"alien":false,
upper:"pycnonotus"
rank:"species"
sc:"sinensis"
ja:"シロガシラ"
}
{
"alien":false,
upper:"hypsipetes"
rank:"species"
sc:"amaurotis"
ja:"ヒヨドリ"
}
{
"alien":false,
upper:"cettia"
rank:"species"
sc:"diphone"
ja:"ウグイス"
}
{
"alien":false,
upper:"urosphena"
rank:"species"
sc:"squameiceps"
ja:"ヤブサメ"
}
{
"alien":false,
upper:"aegithalos"
rank:"species"
sc:"caudatus"
ja:"エナガ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"trochilus"
ja:"キタヤナギムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"collybita"
ja:"チフチャフ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"sibilatrix"
ja:"モリムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"fuscatus"
ja:"ムジセッカ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"affinis"
ja:"キバラムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"schwarzi"
ja:"カラフトムジセッカ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"proregulus"
ja:"カラフトムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"inornatus"
ja:"キマユムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"borealis"
ja:"コムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"examinandus"
ja:"オオムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"xanthodryas"
ja:"メボソムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"plumbeitarsus"
ja:"ヤナギムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"borealoides"
ja:"エゾムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"coronatus"
ja:"センダイムシクイ"
}
{
"alien":false,
upper:"phylloscopus"
rank:"species"
sc:"ijimae"
ja:"イイジマムシクイ"
}
{
"alien":false,
upper:"sylvia"
rank:"species"
sc:"curruca"
ja:"コノドジロムシクイ"
}
{
"alien":false,
upper:"apalopteron"
rank:"species"
sc:"familiare"
ja:"メグロ"
}
{
"alien":false,
upper:"zosterops"
rank:"species"
sc:"erythropleurus"
ja:"チョウセンメジロ"
}
{
"alien":false,
upper:"zosterops"
rank:"species"
sc:"japonicus"
ja:"メジロ"
}
{
"alien":false,
upper:"locustella"
rank:"species"
sc:"lanceolata"
ja:"マキノセンニュウ"
}
{
"alien":false,
upper:"locustella"
rank:"species"
sc:"ochotensis"
ja:"シマセンニュウ"
}
{
"alien":false,
upper:"locustella"
rank:"species"
sc:"pleskei"
ja:"ウチヤマセンニュウ"
}
{
"alien":false,
upper:"locustella"
rank:"species"
sc:"certhiola"
ja:"シベリアセンニュウ"
}
{
"alien":false,
upper:"locustella"
rank:"species"
sc:"pryeri"
ja:"オオセッカ"
}
{
"alien":false,
upper:"locustella"
rank:"species"
sc:"fasciolata"
ja:"エゾセンニュウ"
}
{
"alien":false,
upper:"acrocephalus"
rank:"species"
sc:"orientalis"
ja:"オオヨシキリ"
}
{
"alien":false,
upper:"acrocephalus"
rank:"species"
sc:"bistrigiceps"
ja:"コヨシキリ"
}
{
"alien":false,
upper:"acrocephalus"
rank:"species"
sc:"sorghophilus"
ja:"セスジコヨシキリ"
}
{
"alien":false,
upper:"acrocephalus"
rank:"species"
sc:"agricola"
ja:"イナダヨシキリ"
}
{
"alien":false,
upper:"acrocephalus"
rank:"species"
sc:"dumetorum"
ja:"ヤブヨシキリ"
}
{
"alien":false,
upper:"acrocephalus"
rank:"species"
sc:"aedon"
ja:"ハシブトオオヨシキリ"
}
{
"alien":false,
upper:"iduna"
rank:"species"
sc:"caligata"
ja:"ヒメウタイムシクイ"
}
{
"alien":false,
upper:"cisticola"
rank:"species"
sc:"juncidis"
ja:"セッカ"
}
{
"alien":false,
upper:"bombycilla"
rank:"species"
sc:"garrulus"
ja:"キレンジャク"
}
{
"alien":false,
upper:"bombycilla"
rank:"species"
sc:"japonica"
ja:"ヒレンジャク"
}
{
"alien":false,
upper:"sitta"
rank:"species"
sc:"europaea"
ja:"ゴジュウカラ"
}
{
"alien":false,
upper:"certhia"
rank:"species"
sc:"familiaris"
ja:"キバシリ"
}
{
"alien":false,
upper:"troglodytes"
rank:"species"
sc:"troglodytes"
ja:"ミソサザイ"
}
{
"alien":false,
upper:"spodiopsar"
rank:"species"
sc:"sericeus"
ja:"ギンムクドリ"
}
{
"alien":false,
upper:"spodiopsar"
rank:"species"
sc:"cineraceus"
ja:"ムクドリ"
}
{
"alien":false,
upper:"agropsar"
rank:"species"
sc:"sturninus"
ja:"シベリアムクドリ"
}
{
"alien":false,
upper:"agropsar"
rank:"species"
sc:"philippensis"
ja:"コムクドリ"
}
{
"alien":false,
upper:"sturnia"
rank:"species"
sc:"sinensis"
ja:"カラムクドリ"
}
{
"alien":false,
upper:"pastor"
rank:"species"
sc:"roseus"
ja:"バライロムクドリ"
}
{
"alien":false,
upper:"sturnus"
rank:"species"
sc:"vulgaris"
ja:"ホシムクドリ"
}
{
"alien":false,
upper:"cinclus"
rank:"species"
sc:"pallasii"
ja:"カワガラス"
}
{
"alien":false,
upper:"zoothera"
rank:"species"
sc:"sibirica"
ja:"マミジロ"
}
{
"alien":false,
upper:"zoothera"
rank:"species"
sc:"dauma"
ja:"トラツグミ"
}
{
"alien":false,
upper:"cichlopasser"
rank:"species"
sc:"terrestris"
ja:"オガサワラガビチョウ"
}
{
"alien":false,
upper:"catharus"
rank:"species"
sc:"minimus"
ja:"ハイイロチャツグミ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"hortulorum"
ja:"カラアカハラ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"cardis"
ja:"クロツグミ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"merula"
ja:"クロウタドリ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"obscurus"
ja:"マミチャジナイ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"pallidus"
ja:"シロハラ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"chrysolaus"
ja:"アカハラ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"celaenops"
ja:"アカコッコ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"ruficollis"
ja:"ノドグロツグミ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"naumanni"
ja:"ツグミ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"pilaris"
ja:"ノハラツグミ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"iliacus"
ja:"ワキアカツグミ"
}
{
"alien":false,
upper:"turdus"
rank:"species"
sc:"viscivorus"
ja:"ヤドリギツグミ"
}
{
"alien":false,
upper:"erithacus"
rank:"species"
sc:"rubecula"
ja:"ヨーロッパコマドリ"
}
{
"alien":false,
upper:"luscinia"
rank:"species"
sc:"akahige"
ja:"コマドリ"
}
{
"alien":false,
upper:"luscinia"
rank:"species"
sc:"komadori"
ja:"アカヒゲ"
}
{
"alien":false,
upper:"luscinia"
rank:"species"
sc:"svecica"
ja:"オガワコマドリ"
}
{
"alien":false,
upper:"luscinia"
rank:"species"
sc:"calliope"
ja:"ノゴマ"
}
{
"alien":false,
upper:"luscinia"
rank:"species"
sc:"cyane"
ja:"コルリ"
}
{
"alien":false,
upper:"luscinia"
rank:"species"
sc:"sibilans"
ja:"シマゴマ"
}
{
"alien":false,
upper:"tarsiger"
rank:"species"
sc:"cyanurus"
ja:"ルリビタキ"
}
{
"alien":false,
upper:"phoenicurus"
rank:"species"
sc:"erythronotus"
ja:"セアカジョウビタキ"
}
{
"alien":false,
upper:"phoenicurus"
rank:"species"
sc:"ochruros"
ja:"クロジョウビタキ"
}
{
"alien":false,
upper:"phoenicurus"
rank:"species"
sc:"phoenicurus"
ja:"シロビタイジョウビタキ"
}
{
"alien":false,
upper:"phoenicurus"
rank:"species"
sc:"auroreus"
ja:"ジョウビタキ"
}
{
"alien":false,
upper:"saxicola"
rank:"species"
sc:"rubetra"
ja:"マミジロノビタキ"
}
{
"alien":false,
upper:"saxicola"
rank:"species"
sc:"torquatus"
ja:"ノビタキ"
}
{
"alien":false,
upper:"saxicola"
rank:"species"
sc:"caprata"
ja:"クロノビタキ"
}
{
"alien":false,
upper:"saxicola"
rank:"species"
sc:"ferreus"
ja:"ヤマザキヒタキ"
}
{
"alien":false,
upper:"oenanthe"
rank:"species"
sc:"isabellina"
ja:"イナバヒタキ"
}
{
"alien":false,
upper:"oenanthe"
rank:"species"
sc:"oenanthe"
ja:"ハシグロヒタキ"
}
{
"alien":false,
upper:"oenanthe"
rank:"species"
sc:"pleschanka"
ja:"セグロサバクヒタキ"
}
{
"alien":false,
upper:"oenanthe"
rank:"species"
sc:"deserti"
ja:"サバクヒタキ"
}
{
"alien":false,
upper:"monticola"
rank:"species"
sc:"solitarius"
ja:"イソヒヨドリ"
}
{
"alien":false,
upper:"monticola"
rank:"species"
sc:"gularis"
ja:"ヒメイソヒヨ"
}
{
"alien":false,
upper:"muscicapa"
rank:"species"
sc:"striata"
ja:"ムナフヒタキ"
}
{
"alien":false,
upper:"muscicapa"
rank:"species"
sc:"griseisticta"
ja:"エゾビタキ"
}
{
"alien":false,
upper:"muscicapa"
rank:"species"
sc:"sibirica"
ja:"サメビタキ"
}
{
"alien":false,
upper:"muscicapa"
rank:"species"
sc:"dauurica"
ja:"コサメビタキ"
}
{
"alien":false,
upper:"muscicapa"
rank:"species"
sc:"ferruginea"
ja:"ミヤマヒタキ"
}
{
"alien":false,
upper:"ficedula"
rank:"species"
sc:"hypoleuca"
ja:"マダラヒタキ"
}
{
"alien":false,
upper:"ficedula"
rank:"species"
sc:"zanthopygia"
ja:"マミジロキビタキ"
}
{
"alien":false,
upper:"ficedula"
rank:"species"
sc:"narcissina"
ja:"キビタキ"
}
{
"alien":false,
upper:"ficedula"
rank:"species"
sc:"mugimaki"
ja:"ムギマキ"
}
{
"alien":false,
upper:"ficedula"
rank:"species"
sc:"albicilla"
ja:"オジロビタキ"
}
{
"alien":false,
upper:"cyanoptila"
rank:"species"
sc:"cyanomelana"
ja:"オオルリ"
}
{
"alien":false,
upper:"eumyias"
rank:"species"
sc:"thalassinus"
ja:"ロクショウヒタキ"
}
{
"alien":false,
upper:"niltava"
rank:"species"
sc:"vivida"
ja:"チャバラオオルリ"
}
{
"alien":false,
upper:"prunella"
rank:"species"
sc:"collaris"
ja:"イワヒバリ"
}
{
"alien":false,
upper:"prunella"
rank:"species"
sc:"montanella"
ja:"ヤマヒバリ"
}
{
"alien":false,
upper:"prunella"
rank:"species"
sc:"rubida"
ja:"カヤクグリ"
}
{
"alien":false,
upper:"passer"
rank:"species"
sc:"domesticus"
ja:"イエスズメ"
}
{
"alien":false,
upper:"passer"
rank:"species"
sc:"rutilans"
ja:"ニュウナイスズメ"
}
{
"alien":false,
upper:"passer"
rank:"species"
sc:"montanus"
ja:"スズメ"
}
{
"alien":false,
upper:"dendronanthus"
rank:"species"
sc:"indicus"
ja:"イワミセキレイ"
}
{
"alien":false,
upper:"motacilla"
rank:"species"
sc:"flava"
ja:"ツメナガセキレイ"
}
{
"alien":false,
upper:"motacilla"
rank:"species"
sc:"citreola"
ja:"キガシラセキレイ"
}
{
"alien":false,
upper:"motacilla"
rank:"species"
sc:"cinerea"
ja:"キセキレイ"
}
{
"alien":false,
upper:"motacilla"
rank:"species"
sc:"alba"
ja:"ハクセキレイ"
}
{
"alien":false,
upper:"motacilla"
rank:"species"
sc:"grandis"
ja:"セグロセキレイ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"richardi"
ja:"マミジロタヒバリ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"godlewskii"
ja:"コマミジロタヒバリ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"pratensis"
ja:"マキバタヒバリ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"trivialis"
ja:"ヨーロッパビンズイ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"hodgsoni"
ja:"ビンズイ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"gustavi"
ja:"セジロタヒバリ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"roseatus"
ja:"ウスベニタヒバリ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"cervinus"
ja:"ムネアカタヒバリ"
}
{
"alien":false,
upper:"anthus"
rank:"species"
sc:"rubescens"
ja:"タヒバリ"
}
{
"alien":false,
upper:"fringilla"
rank:"species"
sc:"coelebs"
ja:"ズアオアトリ"
}
{
"alien":false,
upper:"fringilla"
rank:"species"
sc:"montifringilla"
ja:"アトリ"
}
{
"alien":false,
upper:"chloris"
rank:"species"
sc:"sinica"
ja:"カワラヒワ"
}
{
"alien":false,
upper:"carduelis"
rank:"species"
sc:"spinus"
ja:"マヒワ"
}
{
"alien":false,
upper:"carduelis"
rank:"species"
sc:"flammea"
ja:"ベニヒワ"
}
{
"alien":false,
upper:"carduelis"
rank:"species"
sc:"hornemanni"
ja:"コベニヒワ"
}
{
"alien":false,
upper:"leucosticte"
rank:"species"
sc:"arctoa"
ja:"ハギマシコ"
}
{
"alien":false,
upper:"uragus"
rank:"species"
sc:"sibiricus"
ja:"ベニマシコ"
}
{
"alien":false,
upper:"chaunoproctus"
rank:"species"
sc:"ferreorostris"
ja:"オガサワラマシコ"
}
{
"alien":false,
upper:"carpodacus"
rank:"species"
sc:"erythrinus"
ja:"アカマシコ"
}
{
"alien":false,
upper:"carpodacus"
rank:"species"
sc:"roseus"
ja:"オオマシコ"
}
{
"alien":false,
upper:"pinicola"
rank:"species"
sc:"enucleator"
ja:"ギンザンマシコ"
}
{
"alien":false,
upper:"loxia"
rank:"species"
sc:"curvirostra"
ja:"イスカ"
}
{
"alien":false,
upper:"loxia"
rank:"species"
sc:"leucoptera"
ja:"ナキイスカ"
}
{
"alien":false,
upper:"pyrrhula"
rank:"species"
sc:"pyrrhula"
ja:"ウソ"
}
{
"alien":false,
upper:"coccothraustes"
rank:"species"
sc:"coccothraustes"
ja:"シメ"
}
{
"alien":false,
upper:"eophona"
rank:"species"
sc:"migratoria"
ja:"コイカル"
}
{
"alien":false,
upper:"eophona"
rank:"species"
sc:"personata"
ja:"イカル"
}
{
"alien":false,
upper:"calcarius"
rank:"species"
sc:"lapponicus"
ja:"ツメナガホオジロ"
}
{
"alien":false,
upper:"plectrophenax"
rank:"species"
sc:"nivalis"
ja:"ユキホオジロ"
}
{
"alien":false,
upper:"setophaga"
rank:"species"
sc:"coronata"
ja:"キヅタアメリカムシクイ"
}
{
"alien":false,
upper:"cardellina"
rank:"species"
sc:"pusilla"
ja:"ウィルソンアメリカムシクイ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"lathami"
ja:"レンジャクノジコ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"citrinella"
ja:"キアオジ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"leucocephalos"
ja:"シラガホオジロ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"cioides"
ja:"ホオジロ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"buchanani"
ja:"イワバホオジロ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"hortulana"
ja:"ズアオホオジロ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"tristrami"
ja:"シロハラホオジロ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"fucata"
ja:"ホオアカ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"pusilla"
ja:"コホオアカ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"chrysophrys"
ja:"キマユホオジロ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"rustica"
ja:"カシラダカ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"elegans"
ja:"ミヤマホオジロ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"aureola"
ja:"シマアオジ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"rutila"
ja:"シマノジコ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"melanocephala"
ja:"ズグロチャキンチョウ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"bruniceps"
ja:"チャキンチョウ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"sulphurata"
ja:"ノジコ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"spodocephala"
ja:"アオジ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"variabilis"
ja:"クロジ"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"pallasi"
ja:"シベリアジュリン"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"yessoensis"
ja:"コジュリン"
}
{
"alien":false,
upper:"emberiza"
rank:"species"
sc:"schoeniclus"
ja:"オオジュリン"
}
{
"alien":false,
upper:"passerella"
rank:"species"
sc:"iliaca"
ja:"ゴマフスズメ"
}
{
"alien":false,
upper:"melospiza"
rank:"species"
sc:"melodia"
ja:"ウタスズメ"
}
{
"alien":false,
upper:"zonotrichia"
rank:"species"
sc:"leucophrys"
ja:"ミヤマシトド"
}
{
"alien":false,
upper:"zonotrichia"
rank:"species"
sc:"atricapilla"
ja:"キガシラシトド"
}
{
"alien":false,
upper:"passerculus"
rank:"species"
sc:"sandwichensis"
ja:"サバンナシトド"
}
{
"alien":true,
upper:"bambusicola"
rank:"species"
sc:"thoracicus"
ja:"コジュケイ"
}
{
"alien":true,
upper:"syrmaticus"
rank:"species"
sc:"soemmerringii"
ja:"ヤマドリ"
}
{
"alien":true,
upper:"phasianus"
rank:"species"
sc:"colchicus"
ja:"キジ"
}
{
"alien":true,
upper:"pavo"
rank:"species"
sc:"cristatus"
ja:"インドクジャク"
}
{
"alien":true,
upper:"branta"
rank:"species"
sc:"canadensis"
ja:"カナダガン"
}
{
"alien":true,
upper:"cygnus"
rank:"species"
sc:"atratus"
ja:"コクチョウ"
}
{
"alien":true,
upper:"cygnus"
rank:"species"
sc:"olor"
ja:"コブハクチョウ"
}
{
"alien":true,
upper:"columba"
rank:"species"
sc:"livia"
ja:"カワラバト(ドバト)"
}
{
"alien":true,
upper:"streptopelia"
rank:"species"
sc:"decaocto"
ja:"シラコバト"
}
{
"alien":true,
upper:"ciconia"
rank:"species"
sc:"boyciana"
ja:"コウノトリ"
}
{
"alien":true,
upper:"nipponia"
rank:"species"
sc:"nippon"
ja:"トキ"
}
{
"alien":true,
upper:"himantopus"
rank:"species"
sc:"himantopus"
ja:"セイタカシギ"
}
{
"alien":true,
upper:"melopsittacus"
rank:"species"
sc:"undulatus"
ja:"セキセイインコ"
}
{
"alien":true,
upper:"psittacula"
rank:"species"
sc:"eupatria"
ja:"オオホンセイインコ"
}
{
"alien":true,
upper:"psittacula"
rank:"species"
sc:"krameri"
ja:"ホンセイインコ"
}
{
"alien":true,
upper:"psittacula"
rank:"species"
sc:"alexandri"
ja:"ダルマインコ"
}
{
"alien":true,
upper:"myiopsitta"
rank:"species"
sc:"monachus"
ja:"オキナインコ"
}
{
"alien":true,
upper:"urocissa"
rank:"species"
sc:"caerulea"
ja:"ヤマムスメ"
}
{
"alien":true,
upper:"pica"
rank:"species"
sc:"pica"
ja:"カササギ"
}
{
"alien":true,
upper:"zosterops"
rank:"species"
sc:"japonicus"
ja:"メジロ"
}
{
"alien":true,
upper:"garrulax"
rank:"species"
sc:"canorus"
ja:"ガビチョウ"
}
{
"alien":true,
upper:"garrulax"
rank:"species"
sc:"cineraceus"
ja:"ヒゲガビチョウ"
}
{
"alien":true,
upper:"garrulax"
rank:"species"
sc:"perspicillatus"
ja:"カオグロガビチョウ"
}
{
"alien":true,
upper:"garrulax"
rank:"species"
sc:"sannio"
ja:"カオジロガビチョウ"
}
{
"alien":true,
upper:"leiothrix"
rank:"species"
sc:"lutea"
ja:"ソウシチョウ"
}
{
"alien":true,
upper:"acridotheres"
rank:"species"
sc:"cristatellus"
ja:"ハッカチョウ"
}
{
"alien":true,
upper:"acridotheres"
rank:"species"
sc:"javanicus"
ja:"モリハッカ"
}
{
"alien":true,
upper:"acridotheres"
rank:"species"
sc:"ginginianus"
ja:"ハイイロハッカ"
}
{
"alien":true,
upper:"acridotheres"
rank:"species"
sc:"tristis"
ja:"インドハッカ"
}
{
"alien":true,
upper:"gracupica"
rank:"species"
sc:"contra"
ja:"ホオジロムクドリ"
}
{
"alien":true,
upper:"ploceus"
rank:"species"
sc:"intermedius"
ja:"メンハタオリドリ"
}
{
"alien":true,
upper:"euplectes"
rank:"species"
sc:"orix"
ja:"オオキンランチョウ"
}
{
"alien":true,
upper:"estrilda"
rank:"species"
sc:"melpoda"
ja:"ホオアカカエデチョウ"
}
{
"alien":true,
upper:"estrilda"
rank:"species"
sc:"troglodytes"
ja:"カエデチョウ"
}
{
"alien":true,
upper:"amandava"
rank:"species"
sc:"amandava"
ja:"ベニスズメ"
}
{
"alien":true,
upper:"lonchura"
rank:"species"
sc:"striata"
ja:"コシジロキンパラ"
}
{
"alien":true,
upper:"lonchura"
rank:"species"
sc:"punctulata"
ja:"シマキンパラ"
}
{
"alien":true,
upper:"lonchura"
rank:"species"
sc:"malacca"
ja:"ギンパラ"
}
{
"alien":true,
upper:"lonchura"
rank:"species"
sc:"atricapilla"
ja:"キンパラ"
}
{
"alien":true,
upper:"lonchura"
rank:"species"
sc:"maja"
ja:"ヘキチョウ"
}
{
"alien":true,
upper:"lonchura"
rank:"species"
sc:"oryzivora"
ja:"ブンチョウ"
}
{
"alien":true,
upper:"vidua"
rank:"species"
sc:"paradisaea"
ja:"ホウオウジャク"
}
{
"alien":true,
upper:"paroaria"
rank:"species"
sc:"coronata"
ja:"コウカンチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"bonasia"
sc:"vicinitas"
ja:"エゾライチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"muta"
sc:"japonica"
ja:"ライチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"soemmerringii"
sc:"scintillans"
ja:"ヤマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"soemmerringii"
sc:"subrufus"
ja:"ウスアカヤマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"soemmerringii"
sc:"intermedius"
ja:"シコクヤマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"soemmerringii"
sc:"soemmerringii"
ja:"アカヤマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"soemmerringii"
sc:"ijimae"
ja:"コシジロヤマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"colchicus"
sc:"robustipes"
ja:"キジ"
}
{
"alien":false,
rank:"subspecies"
upper:"colchicus"
sc:"tohkaidi"
ja:"トウカイキジ"
}
{
"alien":false,
rank:"subspecies"
upper:"colchicus"
sc:"tanensis"
ja:"シマキジ"
}
{
"alien":false,
rank:"subspecies"
upper:"colchicus"
sc:"versicolor"
ja:"キュウシュウキジ"
}
{
"alien":false,
rank:"subspecies"
upper:"fabalis"
sc:"middendorffii"
ja:"オオヒシクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"fabalis"
sc:"curtus"
ja:"ヒメヒシクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"fabalis"
sc:"serrirostris"
ja:"ヒシクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"anser"
sc:"rubrirostris"
ja:"ハイイロガン"
}
{
"alien":false,
rank:"subspecies"
upper:"albifrons"
sc:"albifrons"
ja:"マガン"
}
{
"alien":false,
rank:"subspecies"
upper:"caerulescens"
sc:"caerulescens"
ja:"ハクガン"
}
{
"alien":false,
rank:"subspecies"
upper:"caerulescens"
sc:"atlanticus"
ja:"オオハクガン"
}
{
"alien":false,
rank:"subspecies"
upper:"hutchinsii"
sc:"leucopareia"
ja:"シジュウカラガン"
}
{
"alien":false,
rank:"subspecies"
upper:"hutchinsii"
sc:"minima"
ja:"ヒメシジュウカラガン"
}
{
"alien":false,
rank:"subspecies"
upper:"bernicla"
sc:"orientalis"
ja:"コクガン"
}
{
"alien":false,
rank:"subspecies"
upper:"columbianus"
sc:"jankowskyi"
ja:"コハクチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"columbianus"
sc:"columbianus"
ja:"アメリカコハクチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"coromandelianus"
sc:"coromandelianus"
ja:"ナンキンオシ"
}
{
"alien":false,
rank:"subspecies"
upper:"strepera"
sc:"strepera"
ja:"オカヨシガモ"
}
{
"alien":false,
rank:"subspecies"
upper:"platyrhynchos"
sc:"platyrhynchos"
ja:"マガモ"
}
{
"alien":false,
rank:"subspecies"
upper:"crecca"
sc:"crecca"
ja:"コガモ"
}
{
"alien":false,
rank:"subspecies"
upper:"crecca"
sc:"carolinensis"
ja:"アメリカコガモ"
}
{
"alien":false,
rank:"subspecies"
upper:"marila"
sc:"marila"
ja:"スズガモ"
}
{
"alien":false,
rank:"subspecies"
upper:"fusca"
sc:"stejnegeri"
ja:"ビロードキンクロ"
}
{
"alien":false,
rank:"subspecies"
upper:"clangula"
sc:"clangula"
ja:"ホオジロガモ"
}
{
"alien":false,
rank:"subspecies"
upper:"merganser"
sc:"merganser"
ja:"カワアイサ"
}
{
"alien":false,
rank:"subspecies"
upper:"merganser"
sc:"orientalis"
ja:"コカワアイサ"
}
{
"alien":false,
rank:"subspecies"
upper:"ruficollis"
sc:"poggei"
ja:"カイツブリ"
}
{
"alien":false,
rank:"subspecies"
upper:"ruficollis"
sc:"kunikyonis"
ja:"ダイトウカイツブリ"
}
{
"alien":false,
rank:"subspecies"
upper:"grisegena"
sc:"holbollii"
ja:"アカエリカイツブリ"
}
{
"alien":false,
rank:"subspecies"
upper:"cristatus"
sc:"cristatus"
ja:"カンムリカイツブリ"
}
{
"alien":false,
rank:"subspecies"
upper:"auritus"
sc:"auritus"
ja:"ミミカイツブリ"
}
{
"alien":false,
rank:"subspecies"
upper:"nigricollis"
sc:"nigricollis"
ja:"ハジロカイツブリ"
}
{
"alien":false,
rank:"subspecies"
upper:"rubricauda"
sc:"rothschildi"
ja:"アカオネッタイチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"lepturus"
sc:"dorotheae"
ja:"シラオネッタイチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"oenas"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"janthina"
sc:"janthina"
ja:"カラスバト"
}
{
"alien":false,
rank:"subspecies"
upper:"janthina"
sc:"nitens"
ja:"アカガシラカラスバト"
}
{
"alien":false,
rank:"subspecies"
upper:"janthina"
sc:"stejnegeri"
ja:"ヨナグニカラスバト"
}
{
"alien":false,
rank:"subspecies"
upper:"orientalis"
sc:"orientalis"
ja:"キジバト"
}
{
"alien":false,
rank:"subspecies"
upper:"orientalis"
sc:"stimpsoni"
ja:"リュウキュウキジバト"
}
{
"alien":false,
rank:"subspecies"
upper:"decaocto"
sc:"decaocto"
ja:"シラコバト"
}
{
"alien":false,
rank:"subspecies"
upper:"tranquebarica"
sc:"humilis"
ja:"ベニバト"
}
{
"alien":false,
rank:"subspecies"
upper:"indica"
sc:"yamashinai"
ja:"キンバト"
}
{
"alien":false,
rank:"subspecies"
upper:"sieboldii"
sc:"sieboldii"
ja:"アオバト"
}
{
"alien":false,
rank:"subspecies"
upper:"formosae"
sc:"permagnus"
ja:"ズアカアオバト"
}
{
"alien":false,
rank:"subspecies"
upper:"formosae"
sc:"medioximus"
ja:"チュウダイズアカアオバト"
}
{
"alien":false,
rank:"subspecies"
upper:"leclancheri"
sc:"taiwanus"
ja:"クロアゴヒメアオバト"
}
{
"alien":false,
rank:"subspecies"
upper:"arctica"
sc:"viridigularis"
ja:"オオハム"
}
{
"alien":false,
rank:"subspecies"
upper:"glacialis"
sc:"rodgersii"
ja:"フルマカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"phaeopygia"
sc:"sandwichensis"
ja:"ハワイシロハラミズナギドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"pacificus"
sc:"cuneatus"
ja:"オナガミズナギドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"lherminieri"
sc:"bannermani"
ja:"セグロミズナギドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"oceanicus"
sc:"exasperatus"
ja:"アシナガウミツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucorhoa"
sc:"leucorhoa"
ja:"コシジロウミツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"furcata"
sc:"furcata"
ja:"ハイイロウミツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"minor"
sc:"minor"
ja:"オオグンカンドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"ariel"
sc:"ariel"
ja:"コグンカンドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"dactylatra"
sc:"personata"
ja:"アオツラカツオドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"sula"
sc:"rubripes"
ja:"アカアシカツオドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucogaster"
sc:"plotus"
ja:"カツオドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucogaster"
sc:"brewsteri"
ja:"シロガシラカツオドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"pelagicus"
sc:"pelagicus"
ja:"ヒメウ"
}
{
"alien":false,
rank:"subspecies"
upper:"carbo"
sc:"hanedae"
ja:"カワウ"
}
{
"alien":false,
rank:"subspecies"
upper:"stellaris"
sc:"stellaris"
ja:"サンカノゴイ"
}
{
"alien":false,
rank:"subspecies"
upper:"sinensis"
sc:"sinensis"
ja:"ヨシゴイ"
}
{
"alien":false,
rank:"subspecies"
upper:"sinensis"
sc:"bryani"
ja:"マリアナヨシゴイ"
}
{
"alien":false,
rank:"subspecies"
upper:"flavicollis"
sc:"flavicollis"
ja:"タカサゴクロサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"nycticorax"
sc:"nycticorax"
ja:"ゴイサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"caledonicus"
sc:"crassirostris"
ja:"ハシブトゴイ"
}
{
"alien":false,
rank:"subspecies"
upper:"striata"
sc:"amurensis"
ja:"ササゴイ"
}
{
"alien":false,
rank:"subspecies"
upper:"ibis"
sc:"coromandus"
ja:"アマサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"cinerea"
sc:"jouyi"
ja:"アオサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"purpurea"
sc:"manilensis"
ja:"ムラサキサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"alba"
ja:"ダイサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"modesta"
ja:"チュウダイサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"intermedia"
sc:"intermedia"
ja:"チュウサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"garzetta"
sc:"garzetta"
ja:"コサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"sacra"
sc:"sacra"
ja:"クロサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucorodia"
sc:"leucorodia"
ja:"ヘラサギ"
}
{
"alien":false,
rank:"subspecies"
upper:"canadensis"
sc:"canadensis"
ja:"カナダヅル"
}
{
"alien":false,
rank:"subspecies"
upper:"grus"
sc:"lilfordi"
ja:"クロヅル"
}
{
"alien":false,
rank:"subspecies"
upper:"eurizonoides"
sc:"sepiaria"
ja:"オオクイナ"
}
{
"alien":false,
rank:"subspecies"
upper:"striatus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"aquaticus"
sc:"indicus"
ja:"クイナ"
}
{
"alien":false,
rank:"subspecies"
upper:"phoenicurus"
sc:"phoenicurus"
ja:"シロハラクイナ"
}
{
"alien":false,
rank:"subspecies"
upper:"pusilla"
sc:"pusilla"
ja:"ヒメクイナ"
}
{
"alien":false,
rank:"subspecies"
upper:"fusca"
sc:"erythrothorax"
ja:"ヒクイナ"
}
{
"alien":false,
rank:"subspecies"
upper:"fusca"
sc:"phaeopyga"
ja:"リュウキュウヒクイナ"
}
{
"alien":false,
rank:"subspecies"
upper:"cinerea"
sc:"brevipes"
ja:"マミジロクイナ"
}
{
"alien":false,
rank:"subspecies"
upper:"chloropus"
sc:"chloropus"
ja:"バン"
}
{
"alien":false,
rank:"subspecies"
upper:"atra"
sc:"atra"
ja:"オオバン"
}
{
"alien":false,
rank:"subspecies"
upper:"tarda"
sc:"dybowskii"
ja:"ノガン"
}
{
"alien":false,
rank:"subspecies"
upper:"bengalensis"
sc:"lignator"
ja:"バンケン"
}
{
"alien":false,
rank:"subspecies"
upper:"scolopaceus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"lugubris"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"micropterus"
sc:"micropterus"
ja:"セグロカッコウ"
}
{
"alien":false,
rank:"subspecies"
upper:"canorus"
sc:"telephonus"
ja:"カッコウ"
}
{
"alien":false,
rank:"subspecies"
upper:"indicus"
sc:"jotaka"
ja:"ヨタカ"
}
{
"alien":false,
rank:"subspecies"
upper:"brevirostris"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"caudacutus"
sc:"caudacutus"
ja:"ハリオアマツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"pacificus"
sc:"pacificus"
ja:"キタアマツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"pacificus"
sc:"kurodae"
ja:"アマツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"nipalensis"
sc:"kuntzi"
ja:"ヒメアマツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"apricaria"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"hiaticula"
sc:"tundrae"
ja:"ハジロコチドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"dubius"
sc:"curonicus"
ja:"コチドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"alexandrinus"
sc:"alexandrinus"
ja:"ハシボソシロチドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"alexandrinus"
sc:"dealbatus"
ja:"シロチドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"mongolus"
sc:"mongolus"
ja:"モウコメダイチドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"mongolus"
sc:"stegmanni"
ja:"メダイチドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"ostralegus"
sc:"osculans"
ja:"ミヤコドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"himantopus"
sc:"himantopus"
ja:"セイタカシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"himantopus"
sc:"leucocephalus"
ja:"オーストラリアセイタカシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"solitaria"
sc:"japonica"
ja:"アオシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"gallinago"
sc:"gallinago"
ja:"タシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"griseus"
sc:"hendersoni"
ja:"アメリカオオハシシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"limosa"
sc:"melanuroides"
ja:"オグロシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"lapponica"
sc:"menzbieri"
ja:"コシジロオオソリハシシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"lapponica"
sc:"baueri"
ja:"オオソリハシシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"phaeopus"
sc:"variegatus"
ja:"チュウシャクシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"arquata"
sc:"orientalis"
ja:"ダイシャクシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"totanus"
sc:"ussuriensis"
ja:"アカアシシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"interpres"
sc:"interpres"
ja:"キョウジョシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"canutus"
sc:"rogersi"
ja:"コオバシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"ptilocnemis"
sc:"quarta"
ja:"チシマシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"alpina"
sc:"sakhalina"
ja:"ハマシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"alpina"
sc:"arcticola"
ja:"キタアラスカハマシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"falcinellus"
sc:"sibirica"
ja:"キリアイ"
}
{
"alien":false,
rank:"subspecies"
upper:"benghalensis"
sc:"benghalensis"
ja:"タマシギ"
}
{
"alien":false,
rank:"subspecies"
upper:"suscitator"
sc:"okinavensis"
ja:"ミフウズラ"
}
{
"alien":false,
rank:"subspecies"
upper:"stolidus"
sc:"pileatus"
ja:"クロアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"stolidus"
sc:"pullus"
ja:"リュウキュウクロアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"minutus"
sc:"marcusi"
ja:"ヒメクロアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"cerulea"
sc:"saxatilis"
ja:"ハイイロアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"candida"
ja:"シロアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"tridactyla"
sc:"pollicaris"
ja:"ミツユビカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"atricilla"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"canus"
sc:"kamtschatschensis"
ja:"カモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"canus"
sc:"heinei"
ja:"ニシシベリアカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"canus"
sc:"brachyrhynchus"
ja:"コカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"hyperboreus"
sc:"pallidissimus"
ja:"シロカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"glaucoides"
sc:"glaucoides"
ja:"アイスランドカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"glaucoides"
sc:"kumlieni"
ja:"クムリーンアイスランドカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"argentatus"
sc:"vegae"
ja:"セグロカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"argentatus"
sc:"smithsonianus"
ja:"アメリカセグロカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"cachinnans"
sc:"mongolicus"
ja:"キアシセグロカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"fuscus"
sc:"heuglini"
ja:"ニシセグロカモメ"
}
{
"alien":false,
rank:"subspecies"
upper:"fuscus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"nilotica"
sc:"nilotica"
ja:"ハシブトアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"bergii"
sc:"cristata"
ja:"オオアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"bengalensis"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"albifrons"
sc:"sinensis"
ja:"コアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"anaethetus"
sc:"anaethetus"
ja:"マミジロアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"fuscata"
sc:"nubilosa"
ja:"セグロアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"dougallii"
sc:"bangsi"
ja:"ベニアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"hirundo"
sc:"minussensis"
ja:"アカアシアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"hirundo"
sc:"longipennis"
ja:"アジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"hybrida"
sc:"javanicus"
ja:"クロハラアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"niger"
sc:"niger"
ja:"ハシグロクロハラアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"niger"
sc:"surinamensis"
ja:"アメリカハシグロクロハラアジサシ"
}
{
"alien":false,
rank:"subspecies"
upper:"alle"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"lomvia"
sc:"arra"
ja:"ハシブトウミガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"aalge"
sc:"inornata"
ja:"ウミガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"torda"
sc:"islandica"
ja:"オオハシウミガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"columba"
sc:"snowi"
ja:"ウミバト"
}
{
"alien":false,
rank:"subspecies"
upper:"columba"
sc:"kaiurka"
ja:"アリューシャンウミバト"
}
{
"alien":false,
rank:"subspecies"
upper:"haliaetus"
sc:"haliaetus"
ja:"ミサゴ"
}
{
"alien":false,
rank:"subspecies"
upper:"ptilorhynchus"
sc:"orientalis"
ja:"ハチクマ"
}
{
"alien":false,
rank:"subspecies"
upper:"caeruleus"
sc:"hypoleucus"
ja:"カタグロトビ"
}
{
"alien":false,
rank:"subspecies"
upper:"migrans"
sc:"lineatus"
ja:"トビ"
}
{
"alien":false,
rank:"subspecies"
upper:"albicilla"
sc:"albicilla"
ja:"オジロワシ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucocephalus"
sc:"washingtoniensis"
ja:"ハクトウワシ"
}
{
"alien":false,
rank:"subspecies"
upper:"cheela"
sc:"perplexus"
ja:"カンムリワシ"
}
{
"alien":false,
rank:"subspecies"
upper:"aeruginosus"
sc:"aeruginosus"
ja:"ヨーロッパチュウヒ"
}
{
"alien":false,
rank:"subspecies"
upper:"spilonotus"
sc:"spilonotus"
ja:"チュウヒ"
}
{
"alien":false,
rank:"subspecies"
upper:"cyaneus"
sc:"cyaneus"
ja:"ハイイロチュウヒ"
}
{
"alien":false,
rank:"subspecies"
upper:"gularis"
sc:"gularis"
ja:"ツミ"
}
{
"alien":false,
rank:"subspecies"
upper:"gularis"
sc:"iwasakii"
ja:"リュウキュウツミ"
}
{
"alien":false,
rank:"subspecies"
upper:"nisus"
sc:"nisosimilis"
ja:"ハイタカ"
}
{
"alien":false,
rank:"subspecies"
upper:"gentilis"
sc:"albidus"
ja:"シロオオタカ"
}
{
"alien":false,
rank:"subspecies"
upper:"gentilis"
sc:"fujiyamae"
ja:"オオタカ"
}
{
"alien":false,
rank:"subspecies"
upper:"buteo"
sc:"japonicus"
ja:"ノスリ"
}
{
"alien":false,
rank:"subspecies"
upper:"buteo"
sc:"toyoshimai"
ja:"オガサワラノスリ"
}
{
"alien":false,
rank:"subspecies"
upper:"buteo"
sc:"oshiroi"
ja:"ダイトウノスリ"
}
{
"alien":false,
rank:"subspecies"
upper:"lagopus"
sc:"menzbieri"
ja:"ケアシノスリ"
}
{
"alien":false,
rank:"subspecies"
upper:"chrysaetos"
sc:"japonica"
ja:"イヌワシ"
}
{
"alien":false,
rank:"subspecies"
upper:"nipalensis"
sc:"orientalis"
ja:"クマタカ"
}
{
"alien":false,
rank:"subspecies"
upper:"longimembris"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"lempiji"
sc:"ussuriensis"
ja:"サメイロオオコノハズク"
}
{
"alien":false,
rank:"subspecies"
upper:"lempiji"
sc:"semitorques"
ja:"オオコノハズク"
}
{
"alien":false,
rank:"subspecies"
upper:"lempiji"
sc:"pryeri"
ja:"リュウキュウオオコノハズク"
}
{
"alien":false,
rank:"subspecies"
upper:"sunia"
sc:"japonicus"
ja:"コノハズク"
}
{
"alien":false,
rank:"subspecies"
upper:"elegans"
sc:"interpositus"
ja:"ダイトウコノハズク"
}
{
"alien":false,
rank:"subspecies"
upper:"elegans"
sc:"elegans"
ja:"リュウキュウコノハズク"
}
{
"alien":false,
rank:"subspecies"
upper:"bubo"
sc:"kiautschensis"
ja:"タイリクワシミミズク"
}
{
"alien":false,
rank:"subspecies"
upper:"bubo"
sc:"borissowi"
ja:"ワシミミズク"
}
{
"alien":false,
rank:"subspecies"
upper:"blakistoni"
sc:"blakistoni"
ja:"シマフクロウ"
}
{
"alien":false,
rank:"subspecies"
upper:"uralensis"
sc:"japonica"
ja:"エゾフクロウ"
}
{
"alien":false,
rank:"subspecies"
upper:"uralensis"
sc:"hondoensis"
ja:"フクロウ"
}
{
"alien":false,
rank:"subspecies"
upper:"uralensis"
sc:"momiyamae"
ja:"モミヤマフクロウ"
}
{
"alien":false,
rank:"subspecies"
upper:"uralensis"
sc:"fuscescens"
ja:"キュウシュウフクロウ"
}
{
"alien":false,
rank:"subspecies"
upper:"uralensis"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"funereus"
sc:"magnus"
ja:"キンメフクロウ"
}
{
"alien":false,
rank:"subspecies"
upper:"scutulata"
sc:"macroptera"
ja:"チョウセンアオバズク"
}
{
"alien":false,
rank:"subspecies"
upper:"scutulata"
sc:"japonica"
ja:"アオバズク"
}
{
"alien":false,
rank:"subspecies"
upper:"scutulata"
sc:"totogo"
ja:"リュウキュウアオバズク"
}
{
"alien":false,
rank:"subspecies"
upper:"otus"
sc:"otus"
ja:"トラフズク"
}
{
"alien":false,
rank:"subspecies"
upper:"flammeus"
sc:"flammeus"
ja:"コミミズク"
}
{
"alien":false,
rank:"subspecies"
upper:"epops"
sc:"saturata"
ja:"ヤツガシラ"
}
{
"alien":false,
rank:"subspecies"
upper:"coromanda"
sc:"major"
ja:"アカショウビン"
}
{
"alien":false,
rank:"subspecies"
upper:"coromanda"
sc:"bangsi"
ja:"リュウキュウアカショウビン"
}
{
"alien":false,
rank:"subspecies"
upper:"smyrnensis"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"chloris"
sc:"collaris"
ja:"ナンヨウショウビン"
}
{
"alien":false,
rank:"subspecies"
upper:"atthis"
sc:"bengalensis"
ja:"カワセミ"
}
{
"alien":false,
rank:"subspecies"
upper:"erithaca"
sc:"erithaca"
ja:"ミツユビカワセミ"
}
{
"alien":false,
rank:"subspecies"
upper:"lugubris"
sc:"pallida"
ja:"エゾヤマセミ"
}
{
"alien":false,
rank:"subspecies"
upper:"lugubris"
sc:"lugubris"
ja:"ヤマセミ"
}
{
"alien":false,
rank:"subspecies"
upper:"orientalis"
sc:"calonyx"
ja:"ブッポウソウ"
}
{
"alien":false,
rank:"subspecies"
upper:"torquilla"
sc:"chinensis"
ja:"シベリアアリスイ"
}
{
"alien":false,
rank:"subspecies"
upper:"torquilla"
sc:"japonica"
ja:"アリスイ"
}
{
"alien":false,
rank:"subspecies"
upper:"hyperythrus"
sc:"subrufinus"
ja:"チャバラアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"seebohmi"
ja:"エゾコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"nippon"
ja:"コゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"matsudairai"
ja:"ミヤケコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"shikokuensis"
ja:"シコクコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"kotataki"
ja:"ツシマコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"kizuki"
ja:"キュウシュウコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"amamii"
ja:"アマミコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"nigrescens"
ja:"リュウキュウコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"orii"
ja:"オリイコゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"kizuki"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"minor"
sc:"amurensis"
ja:"コアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucotos"
sc:"subcirris"
ja:"エゾオオアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucotos"
sc:"stejnegeri"
ja:"オオアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucotos"
sc:"namiyei"
ja:"ナミエオオアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucotos"
sc:"owstoni"
ja:"オーストンオオアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucotos"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"major"
sc:"brevirostris"
ja:"ハシブトアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"major"
sc:"japonicus"
ja:"エゾアカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"major"
sc:"hondoensis"
ja:"アカゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"tridactylus"
sc:"inouyei"
ja:"ミユビゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"javensis"
sc:"richardsi"
ja:"キタタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"martius"
sc:"martius"
ja:"クマゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"awokera"
sc:"awokera"
ja:"アオゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"awokera"
sc:"horii"
ja:"カゴシマアオゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"awokera"
sc:"takatsukasae"
ja:"タネアオゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"canus"
sc:"jessoensis"
ja:"ヤマゲラ"
}
{
"alien":false,
rank:"subspecies"
upper:"tinnunculus"
sc:"interstinctus"
ja:"チョウゲンボウ"
}
{
"alien":false,
rank:"subspecies"
upper:"columbarius"
sc:"insignis"
ja:"コチョウゲンボウ"
}
{
"alien":false,
rank:"subspecies"
upper:"columbarius"
sc:"pacificus"
ja:"ヒガシコチョウゲンボウ"
}
{
"alien":false,
rank:"subspecies"
upper:"subbuteo"
sc:"subbuteo"
ja:"チゴハヤブサ"
}
{
"alien":false,
rank:"subspecies"
upper:"cherrug"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"peregrinus"
sc:"japonensis"
ja:"ハヤブサ"
}
{
"alien":false,
rank:"subspecies"
upper:"peregrinus"
sc:"pealei"
ja:"オオハヤブサ"
}
{
"alien":false,
rank:"subspecies"
upper:"peregrinus"
sc:"furuitii"
ja:"シマハヤブサ"
}
{
"alien":false,
rank:"subspecies"
upper:"peregrinus"
sc:"anatum"
ja:"アメリカハヤブサ"
}
{
"alien":false,
rank:"subspecies"
upper:"sordida"
sc:"cucullata"
ja:"ズグロヤイロチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucorynchus"
sc:"leucorynchus"
ja:"モリツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"melaschistos"
sc:"intermedia"
ja:"アサクラサンショウクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"divaricatus"
sc:"divaricatus"
ja:"サンショウクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"divaricatus"
sc:"tegimae"
ja:"リュウキュウサンショウクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"chinensis"
sc:"diffusus"
ja:"コウライウグイス"
}
{
"alien":false,
rank:"subspecies"
upper:"macrocercus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"leucophaeus"
sc:"leucogenis"
ja:"ハイイロオウチュウ"
}
{
"alien":false,
rank:"subspecies"
upper:"hottentottus"
sc:"brevirostris"
ja:"カンムリオウチュウ"
}
{
"alien":false,
rank:"subspecies"
upper:"azurea"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"atrocaudata"
sc:"atrocaudata"
ja:"サンコウチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"atrocaudata"
sc:"illex"
ja:"リュウキュウサンコウチョウ"
}
{
"alien":false,
rank:"subspecies"
upper:"bucephalus"
sc:"bucephalus"
ja:"モズ"
}
{
"alien":false,
rank:"subspecies"
upper:"cristatus"
sc:"lucionensis"
ja:"シマアカモズ"
}
{
"alien":false,
rank:"subspecies"
upper:"cristatus"
sc:"superciliosus"
ja:"アカモズ"
}
{
"alien":false,
rank:"subspecies"
upper:"collurio"
sc:"pallidifrons"
ja:"セアカモズ"
}
{
"alien":false,
rank:"subspecies"
upper:"isabellinus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"schach"
sc:"schach"
ja:"タカサゴモズ"
}
{
"alien":false,
rank:"subspecies"
upper:"excubitor"
sc:"mollis"
ja:"シベリアオオモズ"
}
{
"alien":false,
rank:"subspecies"
upper:"excubitor"
sc:"bianchii"
ja:"オオモズ"
}
{
"alien":false,
rank:"subspecies"
upper:"sphenocercus"
sc:"sphenocercus"
ja:"オオカラモズ"
}
{
"alien":false,
rank:"subspecies"
upper:"glandarius"
sc:"brandtii"
ja:"ミヤマカケス"
}
{
"alien":false,
rank:"subspecies"
upper:"glandarius"
sc:"japonicus"
ja:"カケス"
}
{
"alien":false,
rank:"subspecies"
upper:"glandarius"
sc:"tokugawae"
ja:"サドカケス"
}
{
"alien":false,
rank:"subspecies"
upper:"glandarius"
sc:"orii"
ja:"ヤクシマカケス"
}
{
"alien":false,
rank:"subspecies"
upper:"cyanus"
sc:"japonica"
ja:"オナガ"
}
{
"alien":false,
rank:"subspecies"
upper:"pica"
sc:"serica"
ja:"カササギ"
}
{
"alien":false,
rank:"subspecies"
upper:"caryocatactes"
sc:"macrorhynchos"
ja:"ハシナガホシガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"caryocatactes"
sc:"japonica"
ja:"ホシガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"monedula"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"frugilegus"
sc:"pastinator"
ja:"ミヤマガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"corone"
sc:"orientalis"
ja:"ハシボソガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"macrorhynchos"
sc:"mandshuricus"
ja:"チョウセンハシブトガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"macrorhynchos"
sc:"japonensis"
ja:"ハシブトガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"macrorhynchos"
sc:"connectens"
ja:"リュウキュウハシブトガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"macrorhynchos"
sc:"osai"
ja:"オサハシブトガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"corax"
sc:"kamtschaticus"
ja:"ワタリガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"regulus"
sc:"japonensis"
ja:"キクイタダキ"
}
{
"alien":false,
rank:"subspecies"
upper:"pendulinus"
sc:"consobrinus"
ja:"ツリスガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"palustris"
sc:"hensoni"
ja:"ハシブトガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"montanus"
sc:"sachalinensis"
ja:"カラフトコガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"montanus"
sc:"restrictus"
ja:"コガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"varius"
ja:"ヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"namiyei"
ja:"ナミエヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"owstoni"
ja:"オーストンヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"orii"
ja:"ダイトウヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"sunsunpi"
ja:"タネヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"yakushimensis"
ja:"ヤクシマヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"amamii"
ja:"アマミヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"varius"
sc:"olivaceus"
ja:"オリイヤマガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"ater"
sc:"insularis"
ja:"ヒガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"minor"
sc:"minor"
ja:"シジュウカラ"
}
{
"alien":false,
rank:"subspecies"
upper:"minor"
sc:"amamiensis"
ja:"アマミシジュウカラ"
}
{
"alien":false,
rank:"subspecies"
upper:"minor"
sc:"okinawae"
ja:"オキナワシジュウカラ"
}
{
"alien":false,
rank:"subspecies"
upper:"minor"
sc:"nigriloris"
ja:"イシガキシジュウカラ"
}
{
"alien":false,
rank:"subspecies"
upper:"minor"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"cyanus"
sc:"tianschanicus"
ja:"ルリガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"biarmicus"
sc:"russicus"
ja:"ヒゲガラ"
}
{
"alien":false,
rank:"subspecies"
upper:"bimaculata"
sc:"torquata"
ja:"クビワコウテンシ"
}
{
"alien":false,
rank:"subspecies"
upper:"brachydactyla"
sc:"longipennis"
ja:"ヒメコウテンシ"
}
{
"alien":false,
rank:"subspecies"
upper:"cheleensis"
sc:"cheleensis"
ja:"コヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"arvensis"
sc:"pekinensis"
ja:"オオヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"arvensis"
sc:"lonnbergi"
ja:"カラフトチュウヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"arvensis"
sc:"japonica"
ja:"ヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"alpestris"
sc:"flava"
ja:"ハマヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"paludicola"
sc:"chinensis"
ja:"タイワンショウドウツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"riparia"
sc:"ijimae"
ja:"ショウドウツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"rustica"
sc:"saturata"
ja:"アカハラツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"rustica"
sc:"gutturalis"
ja:"ツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"tahitica"
sc:"namiyei"
ja:"リュウキュウツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"daurica"
sc:"japonica"
ja:"コシアカツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"urbicum"
sc:"lagopodum"
ja:"ニシイワツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"dasypus"
sc:"dasypus"
ja:"イワツバメ"
}
{
"alien":false,
rank:"subspecies"
upper:"sinensis"
sc:"sinensis"
ja:"シロガシラ"
}
{
"alien":false,
rank:"subspecies"
upper:"sinensis"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"amaurotis"
ja:"ヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"squamiceps"
ja:"オガサワラヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"magnirostris"
ja:"ハシブトヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"borodinonis"
ja:"ダイトウヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"ogawae"
ja:"アマミヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"pryeri"
ja:"リュウキュウヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"stejnegeri"
ja:"イシガキヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"amaurotis"
sc:"nagamichii"
ja:"タイワンヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"diphone"
sc:"sakhalinensis"
ja:"カラフトウグイス"
}
{
"alien":false,
rank:"subspecies"
upper:"diphone"
sc:"borealis"
ja:"チョウセンウグイス"
}
{
"alien":false,
rank:"subspecies"
upper:"diphone"
sc:"cantans"
ja:"ウグイス"
}
{
"alien":false,
rank:"subspecies"
upper:"diphone"
sc:"diphone"
ja:"ハシナガウグイス"
}
{
"alien":false,
rank:"subspecies"
upper:"diphone"
sc:"restricta"
ja:"ダイトウウグイス"
}
{
"alien":false,
rank:"subspecies"
upper:"diphone"
sc:"riukiuensis"
ja:"リュウキュウウグイス"
}
{
"alien":false,
rank:"subspecies"
upper:"caudatus"
sc:"japonicus"
ja:"シマエナガ"
}
{
"alien":false,
rank:"subspecies"
upper:"caudatus"
sc:"magnus"
ja:"チョウセンエナガ"
}
{
"alien":false,
rank:"subspecies"
upper:"caudatus"
sc:"trivirgatus"
ja:"エナガ"
}
{
"alien":false,
rank:"subspecies"
upper:"caudatus"
sc:"kiusiuensis"
ja:"キュウシュウエナガ"
}
{
"alien":false,
rank:"subspecies"
upper:"trochilus"
sc:"yakutensis"
ja:"キタヤナギムシクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"collybita"
sc:"tristis"
ja:"チフチャフ"
}
{
"alien":false,
rank:"subspecies"
upper:"fuscatus"
sc:"fuscatus"
ja:"ムジセッカ"
}
{
"alien":false,
rank:"subspecies"
upper:"affinis"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"borealis"
sc:"borealis"
ja:"コムシクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"borealis"
sc:"kennicotti"
ja:"アメリカコムシクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"curruca"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"familiare"
sc:"familiare"
ja:"メグロ"
}
{
"alien":false,
rank:"subspecies"
upper:"familiare"
sc:"hahasima"
ja:"ハハジマメグロ"
}
{
"alien":false,
rank:"subspecies"
upper:"japonicus"
sc:"japonicus"
ja:"メジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"japonicus"
sc:"stejnegeri"
ja:"シチトウメジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"japonicus"
sc:"alani"
ja:"イオウトウメジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"japonicus"
sc:"daitoensis"
ja:"ダイトウメジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"japonicus"
sc:"insularis"
ja:"シマメジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"japonicus"
sc:"loochooensis"
ja:"リュウキュウメジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"certhiola"
sc:"rubescens"
ja:"シベリアセンニュウ"
}
{
"alien":false,
rank:"subspecies"
upper:"pryeri"
sc:"pryeri"
ja:"オオセッカ"
}
{
"alien":false,
rank:"subspecies"
upper:"fasciolata"
sc:"amnicola"
ja:"エゾセンニュウ"
}
{
"alien":false,
rank:"subspecies"
upper:"bistrigiceps"
sc:"bistrigiceps"
ja:"コヨシキリ"
}
{
"alien":false,
rank:"subspecies"
upper:"agricola"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"aedon"
sc:"stegmanni"
ja:"ハシブトオオヨシキリ"
}
{
"alien":false,
rank:"subspecies"
upper:"juncidis"
sc:"brunniceps"
ja:"セッカ"
}
{
"alien":false,
rank:"subspecies"
upper:"garrulus"
sc:"centralasiae"
ja:"キレンジャク"
}
{
"alien":false,
rank:"subspecies"
upper:"europaea"
sc:"asiatica"
ja:"シロハラゴジュウカラ"
}
{
"alien":false,
rank:"subspecies"
upper:"europaea"
sc:"amurensis"
ja:"ゴジュウカラ"
}
{
"alien":false,
rank:"subspecies"
upper:"europaea"
sc:"roseilia"
ja:"キュウシュウゴジュウカラ"
}
{
"alien":false,
rank:"subspecies"
upper:"familiaris"
sc:"daurica"
ja:"キタキバシリ"
}
{
"alien":false,
rank:"subspecies"
upper:"familiaris"
sc:"japonica"
ja:"キバシリ"
}
{
"alien":false,
rank:"subspecies"
upper:"troglodytes"
sc:"dauricus"
ja:"チョウセンミソサザイ"
}
{
"alien":false,
rank:"subspecies"
upper:"troglodytes"
sc:"fumigatus"
ja:"ミソサザイ"
}
{
"alien":false,
rank:"subspecies"
upper:"troglodytes"
sc:"mosukei"
ja:"モスケミソサザイ"
}
{
"alien":false,
rank:"subspecies"
upper:"troglodytes"
sc:"orii"
ja:"ダイトウミソサザイ"
}
{
"alien":false,
rank:"subspecies"
upper:"troglodytes"
sc:"ogawae"
ja:"オガワミソサザイ"
}
{
"alien":false,
rank:"subspecies"
upper:"vulgaris"
sc:"poltaratskyi"
ja:"ホシムクドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"pallasii"
sc:"pallasii"
ja:"カワガラス"
}
{
"alien":false,
rank:"subspecies"
upper:"sibirica"
sc:"davisoni"
ja:"マミジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"dauma"
sc:"aurea"
ja:"PI:NAME:<NAME>END_PIツグPI:NAME:<NAME>END_PI"
}
{
"alien":false,
rank:"subspecies"
upper:"dauma"
sc:"major"
ja:"PI:NAME:<NAME>END_PIオトラツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"dauma"
sc:"iriomotensis"
ja:"コトラツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"minimus"
sc:"aliciae"
ja:"ハイイロチャツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"merula"
sc:"mandarinus"
ja:"クロウタドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"chrysolaus"
sc:"orii"
ja:"PI:NAME:<NAME>END_PIハラ"
}
{
"alien":false,
rank:"subspecies"
upper:"chrysolaus"
sc:"chrysolaus"
ja:"PI:NAME:<NAME>END_PI"
}
{
"alien":false,
rank:"subspecies"
upper:"ruficollis"
sc:"atrogularis"
ja:"PI:NAME:<NAME>END_PIドグロツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"ruficollis"
sc:"ruficollis"
ja:"PI:NAME:<NAME>END_PIツPI:NAME:<NAME>END_PI"
}
{
"alien":false,
rank:"subspecies"
upper:"naumanni"
sc:"eunomus"
ja:"ツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"naumanni"
sc:"naumanni"
ja:"PI:NAME:<NAME>END_PIチジョウツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"iliacus"
sc:"iliacus"
ja:"PI:NAME:<NAME>END_PIキアカツグミ"
}
{
"alien":false,
rank:"subspecies"
upper:"viscivorus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"rubecula"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"akahige"
sc:"akahige"
ja:"コマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"akahige"
sc:"tanensis"
ja:"タネコマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"komadori"
sc:"komadori"
ja:"アカヒゲ"
}
{
"alien":false,
rank:"subspecies"
upper:"komadori"
sc:"namiyei"
ja:"ホントウアカヒゲ"
}
{
"alien":false,
rank:"subspecies"
upper:"komadori"
sc:"subrufus"
ja:"ウスアカヒゲ"
}
{
"alien":false,
rank:"subspecies"
upper:"svecica"
sc:"svecica"
ja:"オガワコマドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"cyane"
sc:"bochaiensis"
ja:"コルリ"
}
{
"alien":false,
rank:"subspecies"
upper:"cyanurus"
sc:"cyanurus"
ja:"ルリビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"ochruros"
sc:"rufiventris"
ja:"クロジョウビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"phoenicurus"
sc:"phoenicurus"
ja:"シロビタイジョウビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"auroreus"
sc:"auroreus"
ja:"ジョウビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"torquatus"
sc:"stejnegeri"
ja:"ノビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"caprata"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"oenanthe"
sc:"oenanthe"
ja:"ハシグロヒタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"deserti"
sc:"oreophila"
ja:"サバクヒタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"solitarius"
sc:"pandoo"
ja:"アオハライソヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"solitarius"
sc:"philippensis"
ja:"イソヒヨドリ"
}
{
"alien":false,
rank:"subspecies"
upper:"striata"
sc:"mongola"
ja:"ムナフヒタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"sibirica"
sc:"sibirica"
ja:"サメビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"dauurica"
sc:"dauurica"
ja:"コサメビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"hypoleuca"
sc:"sibirica"
ja:"マダラヒタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"narcissina"
sc:"narcissina"
ja:"キビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"narcissina"
sc:"owstoni"
ja:"リュウキュウキビタキ"
}
{
"alien":false,
rank:"subspecies"
upper:"cyanomelana"
sc:"cumatilis"
ja:"チョウセンオオルリ"
}
{
"alien":false,
rank:"subspecies"
upper:"cyanomelana"
sc:"cyanomelana"
ja:"オオルリ"
}
{
"alien":false,
rank:"subspecies"
upper:"thalassinus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"vivida"
sc:"vivida"
ja:"チャバラオオルリ"
}
{
"alien":false,
rank:"subspecies"
upper:"collaris"
sc:"erythropygia"
ja:"イワヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"montanella"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"domesticus"
sc:"domesticus"
ja:"イエスズメ"
}
{
"alien":false,
rank:"subspecies"
upper:"rutilans"
sc:"rutilans"
ja:"ニュウナイスズメ"
}
{
"alien":false,
rank:"subspecies"
upper:"montanus"
sc:"saturatus"
ja:"スズメ"
}
{
"alien":false,
rank:"subspecies"
upper:"flava"
sc:"plexa"
ja:"シベリアツメナガセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"flava"
sc:"leucocephala"
ja:"カオジロツメナガセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"flava"
sc:"macronyx"
ja:"キタツメナガセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"flava"
sc:"simillima"
ja:"マミジロツメナガセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"flava"
sc:"taivana"
ja:"ツメナガセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"citreola"
sc:"citreola"
ja:"キガシラセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"cinerea"
sc:"cinerea"
ja:"キセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"dukhunensis"
ja:"ニシシベリアハクセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"personata"
ja:"メンガタハクセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"alboides"
ja:"ネパールハクセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"baicalensis"
ja:"シベリアハクセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"ocularis"
ja:"タイワンハクセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"lugens"
ja:"ハクセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"alba"
sc:"leucopsis"
ja:"ホオジロハクセキレイ"
}
{
"alien":false,
rank:"subspecies"
upper:"richardi"
sc:"richardi"
ja:"マミジロタヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"pratensis"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"trivialis"
sc:"trivialis"
ja:"ヨーロッパビンズイ"
}
{
"alien":false,
rank:"subspecies"
upper:"hodgsoni"
sc:"yunnanensis"
ja:"カラフトビンズイ"
}
{
"alien":false,
rank:"subspecies"
upper:"hodgsoni"
sc:"hodgsoni"
ja:"ビンズイ"
}
{
"alien":false,
rank:"subspecies"
upper:"gustavi"
sc:"gustavi"
ja:"セジロタヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"rubescens"
sc:"japonicus"
ja:"タヒバリ"
}
{
"alien":false,
rank:"subspecies"
upper:"coelebs"
sc:"coelebs"
ja:"ズアオアトリ"
}
{
"alien":false,
rank:"subspecies"
upper:"sinica"
sc:"kawarahiba"
ja:"オオカワラヒワ"
}
{
"alien":false,
rank:"subspecies"
upper:"sinica"
sc:"minor"
ja:"カワラヒワ"
}
{
"alien":false,
rank:"subspecies"
upper:"sinica"
sc:"kittlitzi"
ja:"オガサワラカワラヒワ"
}
{
"alien":false,
rank:"subspecies"
upper:"flammea"
sc:"flammea"
ja:"ベニヒワ"
}
{
"alien":false,
rank:"subspecies"
upper:"hornemanni"
sc:"exilipes"
ja:"コベニヒワ"
}
{
"alien":false,
rank:"subspecies"
upper:"arctoa"
sc:"brunneonucha"
ja:"ハギマシコ"
}
{
"alien":false,
rank:"subspecies"
upper:"sibiricus"
sc:"sanguinolentus"
ja:"ベニマシコ"
}
{
"alien":false,
rank:"subspecies"
upper:"erythrinus"
sc:"grebnitskii"
ja:"アカマシコ"
}
{
"alien":false,
rank:"subspecies"
upper:"enucleator"
sc:"kamtschatkensis"
ja:"コバシギンザンマシコ"
}
{
"alien":false,
rank:"subspecies"
upper:"enucleator"
sc:"sakhalinensis"
ja:"ギンザンマシコ"
}
{
"alien":false,
rank:"subspecies"
upper:"curvirostra"
sc:"japonica"
ja:"イスカ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucoptera"
sc:"bifasciata"
ja:"ナキイスカ"
}
{
"alien":false,
rank:"subspecies"
upper:"pyrrhula"
sc:"cassinii"
ja:"ベニバラウソ"
}
{
"alien":false,
rank:"subspecies"
upper:"pyrrhula"
sc:"rosacea"
ja:"アカウソ"
}
{
"alien":false,
rank:"subspecies"
upper:"pyrrhula"
sc:"griseiventris"
ja:"ウソ"
}
{
"alien":false,
rank:"subspecies"
upper:"coccothraustes"
sc:"coccothraustes"
ja:"シベリアシメ"
}
{
"alien":false,
rank:"subspecies"
upper:"coccothraustes"
sc:"japonicus"
ja:"シメ"
}
{
"alien":false,
rank:"subspecies"
upper:"migratoria"
sc:"migratoria"
ja:"コイカル"
}
{
"alien":false,
rank:"subspecies"
upper:"personata"
sc:"personata"
ja:"イカル"
}
{
"alien":false,
rank:"subspecies"
upper:"lapponicus"
sc:"coloratus"
ja:"ツメナガホオジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"nivalis"
sc:"vlasowae"
ja:"ユキホオジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"nivalis"
sc:"townsendi"
ja:"オオユキホオジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"coronata"
sc:"coronata"
ja:"キヅタアメリカムシクイ"
}
{
"alien":false,
rank:"subspecies"
upper:"pusilla"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"citrinella"
sc:"erythrogenys"
ja:"キアオジ"
}
{
"alien":false,
rank:"subspecies"
upper:"leucocephalos"
sc:"leucocephalos"
ja:"シラガホオジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"cioides"
sc:"ciopsis"
ja:"ホオジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"buchanani"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"fucata"
sc:"fucata"
ja:"ホオアカ"
}
{
"alien":false,
rank:"subspecies"
upper:"rustica"
sc:"latifascia"
ja:"カシラダカ"
}
{
"alien":false,
rank:"subspecies"
upper:"elegans"
sc:"elegans"
ja:"ミヤマホオジロ"
}
{
"alien":false,
rank:"subspecies"
upper:"aureola"
sc:"ornata"
ja:"シマアオジ"
}
{
"alien":false,
rank:"subspecies"
upper:"spodocephala"
sc:"spodocephala"
ja:"シベリアアオジ"
}
{
"alien":false,
rank:"subspecies"
upper:"spodocephala"
sc:"personata"
ja:"アオジ"
}
{
"alien":false,
rank:"subspecies"
upper:"pallasi"
sc:"polaris"
ja:"シベリアジュリン"
}
{
"alien":false,
rank:"subspecies"
upper:"pallasi"
sc:"pallasi"
ja:"オオシベリアジュリン"
}
{
"alien":false,
rank:"subspecies"
upper:"yessoensis"
sc:"yessoensis"
ja:"コジュリン"
}
{
"alien":false,
rank:"subspecies"
upper:"schoeniclus"
sc:"pyrrhulina"
ja:"オオジュリン"
}
{
"alien":false,
rank:"subspecies"
upper:"iliaca"
sc:"unalaschcensis"
ja:"ゴマフスズメ"
}
{
"alien":false,
rank:"subspecies"
upper:"melodia"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":false,
rank:"subspecies"
upper:"leucophrys"
sc:"gambelii"
ja:"ミヤマシトド"
}
{
"alien":false,
rank:"subspecies"
upper:"sandwichensis"
sc:"sandwichensis"
ja:"サバンナシトド"
}
{
"alien":true,
rank:"subspecies"
upper:"thoracicus"
sc:"thoracicus"
ja:"コジュケイ"
}
{
"alien":true,
rank:"subspecies"
upper:"thoracicus"
sc:"sonorivox"
ja:"テッケイ"
}
{
"alien":true,
rank:"subspecies"
upper:"soemmerringii"
sc:"scintillans"
ja:"ヤマドリ"
}
{
"alien":true,
rank:"subspecies"
upper:"soemmerringii"
sc:"subrufus"
ja:"ウスアカヤマドリ"
}
{
"alien":true,
rank:"subspecies"
upper:"soemmerringii"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"colchicus"
sc:"karpowi"
ja:"コウライキジ"
}
{
"alien":true,
rank:"subspecies"
upper:"colchicus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"canadensis"
sc:"moffitti"
ja:"オオカナダガン"
}
{
"alien":true,
rank:"subspecies"
upper:"decaocto"
sc:"decaocto"
ja:"シラコバト"
}
{
"alien":true,
rank:"subspecies"
upper:"himantopus"
sc:"mexicanus"
ja:"クロエリセイタカシギ"
}
{
"alien":true,
rank:"subspecies"
upper:"krameri"
sc:"manillensis"
ja:"ワカケホンセイインコ"
}
{
"alien":true,
rank:"subspecies"
upper:"alexandri"
sc:"fasciata"
ja:"ダルマインコ"
}
{
"alien":true,
rank:"subspecies"
upper:"monachus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"pica"
sc:"serica"
ja:"カササギ"
}
{
"alien":true,
rank:"subspecies"
upper:"japonicus"
sc:"stejnegeri"
ja:"シチトウメジロ"
}
{
"alien":true,
rank:"subspecies"
upper:"japonicus"
sc:"alani"
ja:"イオウトウメジロ"
}
{
"alien":true,
rank:"subspecies"
upper:"canorus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"cineraceus"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"sannio"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"lutea"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"cristatellus"
sc:"cristatellus"
ja:"ハッカチョウ"
}
{
"alien":true,
rank:"subspecies"
upper:"tristis"
sc:"tristis"
ja:"インドハッカ"
}
{
"alien":true,
rank:"subspecies"
upper:"contra"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"intermedius"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"orix"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"melpoda"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"amandava"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"striata"
sc:"ssp."
ja:"亜種不明"
}
{
"alien":true,
rank:"subspecies"
upper:"punctulata"
sc:"topela"
ja:"シマキンパラ"
}
{
"alien":true,
rank:"subspecies"
upper:"atricapilla"
sc:"ssp."
ja:"亜種不明"
}
]
db.createCollection 'names'
rank_relationships =
family: "order"
genus: "family"
species: "genus"
subspecies: "species"
for name in data_names
if db.names.find(name).count() is 0
if name.upper
upper_rank = rank_relationships[name.rank]
upper_id = db.names.find({
rank: upper_rank
sc: name.upper
})[0]._id
name.upper = upper_rank
name.upper_id = upper_id
db.names.insert name
|
[
{
"context": "Creator.Apps.love = \n\turl: \"/app/love\"\n\tname: \"交友\"\n\ticon: \"ion-ios-people-outline\" \n\ticon_slds: \"do",
"end": 49,
"score": 0.9513375759124756,
"start": 47,
"tag": "NAME",
"value": "交友"
}
] | creator/packages/steedos-love/love-app.coffee | yicone/steedos-platform | 42 | Creator.Apps.love =
url: "/app/love"
name: "交友"
icon: "ion-ios-people-outline"
icon_slds: "document"
visible:true
is_creator:true
objects: [
"vip_customers",
"love_about_me",
"love_answer",
"love_answer2",
"love_test",
"love_hobby",
"love_invite_codes",
"love_looking_for",
"love_result",
"love_friends",
"love_educational_experience",
"love_work_experience",
"love_post",
"love_post_comment"
]
| 120343 | Creator.Apps.love =
url: "/app/love"
name: "<NAME>"
icon: "ion-ios-people-outline"
icon_slds: "document"
visible:true
is_creator:true
objects: [
"vip_customers",
"love_about_me",
"love_answer",
"love_answer2",
"love_test",
"love_hobby",
"love_invite_codes",
"love_looking_for",
"love_result",
"love_friends",
"love_educational_experience",
"love_work_experience",
"love_post",
"love_post_comment"
]
| true | Creator.Apps.love =
url: "/app/love"
name: "PI:NAME:<NAME>END_PI"
icon: "ion-ios-people-outline"
icon_slds: "document"
visible:true
is_creator:true
objects: [
"vip_customers",
"love_about_me",
"love_answer",
"love_answer2",
"love_test",
"love_hobby",
"love_invite_codes",
"love_looking_for",
"love_result",
"love_friends",
"love_educational_experience",
"love_work_experience",
"love_post",
"love_post_comment"
]
|
[
{
"context": " urlRoot: '/students'\n defaults:\n name: 'Unknown'\n age: null\n sex: null\n\n beforeEach ->",
"end": 223,
"score": 0.9198020100593567,
"start": 216,
"tag": "NAME",
"value": "Unknown"
},
{
"context": "dent\n @student = new studentModel\n nam... | spec/app/source/modelsSpec.coffee | alexserver/learning-backbone | 0 | # Test Models
sinon = require 'sinon'
describe 'Backbone Models', ->
# Creating a default Model class for all tests
class studentModel extends Backbone.Model
urlRoot: '/students'
defaults:
name: 'Unknown'
age: null
sex: null
beforeEach ->
sinon.stub(jQuery, 'ajax')
# Creating one student
@student = new studentModel
name: 'John New'
age: 17
sex: 'M'
afterEach ->
jQuery.ajax.restore()
it 'Should create a model', ->
# Creating one student
student = new studentModel
name: 'John New'
age: 17
attrs = student.attributes
expect(attrs).toEqual {name: 'John New', age: 17, sex: null}
it 'Should fetch a model', () ->
# Setting student id for fetching
@student.set 'id', 4
@student.fetch
success: sinon.spy()
expect(jQuery.ajax.calledWithMatch { url: '/students/4' }).toBe true
it 'Should save a model', ->
# Saving student model
@student.save
success: sinon.spy()
expect jQuery.ajax.calledWithMatch
url: '/students'
.toBe true
expect jQuery.ajax.getCall(0).args[0].data
.toEqual JSON.stringify
name: 'John New'
age: 17
sex: 'M'
it 'Should react to changes', (done)->
@student.on 'change', (model) ->
expect(model.previousAttributes()).not.toEqual model.attributes
done()
@student.set 'name', 'Marty McFly'
it 'Should react to a specific attribute change', ->
name_callback = sinon.spy()
age_callback = sinon.spy()
@student.on 'change:name', name_callback
@student.on 'change:age', age_callback
@student.set 'name', 'David Gilmour'
expect(name_callback.called).toBe true
expect(age_callback.called).toBe false
# spy.args[0] references the arguments for the first call.
# So [0][0] references to the first call, first argumet: model
expect(name_callback.args[0][0].get 'name').toEqual 'David Gilmour'
it 'Should save an existing model', ->
@student.set 'id', 4
@student.set 'name', 'Ace Ventura'
@student.save
success: sinon.spy()
expect jQuery.ajax.calledWithMatch
url: '/students/4'
.toBe true
expect jQuery.ajax.getCall(0).args[0].data
.toEqual JSON.stringify
name: 'Ace Ventura'
age: 17
sex: 'M'
id: 4
| 101621 | # Test Models
sinon = require 'sinon'
describe 'Backbone Models', ->
# Creating a default Model class for all tests
class studentModel extends Backbone.Model
urlRoot: '/students'
defaults:
name: '<NAME>'
age: null
sex: null
beforeEach ->
sinon.stub(jQuery, 'ajax')
# Creating one student
@student = new studentModel
name: '<NAME>'
age: 17
sex: 'M'
afterEach ->
jQuery.ajax.restore()
it 'Should create a model', ->
# Creating one student
student = new studentModel
name: '<NAME>'
age: 17
attrs = student.attributes
expect(attrs).toEqual {name: '<NAME>', age: 17, sex: null}
it 'Should fetch a model', () ->
# Setting student id for fetching
@student.set 'id', 4
@student.fetch
success: sinon.spy()
expect(jQuery.ajax.calledWithMatch { url: '/students/4' }).toBe true
it 'Should save a model', ->
# Saving student model
@student.save
success: sinon.spy()
expect jQuery.ajax.calledWithMatch
url: '/students'
.toBe true
expect jQuery.ajax.getCall(0).args[0].data
.toEqual JSON.stringify
name: '<NAME>'
age: 17
sex: 'M'
it 'Should react to changes', (done)->
@student.on 'change', (model) ->
expect(model.previousAttributes()).not.toEqual model.attributes
done()
@student.set 'name', '<NAME>'
it 'Should react to a specific attribute change', ->
name_callback = sinon.spy()
age_callback = sinon.spy()
@student.on 'change:name', name_callback
@student.on 'change:age', age_callback
@student.set 'name', '<NAME>'
expect(name_callback.called).toBe true
expect(age_callback.called).toBe false
# spy.args[0] references the arguments for the first call.
# So [0][0] references to the first call, first argumet: model
expect(name_callback.args[0][0].get 'name').toEqual '<NAME>'
it 'Should save an existing model', ->
@student.set 'id', 4
@student.set 'name', '<NAME>'
@student.save
success: sinon.spy()
expect jQuery.ajax.calledWithMatch
url: '/students/4'
.toBe true
expect jQuery.ajax.getCall(0).args[0].data
.toEqual JSON.stringify
name: '<NAME>'
age: 17
sex: 'M'
id: 4
| true | # Test Models
sinon = require 'sinon'
describe 'Backbone Models', ->
  # Creating a default Model class for all tests; it syncs against '/students'.
  class studentModel extends Backbone.Model
    urlRoot: '/students'
    defaults:
      name: 'PI:NAME:<NAME>END_PI'
      age: null
      sex: null
  beforeEach ->
    # Stub jQuery.ajax so Backbone's sync layer never touches the network;
    # every test below inspects the stub instead of real responses.
    sinon.stub(jQuery, 'ajax')
    # Creating one student
    @student = new studentModel
      name: 'PI:NAME:<NAME>END_PI'
      age: 17
      sex: 'M'
  afterEach ->
    # Undo the stub so other suites see the real jQuery.ajax again.
    jQuery.ajax.restore()
  it 'Should create a model', ->
    # Creating one student; 'sex' is omitted so it should come from defaults.
    student = new studentModel
      name: 'PI:NAME:<NAME>END_PI'
      age: 17
    attrs = student.attributes
    expect(attrs).toEqual {name: 'PI:NAME:<NAME>END_PI', age: 17, sex: null}
  it 'Should fetch a model', () ->
    # Setting student id for fetching; with an id the request URL is
    # urlRoot + '/' + id (here '/students/4').
    @student.set 'id', 4
    @student.fetch
      success: sinon.spy()
    expect(jQuery.ajax.calledWithMatch { url: '/students/4' }).toBe true
  it 'Should save a model', ->
    # Saving student model without an id targets the collection URL.
    @student.save
      success: sinon.spy()
    expect jQuery.ajax.calledWithMatch
      url: '/students'
    .toBe true
    # The request payload is the JSON-serialized attribute hash.
    expect jQuery.ajax.getCall(0).args[0].data
    .toEqual JSON.stringify
      name: 'PI:NAME:<NAME>END_PI'
      age: 17
      sex: 'M'
  it 'Should react to changes', (done)->
    @student.on 'change', (model) ->
      # Inside the handler the model already carries the new attributes,
      # so they must differ from previousAttributes().
      expect(model.previousAttributes()).not.toEqual model.attributes
      done()
    @student.set 'name', 'PI:NAME:<NAME>END_PI'
  it 'Should react to a specific attribute change', ->
    name_callback = sinon.spy()
    age_callback = sinon.spy()
    @student.on 'change:name', name_callback
    @student.on 'change:age', age_callback
    @student.set 'name', 'PI:NAME:<NAME>END_PI'
    # Only the change:name listener should have fired.
    expect(name_callback.called).toBe true
    expect(age_callback.called).toBe false
    # spy.args[0] references the arguments for the first call.
    # So [0][0] refers to the first call, first argument: the model.
    expect(name_callback.args[0][0].get 'name').toEqual 'PI:NAME:<NAME>END_PI'
  it 'Should save an existing model', ->
    # A model that already has an id is saved against '/students/:id'.
    @student.set 'id', 4
    @student.set 'name', 'PI:NAME:<NAME>END_PI'
    @student.save
      success: sinon.spy()
    expect jQuery.ajax.calledWithMatch
      url: '/students/4'
    .toBe true
    expect jQuery.ajax.getCall(0).args[0].data
    .toEqual JSON.stringify
      name: 'PI:NAME:<NAME>END_PI'
      age: 17
      sex: 'M'
      id: 4
|
[
{
"context": "right <%= grunt.template.today(\\\"yyyy-mm-dd\\\") %>, Shawn Mclean*/\\n\"\n build:\n src: \"build/idle.js\"\n ",
"end": 254,
"score": 0.9994148015975952,
"start": 242,
"tag": "NAME",
"value": "Shawn Mclean"
}
] | Gruntfile.coffee | JustinTulloss/Idle.js | 224 | module.exports = (grunt) ->
# Project configuration.
grunt.initConfig
pkg: grunt.file.readJSON("package.json")
uglify:
options:
banner: "/*! <%= pkg.name %>, copyright <%= grunt.template.today(\"yyyy-mm-dd\") %>, Shawn Mclean*/\n"
build:
src: "build/idle.js"
dest: "build/idle.min.js"
coffee:
compile:
files:
'build/idle.js':'src/*.coffee'
'test/tests.js':'test/*.coffee'
qunit:
all: ['test/*.htm']
# Load the plugins
grunt.loadNpmTasks 'grunt-contrib-uglify'
grunt.loadNpmTasks 'grunt-contrib-coffee'
grunt.loadNpmTasks 'grunt-contrib-qunit'
# Default task(s).
grunt.registerTask 'build', ['coffee', 'uglify', 'qunit']
grunt.registerTask 'default', 'build'
| 150181 | module.exports = (grunt) ->
  # Project configuration.
  grunt.initConfig
    pkg: grunt.file.readJSON("package.json")
    # Minify build/idle.js, prepending a banner with package name, date, author.
    uglify:
      options:
        banner: "/*! <%= pkg.name %>, copyright <%= grunt.template.today(\"yyyy-mm-dd\") %>, <NAME>*/\n"
      build:
        src: "build/idle.js"
        dest: "build/idle.min.js"
    # Compile CoffeeScript sources and tests to JavaScript.
    coffee:
      compile:
        files:
          'build/idle.js':'src/*.coffee'
          'test/tests.js':'test/*.coffee'
    # Run the QUnit pages found under test/.
    qunit:
      all: ['test/*.htm']
  # Load the plugins that provide the tasks configured above.
  grunt.loadNpmTasks 'grunt-contrib-uglify'
  grunt.loadNpmTasks 'grunt-contrib-coffee'
  grunt.loadNpmTasks 'grunt-contrib-qunit'
  # Default task(s): 'build' compiles, minifies, then tests; 'default' aliases it.
  grunt.registerTask 'build', ['coffee', 'uglify', 'qunit']
  grunt.registerTask 'default', 'build'
| true | module.exports = (grunt) ->
  # Project configuration.
  grunt.initConfig
    pkg: grunt.file.readJSON("package.json")
    # Minify build/idle.js, prepending a banner with package name, date, author.
    uglify:
      options:
        banner: "/*! <%= pkg.name %>, copyright <%= grunt.template.today(\"yyyy-mm-dd\") %>, PI:NAME:<NAME>END_PI*/\n"
      build:
        src: "build/idle.js"
        dest: "build/idle.min.js"
    # Compile CoffeeScript sources and tests to JavaScript.
    coffee:
      compile:
        files:
          'build/idle.js':'src/*.coffee'
          'test/tests.js':'test/*.coffee'
    # Run the QUnit pages found under test/.
    qunit:
      all: ['test/*.htm']
  # Load the plugins that provide the tasks configured above.
  grunt.loadNpmTasks 'grunt-contrib-uglify'
  grunt.loadNpmTasks 'grunt-contrib-coffee'
  grunt.loadNpmTasks 'grunt-contrib-qunit'
  # Default task(s): 'build' compiles, minifies, then tests; 'default' aliases it.
  grunt.registerTask 'build', ['coffee', 'uglify', 'qunit']
  grunt.registerTask 'default', 'build'
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.